diff --git a/README.md b/README.md index 25c9163e00dbbd1e70719ddf10d25c821b53bd32..901ccd1ecc366f30c59fff78af116c82650ef840 100644 --- a/README.md +++ b/README.md @@ -33,14 +33,18 @@ Each library has its own package (`adql` for ADQL, `uws` for UWS and `tap` for T ### Dependencies Below are summed up the dependencies of each library: -* ADQL: `adql`, `cds.utils` -* UWS: `uws`, `org.json` -* TAP: `adql`, `uws`, `cds.*`, `org.json` +* ADQL: `adql`, `cds.utils`, `org.postgresql` *(for adql.translator.PgSphereTranslator only)* +* UWS: `uws`, `org.json`, HTTP Multipart lib. (`com.oreilly.servlet`) +* TAP: `adql`, `uws`, `cds.*`, `org.json`, `org.postgresql` *(for adql.translator.PgSphereTranslator only)*, HTTP Multipart lib. (`com.oreilly.servlet`), STIL (`nom.tap`, `org.apache.tools.bzip2`, `uk.ac.starlink`) + +In the `lib` directory, you will find 2 JAR files: +* `cos-1.5beta.jar` to deal with HTTP multipart requests +* `stil3.0-5.jar` for [STIL](http://www.star.bris.ac.uk/~mbt/stil/) (VOTable and other formats support) ### ANT scripts At the root of the repository, there are 3 ANT scripts. Each is dedicated to one library. They are able to generate JAR for sources, binaries and Javadoc. 3 properties must be set before using one of these scripts: -* `CATALINA`: a path toward a JAR or a binary directory containing org.apache.catalina.connector.ClientAbortException.class +* `POSTGRES`: a path toward a JAR or a binary directory containing all org.postgresql.* - [https://jdbc.postgresql.org/download.html](JDBC Postgres driver) - **(ONLY for ADQL and TAP if you want to keep adql.translator.PgSphereTranslator)** * `SERVLET-API`: a path toward a JAR or a binary directory containing all javax.servlet.* -* (`JUNIT-API` *not required before the version 2.0 of the tap library*: a path toward one or several JARs or binary directories containing all classes to use JUnit.) 
+* (`JUNIT-API` *not required before the version 2.0 of the tap library OR if you are not interested by the `test` directory (JUnit tests)*: a path toward one or several JARs or binary directories containing all classes to use JUnit.) diff --git a/buildADQL.xml b/buildADQL.xml index 398c5020287719612361c47c6765f43702037bfb..26de09d72c099ece8b75272734779d37d322fcdf 100644 --- a/buildADQL.xml +++ b/buildADQL.xml @@ -2,7 +2,7 @@ - + @@ -11,7 +11,7 @@ - + @@ -22,6 +22,33 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + ADQL LIBRARY VERSION = ${version} @@ -30,7 +57,24 @@ - + + + + + + + + + + + + + + + + + + @@ -41,9 +85,11 @@ - + - + + + @@ -63,7 +109,9 @@ - + + + @@ -76,7 +124,7 @@ - + @@ -85,4 +133,4 @@ - \ No newline at end of file + diff --git a/buildTAP.xml b/buildTAP.xml index 4ad484e22698cc89b029cce743893a7a1d54f8e7..c8af0a6516c6bf0894bad378e9cbc749c0fab2aa 100644 --- a/buildTAP.xml +++ b/buildTAP.xml @@ -2,39 +2,68 @@ - + + - - + + + + + - - - + + + - - + + + + + + + + + + + + + + + + + + - - + + + + + + + + + + + TAP LIBRARY VERSION = ${version} @@ -43,7 +72,34 @@ - + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -52,9 +108,9 @@ - + - + @@ -62,14 +118,26 @@ Generate the library: - + + + + + + + - - + + + + + + + + @@ -83,7 +151,7 @@ - + @@ -93,4 +161,4 @@ - \ No newline at end of file + diff --git a/buildUWS.xml b/buildUWS.xml index 88a8dedd0e71e6edb10a2e9efe22efb3678c09f2..d6ddd7f9bdb01ff997106ad8b4b90829498543ee 100644 --- a/buildUWS.xml +++ b/buildUWS.xml @@ -2,35 +2,47 @@ - + + + + + - - + + + - - - + + + - + + - + + + + + + + UWS LIBRARY VERSION = ${version} @@ -39,7 +51,24 @@ - + + + + + + + + + + + + + + + + + + @@ -48,21 +77,27 @@ - + - + Generate the library: - + + + + - + + + + @@ -75,7 +110,7 @@ - + @@ -85,4 +120,4 @@ - \ No newline at end of file + diff --git a/lib/cos-1.5beta.jar b/lib/cos-1.5beta.jar new file mode 100644 index 0000000000000000000000000000000000000000..c30b6238dd5372f873e46acc19273a1d3394edb9 Binary files 
/dev/null and b/lib/cos-1.5beta.jar differ diff --git a/lib/uploadUtilsSrc/cos.jar b/lib/cos.jar similarity index 100% rename from lib/uploadUtilsSrc/cos.jar rename to lib/cos.jar diff --git a/lib/stil_3.0-11.jar b/lib/stil_3.0-11.jar new file mode 100644 index 0000000000000000000000000000000000000000..1c7d53a179b8dd7c4d50f91a028d533e4cf59aa6 Binary files /dev/null and b/lib/stil_3.0-11.jar differ diff --git a/lib/uploadUtils.jar b/lib/uploadUtils.jar deleted file mode 100644 index 074638e30fbd33bca8d7bf6593d0920dbcc91fdd..0000000000000000000000000000000000000000 Binary files a/lib/uploadUtils.jar and /dev/null differ diff --git a/lib/uploadUtilsSrc/binarySavot.jar b/lib/uploadUtilsSrc/binarySavot.jar deleted file mode 100644 index 175bfaf7b60d030f0600a80f826bbf338c8c19c3..0000000000000000000000000000000000000000 Binary files a/lib/uploadUtilsSrc/binarySavot.jar and /dev/null differ diff --git a/lib/uploadUtilsSrc/buildJar.xml b/lib/uploadUtilsSrc/buildJar.xml deleted file mode 100644 index de5beb64495e00d108e5252a31d495d2846cb7df..0000000000000000000000000000000000000000 --- a/lib/uploadUtilsSrc/buildJar.xml +++ /dev/null @@ -1,23 +0,0 @@ - - - - - - - - - - - - Création du jar (${jarDest}/${jarFile})... 
- - - - - - - - - - - \ No newline at end of file diff --git a/lib/uploadUtilsSrc/cds.savot.common.jar b/lib/uploadUtilsSrc/cds.savot.common.jar deleted file mode 100644 index 9966915834caa384e344a8668a5f0f2dac02be09..0000000000000000000000000000000000000000 Binary files a/lib/uploadUtilsSrc/cds.savot.common.jar and /dev/null differ diff --git a/lib/uploadUtilsSrc/cds.savot.model.jar b/lib/uploadUtilsSrc/cds.savot.model.jar deleted file mode 100644 index 00aac3bb37db258347bec5768027c82a70e90ff3..0000000000000000000000000000000000000000 Binary files a/lib/uploadUtilsSrc/cds.savot.model.jar and /dev/null differ diff --git a/lib/uploadUtilsSrc/cds.savot.pull.jar b/lib/uploadUtilsSrc/cds.savot.pull.jar deleted file mode 100644 index fb8dc22d327c808110a07324a3bd8beee9643560..0000000000000000000000000000000000000000 Binary files a/lib/uploadUtilsSrc/cds.savot.pull.jar and /dev/null differ diff --git a/lib/uploadUtilsSrc/cds.savot.writer.jar b/lib/uploadUtilsSrc/cds.savot.writer.jar deleted file mode 100644 index 92113cdb854ccfa625a7f70f37975fdc5b5feb73..0000000000000000000000000000000000000000 Binary files a/lib/uploadUtilsSrc/cds.savot.writer.jar and /dev/null differ diff --git a/lib/uploadUtilsSrc/kxml2-min.jar b/lib/uploadUtilsSrc/kxml2-min.jar deleted file mode 100644 index a77dd1d0063db3d18e1a81cb5a895c45531eccc5..0000000000000000000000000000000000000000 Binary files a/lib/uploadUtilsSrc/kxml2-min.jar and /dev/null differ diff --git a/src/adql/db/DBChecker.java b/src/adql/db/DBChecker.java index 62db0555d7741ee15f21ee101c61dbcffd0e43ff..5aeefc40a8375461df296db9e9205bba71c61db6 100644 --- a/src/adql/db/DBChecker.java +++ b/src/adql/db/DBChecker.java @@ -16,17 +16,25 @@ package adql.db; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . 
* - * Copyright 2011,2013-2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2011,2013-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; +import java.util.Map; import java.util.Stack; +import adql.db.STCS.CoordSys; +import adql.db.STCS.Region; +import adql.db.STCS.RegionType; import adql.db.exception.UnresolvedColumnException; +import adql.db.exception.UnresolvedFunctionException; import adql.db.exception.UnresolvedIdentifiersException; import adql.db.exception.UnresolvedTableException; import adql.parser.ParseException; @@ -42,14 +50,38 @@ import adql.query.SelectItem; import adql.query.from.ADQLTable; import adql.query.from.FromContent; import adql.query.operand.ADQLColumn; +import adql.query.operand.ADQLOperand; +import adql.query.operand.StringConstant; +import adql.query.operand.UnknownType; +import adql.query.operand.function.ADQLFunction; +import adql.query.operand.function.DefaultUDF; +import adql.query.operand.function.UserDefinedFunction; +import adql.query.operand.function.geometry.BoxFunction; +import adql.query.operand.function.geometry.CircleFunction; +import adql.query.operand.function.geometry.GeometryFunction; +import adql.query.operand.function.geometry.PointFunction; +import adql.query.operand.function.geometry.PolygonFunction; +import adql.query.operand.function.geometry.RegionFunction; import adql.search.ISearchHandler; import adql.search.SearchColumnHandler; +import adql.search.SimpleReplaceHandler; import adql.search.SimpleSearchHandler; /** + * This {@link QueryChecker} implementation is able to do the following verifications on an ADQL query: + *
    + *
  1. Check the existence of all table and column references found in a query
  2. + *
  3. Resolve all unknown functions as supported User Defined Functions (UDFs)
  4. + *
  5. Check whether all used geometrical functions are supported
  6. + *
  7. Check whether all used coordinate systems are supported
  8. + *
  9. Check that types of columns and UDFs match with their context
  10. + *
+ * + *

Check tables and columns

*

- * Checks the existence of tables and columns, but also adds database metadata - * on {@link ADQLTable} and {@link ADQLColumn} instances when they are resolved. + * In addition to check the existence of tables and columns referenced in the query, + * this checked will also attach database metadata on these references ({@link ADQLTable} + * and {@link ADQLColumn} instances when they are resolved. *

* *

These information are:

@@ -59,35 +91,242 @@ import adql.search.SimpleSearchHandler; * * *

Note: - * Knowing DB metadata of {@link ADQLTable} and {@link ADQLColumn} is particularly useful for the translation of the ADQL query to SQL, because the ADQL name of columns and tables - * can be replaced in SQL by their DB name, if different. This mapping is done automatically by {@link adql.translator.PostgreSQLTranslator}. + * Knowing DB metadata of {@link ADQLTable} and {@link ADQLColumn} is particularly useful for the translation of the ADQL query to SQL, + * because the ADQL name of columns and tables can be replaced in SQL by their DB name, if different. This mapping is done automatically + * by {@link adql.translator.JDBCTranslator}. *

* * @author Grégory Mantelet (CDS;ARI) - * @version 1.2 (04/2014) + * @version 1.3 (05/2015) */ public class DBChecker implements QueryChecker { /** List of all available tables ({@link DBTable}). */ protected SearchTableList lstTables; + /**

List of all allowed geometrical functions (i.e. CONTAINS, REGION, POINT, COORD2, ...).

+ *

+ * If this list is NULL, all geometrical functions are allowed. + * However, if not, all items of this list must be the only allowed geometrical functions. + * So, if the list is empty, no such function is allowed. + *

+ * @since 1.3 */ + protected String[] allowedGeo = null; + + /**

List of all allowed coordinate systems.

+ *

+ * Each item of this list must be of the form: "{frame} {refpos} {flavor}". + * Each of these 3 items can be either of value, a list of values expressed with the syntax "({value1}|{value2}|...)" + * or a '*' to mean all possible values. + *

+ *

Note: since a default value (corresponding to the empty string - '') should always be possible for each part of a coordinate system, + * the checker will always add the default value (UNKNOWNFRAME, UNKNOWNREFPOS or SPHERICAL2) into the given list of possible values for each coord. sys. part.

+ *

+ * If this list is NULL, all coordinates systems are allowed. + * However, if not, all items of this list must be the only allowed coordinate systems. + * So, if the list is empty, none is allowed. + *

+ * @since 1.3 */ + protected String[] allowedCoordSys = null; + + /**

A regular expression built using the list of allowed coordinate systems. + * With this regex, it is possible to known whether a coordinate system expression is allowed or not.

+ *

If NULL, all coordinate systems are allowed.

+ * @since 1.3 */ + protected String coordSysRegExp = null; + + /**

List of all allowed User Defined Functions (UDFs).

+ *

+ * If this list is NULL, any encountered UDF will be allowed. + * However, if not, all items of this list must be the only allowed UDFs. + * So, if the list is empty, no UDF is allowed. + *

+ * @since 1.3 */ + protected FunctionDef[] allowedUdfs = null; + /* ************ */ /* CONSTRUCTORS */ /* ************ */ /** - * Builds a {@link DBChecker} with an empty list of tables. + *

Builds a {@link DBChecker} with an empty list of tables.

+ * + *

Verifications done by this object after creation:

+ * */ public DBChecker(){ - lstTables = new SearchTableList(); + this(null, null); } /** - * Builds a {@link DBChecker} with the given list of tables. + *

Builds a {@link DBChecker} with the given list of known tables.

+ * + *

Verifications done by this object after creation:

+ * * * @param tables List of all available tables. */ - public DBChecker(final Collection tables){ + public DBChecker(final Collection tables){ + this(tables, null); + } + + /** + *

Builds a {@link DBChecker} with the given list of known tables and with a restricted list of user defined functions.

+ * + *

Verifications done by this object after creation:

+ * + * + * @param tables List of all available tables. + * @param allowedUdfs List of all allowed user defined functions. + * If NULL, no verification will be done (and so, all UDFs are allowed). + * If empty list, no "unknown" (or UDF) is allowed. + * Note: match with items of this list are done case insensitively. + * + * @since 1.3 + */ + public DBChecker(final Collection tables, final Collection allowedUdfs){ + // Sort and store the given tables: setTables(tables); + + Object[] tmp; + int cnt; + + // Store all allowed UDFs in a sorted array: + if (allowedUdfs != null){ + // Remove all NULL and empty strings: + tmp = new FunctionDef[allowedUdfs.size()]; + cnt = 0; + for(FunctionDef udf : allowedUdfs){ + if (udf != null && udf.name.trim().length() > 0) + tmp[cnt++] = udf; + } + // make a copy of the array: + this.allowedUdfs = new FunctionDef[cnt]; + System.arraycopy(tmp, 0, this.allowedUdfs, 0, cnt); + + tmp = null; + // sort the values: + Arrays.sort(this.allowedUdfs); + } + } + + /** + *

Builds a {@link DBChecker} with the given list of known tables and with a restricted list of user defined functions.

+ * + *

Verifications done by this object after creation:

+ * + * + * @param tables List of all available tables. + * @param allowedGeoFcts List of all allowed geometrical functions (i.e. CONTAINS, POINT, UNION, CIRCLE, COORD1). + * If NULL, no verification will be done (and so, all geometries are allowed). + * If empty list, no geometry function is allowed. + * Note: match with items of this list are done case insensitively. + * @param allowedCoordSys List of all allowed coordinate system patterns. The syntax of a such pattern is the following: + * "{frame} {refpos} {flavor}" ; on the contrary to a coordinate system expression, here no part is optional. + * Each part of this pattern can be one the possible values (case insensitive), a list of possible values + * expressed with the syntax "({value1}|{value2}|...)", or a '*' for any valid value. + * For instance: "ICRS (GEOCENTER|heliocenter) *". + * If the given list is NULL, no verification will be done (and so, all coordinate systems are allowed). + * If it is empty, no coordinate system is allowed (except the default values - generally expressed by an empty string: ''). + * + * @since 1.3 + */ + public DBChecker(final Collection tables, final Collection allowedGeoFcts, final Collection allowedCoordSys) throws ParseException{ + this(tables, null, allowedGeoFcts, allowedCoordSys); + } + + /** + *

Builds a {@link DBChecker}.

+ * + *

Verifications done by this object after creation:

+ *
    + *
  • Existence of tables and columns: OK
  • + *
  • Existence of User Defined Functions (UDFs): OK
  • + *
  • Support of geometrical functions: OK
  • + *
  • Support of coordinate systems: OK
  • + *
+ * + * @param tables List of all available tables. + * @param allowedUdfs List of all allowed user defined functions. + * If NULL, no verification will be done (and so, all UDFs are allowed). + * If empty list, no "unknown" (or UDF) is allowed. + * Note: match with items of this list are done case insensitively. + * @param allowedGeoFcts List of all allowed geometrical functions (i.e. CONTAINS, POINT, UNION, CIRCLE, COORD1). + * If NULL, no verification will be done (and so, all geometries are allowed). + * If empty list, no geometry function is allowed. + * Note: match with items of this list are done case insensitively. + * @param allowedCoordSys List of all allowed coordinate system patterns. The syntax of a such pattern is the following: + * "{frame} {refpos} {flavor}" ; on the contrary to a coordinate system expression, here no part is optional. + * Each part of this pattern can be one the possible values (case insensitive), a list of possible values + * expressed with the syntax "({value1}|{value2}|...)", or a '*' for any valid value. + * For instance: "ICRS (GEOCENTER|heliocenter) *". + * If the given list is NULL, no verification will be done (and so, all coordinate systems are allowed). + * If it is empty, no coordinate system is allowed (except the default values - generally expressed by an empty string: ''). + * + * @since 1.3 + */ + public DBChecker(final Collection tables, final Collection allowedUdfs, final Collection allowedGeoFcts, final Collection allowedCoordSys) throws ParseException{ + // Set the list of available tables + Set the list of all known UDFs: + this(tables, allowedUdfs); + + // Set the list of allowed geometrical functions: + allowedGeo = specialSort(allowedGeoFcts); + + // Set the list of allowed coordinate systems: + this.allowedCoordSys = specialSort(allowedCoordSys); + coordSysRegExp = STCS.buildCoordSysRegExp(this.allowedCoordSys); + } + + /** + * Transform the given collection of string elements in a sorted array. 
+ * Only non-NULL and non-empty strings are kept. + * + * @param items Items to copy and sort. + * + * @return A sorted array containing all - except NULL and empty strings - items of the given collection. + * + * @since 1.3 + */ + protected final static String[] specialSort(final Collection items){ + // Nothing to do if the array is NULL: + if (items == null) + return null; + + // Keep only valid items (not NULL and not empty string): + String[] tmp = new String[items.size()]; + int cnt = 0; + for(String item : items){ + if (item != null && item.trim().length() > 0) + tmp[cnt++] = item; + } + + // Make an adjusted array copy: + String[] copy = new String[cnt]; + System.arraycopy(tmp, 0, copy, 0, cnt); + + // Sort the values: + Arrays.sort(copy); + + return copy; } /* ****** */ @@ -98,12 +337,12 @@ public class DBChecker implements QueryChecker { * *

Note: * Only if the given collection is NOT an instance of {@link SearchTableList}, - * the collection will be copied inside a new {@link SearchTableList}. + * the collection will be copied inside a new {@link SearchTableList}, otherwise it is used as provided. *

* * @param tables List of {@link DBTable}s. */ - public final void setTables(final Collection tables){ + public final void setTables(final Collection tables){ if (tables == null) lstTables = new SearchTableList(); else if (tables instanceof SearchTableList) @@ -130,63 +369,136 @@ public class DBChecker implements QueryChecker { * @see #check(ADQLQuery, Stack) */ @Override - public void check(final ADQLQuery query) throws ParseException{ + public final void check(final ADQLQuery query) throws ParseException{ check(query, null); } /** - * Followed algorithm: - *
-	 * Map<DBTable,ADQLTable> mapTables;
-	 * 
-	 * For each ADQLTable t
-	 * 	if (t.isSubQuery())
-	 * 		dbTable = generateDBTable(t.getSubQuery, t.getAlias());
-	 * 	else
-	 * 		dbTable = resolveTable(t);
-	 * 	t.setDBLink(dbTable);
-	 * 	dbTables.put(t, dbTable);
-	 * End
-	 * 
-	 * For each SelectAllColumns c
-	 * 	table = c.getAdqlTable();
-	 * 	if (table != null){
-	 * 		dbTable = resolveTable(table);
-	 * 		if (dbTable == null)
-	 * 			dbTable = query.getFrom().getTablesByAlias(table.getTableName(), table.isCaseSensitive(IdentifierField.TABLE));
-	 *		if (dbTable == null)
-	 *			throw new UnresolvedTableException(table);
-	 * 		table.setDBLink(dbTable);
-	 * 	}
-	 * End
-	 * 
-	 * SearchColumnList list = query.getFrom().getDBColumns();
-	 * 
-	 * For each ADQLColumn c
-	 * 	dbColumn = resolveColumn(c, list);
-	 * 	c.setDBLink(dbColumn);
-	 * 	c.setAdqlTable(mapTables.get(dbColumn.getTable()));
-	 * End
-	 * 
-	 * For each ColumnReference colRef
-	 *	checkColumnReference(colRef, query.getSelect(), list);
-	 * End
-	 * 
+ *

Process several (semantic) verifications in the given ADQL query.

+ * + *

Main verifications done in this function:

+ *
    + *
  1. Existence of DB items (tables and columns)
  2. + *
  3. Semantic verification of sub-queries
  4. + *
  5. Support of every encountered User Defined Functions (UDFs - functions unknown by the syntactic parser)
  6. + *
  7. Support of every encountered geometries (functions, coordinate systems and STC-S expressions)
  8. + *
  9. Consistency of types still unknown (because the syntactic parser could not yet resolve them)
  10. + *
* * @param query The query to check. - * @param fathersList List of all columns available in the father query. + * @param fathersList List of all columns available in the father queries and that should be accessed in sub-queries. + * Each item of this stack is a list of columns available in each father-level query. + * Note: this parameter is NULL if this function is called with the root/father query as parameter. * - * @throws UnresolvedIdentifiersException An {@link UnresolvedIdentifiersException} if some tables or columns can not be resolved. + * @throws UnresolvedIdentifiersException An {@link UnresolvedIdentifiersException} if one or several of the above listed tests have detected + * some semantic errors (i.e. unresolved table, columns, function). * * @since 1.2 * - * @see #resolveTable(ADQLTable) - * @see #generateDBTable(ADQLQuery, String) - * @see #resolveColumn(ADQLColumn, SearchColumnList, Stack) - * @see #checkColumnReference(ColumnReference, ClauseSelect, SearchColumnList) + * @see #checkDBItems(ADQLQuery, Stack, UnresolvedIdentifiersException) + * @see #checkSubQueries(ADQLQuery, Stack, SearchColumnList, UnresolvedIdentifiersException) + * @see #checkUDFs(ADQLQuery, UnresolvedIdentifiersException) + * @see #checkGeometries(ADQLQuery, UnresolvedIdentifiersException) + * @see #checkTypes(ADQLQuery, UnresolvedIdentifiersException) */ - protected void check(final ADQLQuery query, Stack fathersList) throws UnresolvedIdentifiersException{ + protected void check(final ADQLQuery query, final Stack fathersList) throws UnresolvedIdentifiersException{ UnresolvedIdentifiersException errors = new UnresolvedIdentifiersException(); + + // A. Check DB items (tables and columns): + SearchColumnList availableColumns = checkDBItems(query, fathersList, errors); + + // B. Check UDFs: + if (allowedUdfs != null) + checkUDFs(query, errors); + + // C. Check geometries: + checkGeometries(query, errors); + + // D. Check types: + checkTypes(query, errors); + + // E. 
Check sub-queries: + checkSubQueries(query, fathersList, availableColumns, errors); + + // Throw all errors, if any: + if (errors.getNbErrors() > 0) + throw errors; + } + + /* ************************************************ */ + /* CHECKING METHODS FOR DB ITEMS (TABLES & COLUMNS) */ + /* ************************************************ */ + + /** + *

Check DB items (tables and columns) used in the given ADQL query.

+ * + *

Operations done in this function:

+ *
    + *
  1. Resolve all found tables
  2. + *
  3. Get the whole list of all available columns Note: this list is returned by this function.
  4. + *
  5. Resolve all found columns
  6. + *
+ * + * @param query Query in which the existence of DB items must be checked. + * @param fathersList List of all columns available in the father queries and that should be accessed in sub-queries. + * Each item of this stack is a list of columns available in each father-level query. + * Note: this parameter is NULL if this function is called with the root/father query as parameter. + * @param errors List of errors to complete in this function each time an unknown table or column is encountered. + * + * @return List of all columns available in the given query. + * + * @see #resolveTables(ADQLQuery, Stack, UnresolvedIdentifiersException) + * @see FromContent#getDBColumns() + * @see #resolveColumns(ADQLQuery, Stack, Map, SearchColumnList, UnresolvedIdentifiersException) + * + * @since 1.3 + */ + protected SearchColumnList checkDBItems(final ADQLQuery query, final Stack fathersList, final UnresolvedIdentifiersException errors){ + // a. Resolve all tables: + Map mapTables = resolveTables(query, fathersList, errors); + + // b. Get the list of all columns made available in the clause FROM: + SearchColumnList availableColumns; + try{ + availableColumns = query.getFrom().getDBColumns(); + }catch(ParseException pe){ + errors.addException(pe); + availableColumns = new SearchColumnList(); + } + + // c. Resolve all columns: + resolveColumns(query, fathersList, mapTables, availableColumns, errors); + + return availableColumns; + } + + /** + *

Search all table references inside the given query, resolve them against the available tables, and if there is only one match, + * attach the matching metadata to them.

+ * + * Management of sub-query tables + *

+ * If a table is not a DB table reference but a sub-query, this latter is first checked (using {@link #check(ADQLQuery, Stack)} ; + * but the father list must not contain tables of the given query, because on the same level) and then corresponding table metadata + * are generated (using {@link #generateDBTable(ADQLQuery, String)}) and attached to it. + *

+ * + * Management of "{table}.*" in the SELECT clause + *

+ * For each of this SELECT item, this function tries to resolve the table name. If only one match is found, the corresponding ADQL table object + * is got from the list of resolved tables and attached to this SELECT item (thus, the joker item will also have the good metadata, + * particularly if the referenced table is a sub-query). + *

+ * + * @param query Query in which the existence of tables must be checked. + * @param fathersList List of all columns available in the father queries and that should be accessed in sub-queries. + * Each item of this stack is a list of columns available in each father-level query. + * Note: this parameter is NULL if this function is called with the root/father query as parameter. + * @param errors List of errors to complete in this function each time an unknown table or column is encountered. + * + * @return An associative map of all the resolved tables. + */ + protected Map resolveTables(final ADQLQuery query, final Stack fathersList, final UnresolvedIdentifiersException errors){ HashMap mapTables = new HashMap(); ISearchHandler sHandler; @@ -200,14 +512,14 @@ public class DBChecker implements QueryChecker { // resolve the table: DBTable dbTable = null; if (table.isSubQuery()){ - // check the subquery tables: + // check the sub-query tables: check(table.getSubQuery(), fathersList); // generate its DBTable: dbTable = generateDBTable(table.getSubQuery(), table.getAlias()); }else{ dbTable = resolveTable(table); if (table.hasAlias()) - dbTable = dbTable.copy(dbTable.getDBName(), table.getAlias()); + dbTable = dbTable.copy(null, table.getAlias()); } // link with the matched DBTable: @@ -220,9 +532,9 @@ public class DBChecker implements QueryChecker { // Attach table information on wildcards with the syntax "{tableName}.*" of the SELECT clause: /* Note: no need to check the table name among the father tables, because there is - * no interest to select a father column in a subquery - * (which can return only one column ; besides, no aggregate is not allowed - * in subqueries).*/ + * no interest to select a father column in a sub-query + * (which can return only one column ; besides, no aggregate is allowed + * in sub-queries).*/ sHandler = new SearchWildCardHandler(); sHandler.search(query.getSelect()); for(ADQLObject result : sHandler){ @@ -230,31 +542,77 @@ public 
class DBChecker implements QueryChecker { SelectAllColumns wildcard = (SelectAllColumns)result; ADQLTable table = wildcard.getAdqlTable(); DBTable dbTable = null; - // First, try to resolve the table by table alias: + + // first, try to resolve the table by table alias: if (table.getTableName() != null && table.getSchemaName() == null){ ArrayList tables = query.getFrom().getTablesByAlias(table.getTableName(), table.isCaseSensitive(IdentifierField.TABLE)); if (tables.size() == 1) dbTable = tables.get(0).getDBLink(); } - // Then try to resolve the table reference by table name: + + // then try to resolve the table reference by table name: if (dbTable == null) dbTable = resolveTable(table); - // table.setDBLink(dbTable); + // set the corresponding tables among the list of resolved tables: wildcard.setAdqlTable(mapTables.get(dbTable)); }catch(ParseException pe){ errors.addException(pe); } } - // Get the list of all columns made available in the clause FROM: - SearchColumnList list; - try{ - list = query.getFrom().getDBColumns(); - }catch(ParseException pe){ - errors.addException(pe); - list = new SearchColumnList(); - } + return mapTables; + } + + /** + * Resolve the given table, that's to say search for the corresponding {@link DBTable}. + * + * @param table The table to resolve. + * + * @return The corresponding {@link DBTable} if found, null otherwise. + * + * @throws ParseException An {@link UnresolvedTableException} if the given table can't be resolved. + */ + protected DBTable resolveTable(final ADQLTable table) throws ParseException{ + ArrayList tables = lstTables.search(table); + + // good if only one table has been found: + if (tables.size() == 1) + return tables.get(0); + // but if more than one: ambiguous table name ! + else if (tables.size() > 1) + throw new UnresolvedTableException(table, tables.get(0).getADQLSchemaName() + "." + tables.get(0).getADQLName(), tables.get(1).getADQLSchemaName() + "." 
+ tables.get(1).getADQLName()); + // otherwise (no match): unknown table ! + else + throw new UnresolvedTableException(table); + } + + /** + *

Search all column references inside the given query, resolve them thanks to the given tables' metadata, + * and if there is only one match, attach the matching metadata to them.

+ * + * Management of selected columns' references + *

+ * A column reference is not only a direct reference to a table column using a column name. + * It can also be a reference to an item of the SELECT clause (which will then call a "selected column"). + * That kind of reference can be either an index (an unsigned integer starting from 1 to N, where N is the + * number selected columns), or the name/alias of the column. + *

+ *

+ * These references are also checked, in a second step, in this function. Thus, column metadata are + * also attached to them, as common columns. + *

+ * + * @param query Query in which the existence of tables must be checked. + * @param fathersList List of all columns available in the father queries and that should be accessed in sub-queries. + * Each item of this stack is a list of columns available in each father-level query. + * Note: this parameter is NULL if this function is called with the root/father query as parameter. + * @param mapTables List of all resolved tables. + * @param list List of column metadata to complete in this function each time a column reference is resolved. + * @param errors List of errors to complete in this function each time an unknown table or column is encountered. + */ + protected void resolveColumns(final ADQLQuery query, final Stack fathersList, final Map mapTables, final SearchColumnList list, final UnresolvedIdentifiersException errors){ + ISearchHandler sHandler; // Check the existence of all columns: sHandler = new SearchColumnHandler(); @@ -272,7 +630,7 @@ public class DBChecker implements QueryChecker { } } - // Check the correctness of all column references: + // Check the correctness of all column references (= references to selected columns): /* Note: no need to provide the father tables when resolving column references, * because no father column can be used in ORDER BY and/or GROUP BY. 
*/ sHandler = new SearchColReferenceHandler(); @@ -291,70 +649,22 @@ public class DBChecker implements QueryChecker { errors.addException(pe); } } - - // Check subqueries outside the clause FROM: - sHandler = new SearchSubQueryHandler(); - sHandler.search(query); - if (sHandler.getNbMatch() > 0){ - - // Push the list of columns in the father columns stack: - if (fathersList == null) - fathersList = new Stack(); - fathersList.push(list); - - // Check each found subquery (except the first one because it is the current query): - for(ADQLObject result : sHandler){ - try{ - check((ADQLQuery)result, fathersList); - }catch(UnresolvedIdentifiersException uie){ - Iterator itPe = uie.getErrors(); - while(itPe.hasNext()) - errors.addException(itPe.next()); - } - } - - // Pop the list of columns from the father columns stack: - fathersList.pop(); - - } - - // Throw all errors if any: - if (errors.getNbErrors() > 0) - throw errors; } /** - * Resolves the given table, that's to say searches for the corresponding {@link DBTable}. - * - * @param table The table to resolve. + *

Resolve the given column, that's to say search for the corresponding {@link DBColumn}.

* - * @return The corresponding {@link DBTable} if found, null otherwise. - * - * @throws ParseException An {@link UnresolvedTableException} if the given table can't be resolved. - */ - protected DBTable resolveTable(final ADQLTable table) throws ParseException{ - ArrayList tables = lstTables.search(table); - - // good if only one table has been found: - if (tables.size() == 1) - return tables.get(0); - // but if more than one: ambiguous table name ! - else if (tables.size() > 1) - throw new UnresolvedTableException(table, tables.get(0).getADQLSchemaName() + "." + tables.get(0).getADQLName(), tables.get(1).getADQLSchemaName() + "." + tables.get(1).getADQLName()); - // otherwise (no match): unknown table ! - else - throw new UnresolvedTableException(table); - } - - /** - *

Resolves the given column, that's to say searches for the corresponding {@link DBColumn}.

- *

The third parameter is used only if this function is called inside a subquery. In this case, - * column is tried to be resolved with the first list (dbColumns). If no match is found, - * the resolution is tried with the father columns list (fatherColumns).

+ *

+ * The third parameter is used only if this function is called inside a sub-query. In this case, + * the column is tried to be resolved with the first list (dbColumns). If no match is found, + * the resolution is tried with the father columns list (fathersList). + *

* * @param column The column to resolve. * @param dbColumns List of all available {@link DBColumn}s. - * @param fathersList List of all columns available in the father query ; a list for each father-level. + * @param fathersList List of all columns available in the father queries and that should be accessed in sub-queries. + * Each item of this stack is a list of columns available in each father-level query. + * Note: this parameter is NULL if this function is called with the root/father query as parameter. * * @return The corresponding {@link DBColumn} if found. Otherwise an exception is thrown. * @@ -386,14 +696,14 @@ public class DBChecker implements QueryChecker { } /** - * Checks whether the given column reference corresponds to a selected item (column or an expression with an alias) + * Check whether the given column reference corresponds to a selected item (column or an expression with an alias) * or to an existing column. * - * @param colRef The column reference which must be checked. - * @param select The SELECT clause of the ADQL query. - * @param dbColumns The list of all available {@link DBColumn}s. + * @param colRef The column reference which must be checked. + * @param select The SELECT clause of the ADQL query. + * @param dbColumns The list of all available columns. * - * @return The corresponding {@link DBColumn} if this reference is actually the name of a column, null otherwise. + * @return The corresponding {@link DBColumn} if this reference is actually the name of a column, null otherwise. * * @throws ParseException An {@link UnresolvedColumnException} if the given column can't be resolved * or an {@link UnresolvedTableException} if its table reference can't be resolved. 
@@ -431,19 +741,16 @@ public class DBChecker implements QueryChecker { } } - /* ************************************* */ - /* DBTABLE & DBCOLUMN GENERATION METHODS */ - /* ************************************* */ /** - * Generates a {@link DBTable} corresponding to the given sub-query with the given table name. - * This {@link DBTable} which contains all {@link DBColumn} returned by {@link ADQLQuery#getResultingColumns()}. + * Generate a {@link DBTable} corresponding to the given sub-query with the given table name. + * This {@link DBTable} will contain all {@link DBColumn} returned by {@link ADQLQuery#getResultingColumns()}. * * @param subQuery Sub-query in which the specified table must be searched. * @param tableName Name of the table to search. * - * @return The corresponding {@link DBTable} if the table has been found in the given sub-query, null otherwise. + * @return The corresponding {@link DBTable} if the table has been found in the given sub-query, null otherwise. * - * @throws ParseException Can be used to explain why the table has not been found. + * @throws ParseException Can be used to explain why the table has not been found. Note: not used by default. */ public static DBTable generateDBTable(final ADQLQuery subQuery, final String tableName) throws ParseException{ DefaultDBTable dbTable = new DefaultDBTable(tableName); @@ -455,6 +762,473 @@ public class DBChecker implements QueryChecker { return dbTable; } + /* ************************* */ + /* CHECKING METHODS FOR UDFs */ + /* ************************* */ + + /** + *

Search all UDFs (User Defined Functions) inside the given query, and then + * check their signature against the list of allowed UDFs.

+ * + *

Note: + * When more than one allowed function match, the function is considered as correct + * and no error is added. + * However, in case of multiple matches, the return type of matching functions could + * be different and in this case, there would be an error while checking later + * the types. In such case, throwing an error could make sense, but the user would + * then need to cast some parameters to help the parser identifying the right function. + * But the type-casting ability is not yet possible in ADQL. + *

+ * + * @param query Query in which UDFs must be checked. + * @param errors List of errors to complete in this function each time a UDF does not match to any of the allowed UDFs. + * + * @since 1.3 + */ + protected void checkUDFs(final ADQLQuery query, final UnresolvedIdentifiersException errors){ + // 1. Search all UDFs: + ISearchHandler sHandler = new SearchUDFHandler(); + sHandler.search(query); + + // If no UDF are allowed, throw immediately an error: + if (allowedUdfs.length == 0){ + for(ADQLObject result : sHandler) + errors.addException(new UnresolvedFunctionException((UserDefinedFunction)result)); + } + // 2. Try to resolve all of them: + else{ + ArrayList toResolveLater = new ArrayList(); + UserDefinedFunction udf; + int match; + BinarySearch binSearch = new BinarySearch(){ + @Override + protected int compare(UserDefinedFunction searchItem, FunctionDef arrayItem){ + return arrayItem.compareTo(searchItem) * -1; + } + }; + + // Try to resolve all the found UDFs: + /* Note: at this stage, it can happen that UDFs can not be yet resolved because the building of + * their signature depends of other UDFs. That's why, these special cases should be kept + * for a later resolution try. */ + for(ADQLObject result : sHandler){ + udf = (UserDefinedFunction)result; + // search for a match: + match = binSearch.search(udf, allowedUdfs); + // if no match... + if (match < 0){ + // ...if the type of all parameters is resolved, add an error (no match is possible): + if (isAllParamTypesResolved(udf)) + errors.addException(new UnresolvedFunctionException(udf)); // TODO Add the ADQLOperand position! 
+ // ...otherwise, try to resolved it later (when other UDFs will be mostly resolved): + else + toResolveLater.add(udf); + } + // if there is a match, metadata may be attached (particularly if the function is built automatically by the syntactic parser): + else if (udf instanceof DefaultUDF) + ((DefaultUDF)udf).setDefinition(allowedUdfs[match]); + } + + // Try to resolve UDFs whose some parameter types are depending of other UDFs: + for(int i = 0; i < toResolveLater.size(); i++){ + udf = toResolveLater.get(i); + // search for a match: + match = binSearch.search(udf, allowedUdfs); + // if no match, add an error: + if (match < 0) + errors.addException(new UnresolvedFunctionException(udf)); // TODO Add the ADQLOperand position! + // otherwise, metadata may be attached (particularly if the function is built automatically by the syntactic parser): + else if (udf instanceof DefaultUDF) + ((DefaultUDF)udf).setDefinition(allowedUdfs[match]); + } + + // 3. Replace all the resolved DefaultUDF by an instance of the class associated with the set signature: + (new ReplaceDefaultUDFHandler(errors)).searchAndReplace(query); + } + } + + /** + *

Tell whether the type of all parameters of the given ADQL function + * is resolved.

+ * + *

A parameter type may not be resolved for 2 main reasons:

+ *
    + *
  • the parameter is a column, but this column has not been successfully resolved. Thus its type is still unknown.
  • + *
  • the parameter is a UDF, but this UDF has not been already resolved. Thus, as for the column, its return type is still unknown. + * But it could be known later if the UDF is resolved later ; a second try should be done afterwards.
  • + *
+ * + * @param fct ADQL function whose the parameters' type should be checked. + * + * @return true if the type of all parameters is known, false otherwise. + * + * @since 1.3 + */ + protected final boolean isAllParamTypesResolved(final ADQLFunction fct){ + for(ADQLOperand op : fct.getParameters()){ + if (op.isNumeric() == op.isString()) + return false; + } + return true; + } + + /* ************************************************************************************************* */ + /* METHODS CHECKING THE GEOMETRIES (geometrical functions, coordinate systems and STC-S expressions) */ + /* ************************************************************************************************* */ + + /** + *

Check all geometries.

+ * + *

Operations done in this function:

+ *
    + *
  1. Check that all geometrical functions are supported
  2. + *
  3. Check that all explicit (string constant) coordinate system definitions are supported
  4. + *
  5. Check all STC-S expressions (only in {@link RegionFunction} for the moment) and + * Apply the 2 previous checks on them
  6. + *
+ * + * @param query Query in which geometries must be checked. + * @param errors List of errors to complete in this function each time a geometry item is not supported. + * + * @see #resolveGeometryFunctions(ADQLQuery, BinarySearch, UnresolvedIdentifiersException) + * @see #resolveCoordinateSystems(ADQLQuery, UnresolvedIdentifiersException) + * @see #resolveSTCSExpressions(ADQLQuery, BinarySearch, UnresolvedIdentifiersException) + * + * @since 1.3 + */ + protected void checkGeometries(final ADQLQuery query, final UnresolvedIdentifiersException errors){ + BinarySearch binSearch = new BinarySearch(){ + @Override + protected int compare(String searchItem, String arrayItem){ + return searchItem.compareToIgnoreCase(arrayItem); + } + }; + + // a. Ensure that all used geometry functions are allowed: + if (allowedGeo != null) + resolveGeometryFunctions(query, binSearch, errors); + + // b. Check whether the coordinate systems are allowed: + if (allowedCoordSys != null) + resolveCoordinateSystems(query, errors); + + // c. Check all STC-S expressions (in RegionFunctions only) + the used coordinate systems (if StringConstant only): + if (allowedGeo == null || (allowedGeo.length > 0 && binSearch.search("REGION", allowedGeo) >= 0)) + resolveSTCSExpressions(query, binSearch, errors); + } + + /** + * Search for all geometrical functions and check whether they are allowed. + * + * @param query Query in which geometrical functions must be checked. + * @param errors List of errors to complete in this function each time a geometrical function is not supported. 
+ * + * @see #checkGeometryFunction(String, ADQLFunction, BinarySearch, UnresolvedIdentifiersException) + * + * @since 1.3 + */ + protected void resolveGeometryFunctions(final ADQLQuery query, final BinarySearch binSearch, final UnresolvedIdentifiersException errors){ + ISearchHandler sHandler = new SearchGeometryHandler(); + sHandler.search(query); + + String fctName; + for(ADQLObject result : sHandler){ + fctName = result.getName(); + checkGeometryFunction(fctName, (ADQLFunction)result, binSearch, errors); + } + } + + /** + *

Check whether the specified geometrical function is allowed by this implementation.

+ * + *

Note: + * If the list of allowed geometrical functions is empty, this function will always add an errors to the given list. + * Indeed, it means that no geometrical function is allowed and so that the specified function is automatically not supported. + *

+ * + * @param fctName Name of the geometrical function to test. + * @param fct The function instance being or containing the geometrical function to check. Note: this function can be the function to test or a function embedding the function under test (i.e. RegionFunction). + * @param binSearch The object to use in order to search a function name inside the list of allowed functions. + * It is able to perform a binary search inside a sorted array of String objects. The interest of + * this object is its compare function which must be overridden and tells how to compare the item + * to search and the items of the array (basically, a non-case-sensitive comparison between 2 strings). + * @param errors List of errors to complete in this function each time a geometrical function is not supported. + * + * @since 1.3 + */ + protected void checkGeometryFunction(final String fctName, final ADQLFunction fct, final BinarySearch binSearch, final UnresolvedIdentifiersException errors){ + int match = -1; + if (allowedGeo.length != 0) + match = binSearch.search(fctName, allowedGeo); + if (match < 0) + errors.addException(new UnresolvedFunctionException("The geometrical function \"" + fctName + "\" is not available in this implementation!", fct)); + } + + /** + *

Search all explicit coordinate system declarations, check their syntax and whether they are allowed by this implementation.

+ * + *

Note: + * "explicit" means here that all {@link StringConstant} instances. Only coordinate systems expressed as string can + * be parsed and so checked. So if a coordinate system is specified by a column, no check can be done at this stage... + * it will be possible to perform such test only at the execution. + *

+ * + * @param query Query in which coordinate systems must be checked. + * @param errors List of errors to complete in this function each time a coordinate system has a wrong syntax or is not supported. + * + * @see #checkCoordinateSystem(StringConstant, UnresolvedIdentifiersException) + * + * @since 1.3 + */ + protected void resolveCoordinateSystems(final ADQLQuery query, final UnresolvedIdentifiersException errors){ + ISearchHandler sHandler = new SearchCoordSysHandler(); + sHandler.search(query); + for(ADQLObject result : sHandler) + checkCoordinateSystem((StringConstant)result, errors); + } + + /** + * Parse and then check the coordinate system contained in the given {@link StringConstant} instance. + * + * @param adqlCoordSys The {@link StringConstant} object containing the coordinate system to check. + * @param errors List of errors to complete in this function each time a coordinate system has a wrong syntax or is not supported. + * + * @see STCS#parseCoordSys(String) + * @see #checkCoordinateSystem(adql.db.STCS.CoordSys, ADQLOperand, UnresolvedIdentifiersException) + * + * @since 1.3 + */ + protected void checkCoordinateSystem(final StringConstant adqlCoordSys, final UnresolvedIdentifiersException errors){ + String coordSysStr = adqlCoordSys.getValue(); + try{ + checkCoordinateSystem(STCS.parseCoordSys(coordSysStr), adqlCoordSys, errors); + }catch(ParseException pe){ + errors.addException(new ParseException(pe.getMessage())); // TODO Missing object position! + } + } + + /** + * Check whether the given coordinate system is allowed by this implementation. + * + * @param coordSys Coordinate system to test. + * @param operand The operand representing or containing the coordinate system under test. + * @param errors List of errors to complete in this function each time a coordinate system is not supported. 
+ * + * @since 1.3 + */ + protected void checkCoordinateSystem(final CoordSys coordSys, final ADQLOperand operand, final UnresolvedIdentifiersException errors){ + if (coordSysRegExp != null && coordSys != null && !coordSys.toFullSTCS().matches(coordSysRegExp)) + errors.addException(new ParseException("Coordinate system \"" + ((operand instanceof StringConstant) ? ((StringConstant)operand).getValue() : coordSys.toString()) + "\" (= \"" + coordSys.toFullSTCS() + "\") not allowed in this implementation.")); // TODO Missing object position! + List of accepted coordinate systems + } + + /** + *

Search all STC-S expressions inside the given query, parse them (and so check their syntax) and then determine + * whether the declared coordinate system and the expressed region are allowed in this implementation.

+ * + *

Note: + * In the current ADQL language definition, STC-S expressions can be found only as only parameter of the REGION function. + *

+ * + * @param query Query in which STC-S expressions must be checked. + * @param binSearch The object to use in order to search a region name inside the list of allowed functions/regions. + * It is able to perform a binary search inside a sorted array of String objects. The interest of + * this object is its compare function which must be overridden and tells how to compare the item + * to search and the items of the array (basically, a non-case-sensitive comparison between 2 strings). + * @param errors List of errors to complete in this function each time the STC-S syntax is wrong or each time the declared coordinate system or region is not supported. + * + * @see STCS#parseRegion(String) + * @see #checkRegion(adql.db.STCS.Region, RegionFunction, BinarySearch, UnresolvedIdentifiersException) + * + * @since 1.3 + */ + protected void resolveSTCSExpressions(final ADQLQuery query, final BinarySearch binSearch, final UnresolvedIdentifiersException errors){ + // Search REGION functions: + ISearchHandler sHandler = new SearchRegionHandler(); + sHandler.search(query); + + // Parse and check their STC-S expression: + String stcs; + Region region; + for(ADQLObject result : sHandler){ + try{ + // get the STC-S expression: + stcs = ((StringConstant)((RegionFunction)result).getParameter(0)).getValue(); + + // parse the STC-S expression (and so check the syntax): + region = STCS.parseRegion(stcs); + + // check whether the regions (this one + the possible inner ones) and the coordinate systems are allowed: + checkRegion(region, (RegionFunction)result, binSearch, errors); + }catch(ParseException pe){ + errors.addException(new ParseException(pe.getMessage())); // TODO Missing object position! + } + } + } + + /** + *

Check the given region.

+ * + *

The following points are checked in this function:

+ *
    + *
  • whether the coordinate system is allowed
  • + *
  • whether the type of region is allowed
  • + *
  • whether the inner regions are correct (here this function is called recursively on each inner region).
  • + *
+ * + * @param r The region to check. + * @param fct The REGION function containing the region to check. + * @param errors List of errors to complete in this function if the given region or its inner regions are not supported. + * + * @see #checkCoordinateSystem(adql.db.STCS.CoordSys, ADQLOperand, UnresolvedIdentifiersException) + * @see #checkGeometryFunction(String, ADQLFunction, BinarySearch, UnresolvedIdentifiersException) + * @see #checkRegion(adql.db.STCS.Region, RegionFunction, BinarySearch, UnresolvedIdentifiersException) + * + * @since 1.3 + */ + protected void checkRegion(final Region r, final RegionFunction fct, final BinarySearch binSearch, final UnresolvedIdentifiersException errors){ + if (r == null) + return; + + // Check the coordinate system (if any): + if (r.coordSys != null) + checkCoordinateSystem(r.coordSys, fct, errors); + + // Check that the region type is allowed: + if (allowedGeo != null){ + if (allowedGeo.length == 0) + errors.addException(new UnresolvedFunctionException("The region type \"" + r.type + "\" is not available in this implementation!", fct)); + else + checkGeometryFunction((r.type == RegionType.POSITION) ? "POINT" : r.type.toString(), fct, binSearch, errors); + } + + // Check all the inner regions: + if (r.regions != null){ + for(Region innerR : r.regions) + checkRegion(innerR, fct, binSearch, errors); + } + } + + /* **************************************************** */ + /* METHODS CHECKING TYPES UNKNOWN WHILE CHECKING SYNTAX */ + /* **************************************************** */ + + /** + *

Search all operands whose the type is not yet known and try to resolve it now + * and to check whether it matches the type expected by the syntactic parser.

+ * + *

+ * Only two operands may have an unresolved type: columns and user defined functions. + * Indeed, their type can be resolved only if the list of available columns and UDFs is known, + * and if columns and UDFs used in the query are resolved successfully. + *

+ * + *

+ * When an operand type is still unknown, they will own the three kinds of type and + * so this function won't raise an error: it is thus automatically on the expected type. + * This behavior is perfectly correct because if the type is not resolved + * that means the item/operand has not been resolved in the previous steps and so that + * an error about this item has already been raised. + *

+ * + *

Important note: + * This function does not check the types exactly, but just roughly by considering only three categories: + * string, numeric and geometry. + *

+ * + * @param query Query in which unknown types must be resolved and checked. + * @param errors List of errors to complete in this function each time a types does not match to the expected one. + * + * @see UnknownType + * + * @since 1.3 + */ + protected void checkTypes(final ADQLQuery query, final UnresolvedIdentifiersException errors){ + // Search all unknown types: + ISearchHandler sHandler = new SearchUnknownTypeHandler(); + sHandler.search(query); + + // Check whether their type matches the expected one: + UnknownType unknown; + for(ADQLObject result : sHandler){ + unknown = (UnknownType)result; + switch(unknown.getExpectedType()){ + case 'G': + case 'g': + if (!unknown.isGeometry()) + errors.addException(new ParseException("Type mismatch! A geometry was expected instead of \"" + unknown.toADQL() + "\".")); // TODO Add the ADQLOperand position! + break; + case 'N': + case 'n': + if (!unknown.isNumeric()) + errors.addException(new ParseException("Type mismatch! A numeric value was expected instead of \"" + unknown.toADQL() + "\".")); // TODO Add the ADQLOperand position! + break; + case 'S': + case 's': + if (!unknown.isString()) + errors.addException(new ParseException("Type mismatch! A string value was expected instead of \"" + unknown.toADQL() + "\".")); // TODO Add the ADQLOperand position! + break; + } + } + } + + /* ******************************** */ + /* METHODS CHECKING THE SUB-QUERIES */ + /* ******************************** */ + + /** + *

Search all sub-queries found in the given query but not in the clause FROM. + * These sub-queries are then checked using {@link #check(ADQLQuery, Stack)}.

+ * + * Fathers stack + *

+ * Each time a sub-query must be checked with {@link #check(ADQLQuery, Stack)}, + * the list of all columns available in each of its father queries must be provided. + * This function is composing itself this stack by adding the given list of available + * columns (= all columns resolved in the given query) at the end of the given stack. + * If this stack is given empty, then a new stack is created. + *

+ *

+ * This modification of the given stack is just the execution time of this function. + * Before returning, this function removes the last item of the stack. + *

+ * + * + * @param query Query in which sub-queries must be checked. + * @param fathersList List of all columns available in the father queries and that should be accessed in sub-queries. + * Each item of this stack is a list of columns available in each father-level query. + * Note: this parameter is NULL if this function is called with the root/father query as parameter. + * @param availableColumns List of all columns resolved in the given query. + * @param errors List of errors to complete in this function each time a semantic error is encountered. + * + * @since 1.3 + */ + protected void checkSubQueries(final ADQLQuery query, Stack fathersList, final SearchColumnList availableColumns, final UnresolvedIdentifiersException errors){ + // Check sub-queries outside the clause FROM: + ISearchHandler sHandler = new SearchSubQueryHandler(); + sHandler.search(query); + if (sHandler.getNbMatch() > 0){ + + // Push the list of columns into the father columns stack: + if (fathersList == null) + fathersList = new Stack(); + fathersList.push(availableColumns); + + // Check each found sub-query: + for(ADQLObject result : sHandler){ + try{ + check((ADQLQuery)result, fathersList); + }catch(UnresolvedIdentifiersException uie){ + Iterator itPe = uie.getErrors(); + while(itPe.hasNext()) + errors.addException(itPe.next()); + } + } + + // Pop the list of columns from the father columns stack: + fathersList.pop(); + } + } + /* *************** */ /* SEARCH HANDLERS */ /* *************** */ @@ -462,7 +1236,7 @@ public class DBChecker implements QueryChecker { * Lets searching all tables. * * @author Grégory Mantelet (CDS) - * @version 07/2011 + * @version 1.0 (07/2011) */ private static class SearchTableHandler extends SimpleSearchHandler { @Override @@ -475,7 +1249,7 @@ public class DBChecker implements QueryChecker { * Lets searching all wildcards. 
* * @author Grégory Mantelet (CDS) - * @version 09/2011 + * @version 1.0 (09/2011) */ private static class SearchWildCardHandler extends SimpleSearchHandler { @Override @@ -488,7 +1262,7 @@ public class DBChecker implements QueryChecker { * Lets searching column references. * * @author Grégory Mantelet (CDS) - * @version 11/2011 + * @version 1.0 (11/2011) */ private static class SearchColReferenceHandler extends SimpleSearchHandler { @Override @@ -527,4 +1301,213 @@ public class DBChecker implements QueryChecker { } } + /** + * Let searching user defined functions. + * + * @author Grégory Mantelet (ARI) + * @version 1.3 (10/2014) + * @since 1.3 + */ + private static class SearchUDFHandler extends SimpleSearchHandler { + @Override + protected boolean match(ADQLObject obj){ + return (obj instanceof UserDefinedFunction); + } + } + + /** + *

Let replacing every {@link DefaultUDF}s whose a {@link FunctionDef} is set by their corresponding {@link UserDefinedFunction} class.

+ * + *

Important note: + * If the replacer can not be created using the class returned by {@link FunctionDef#getUDFClass()}, no replacement is performed. + *

+ * + * @author Grégory Mantelet (ARI) + * @version 1.3 (02/2015) + * @since 1.3 + */ + private static class ReplaceDefaultUDFHandler extends SimpleReplaceHandler { + private final UnresolvedIdentifiersException errors; + + public ReplaceDefaultUDFHandler(final UnresolvedIdentifiersException errorsContainer){ + errors = errorsContainer; + } + + @Override + protected boolean match(ADQLObject obj){ + return (obj.getClass().getName().equals(DefaultUDF.class.getName())) && (((DefaultUDF)obj).getDefinition() != null) && (((DefaultUDF)obj).getDefinition().getUDFClass() != null); + /* Note: detection of DefaultUDF is done on the exact class name rather than using "instanceof" in order to have only direct instances of DefaultUDF, + * and not extensions of it. Indeed, DefaultUDFs are generally created automatically by the ADQLQueryFactory ; so, extensions of it can only be custom + * UserDefinedFunctions. */ + } + + @Override + protected ADQLObject getReplacer(ADQLObject objToReplace) throws UnsupportedOperationException{ + try{ + // get the associated UDF class: + Class udfClass = ((DefaultUDF)objToReplace).getDefinition().getUDFClass(); + // get the constructor with a single parameter of type ADQLOperand[]: + Constructor constructor = udfClass.getConstructor(ADQLOperand[].class); + // create a new instance of this UDF class with the operands stored in the object to replace: + return constructor.newInstance((Object)(((DefaultUDF)objToReplace).getParameters())); /* note: without this class, each item of the given array will be considered as a single parameter. */ + }catch(Exception ex){ + // IF NO INSTANCE CAN BE CREATED... + // ...keep the error for further report: + errors.addException(new UnresolvedFunctionException("Impossible to represent the function \"" + ((DefaultUDF)objToReplace).getName() + "\": the following error occured while creating this representation: \"" + ((ex instanceof InvocationTargetException) ? 
"[" + ex.getCause().getClass().getSimpleName() + "] " + ex.getCause().getMessage() : ex.getMessage()) + "\"", (DefaultUDF)objToReplace)); + // ...keep the same object (i.e. no replacement): + return objToReplace; + } + } + } + + /** + * Let searching geometrical functions. + * + * @author Grégory Mantelet (ARI) + * @version 1.3 (10/2014) + * @since 1.3 + */ + private static class SearchGeometryHandler extends SimpleSearchHandler { + @Override + protected boolean match(ADQLObject obj){ + return (obj instanceof GeometryFunction); + } + } + + /** + *

Let searching all ADQL objects whose type was not known while checking the syntax of the ADQL query. + * These objects are {@link ADQLColumn}s and {@link UserDefinedFunction}s.

+ * + *

Important note: + * Only {@link UnknownType} instances having an expected type equal to 'G' (or 'g' ; for geometry), 'S' (or 's' ; for string) or 'N' (or 'n' ; for numeric) + * are kept by this handler. Others are ignored.

+ * + * @author Grégory Mantelet (ARI) + * @version 1.3 (10/2014) + * @since 1.3 + */ + private static class SearchUnknownTypeHandler extends SimpleSearchHandler { + @Override + protected boolean match(ADQLObject obj){ + if (obj instanceof UnknownType){ + char expected = ((UnknownType)obj).getExpectedType(); + return (expected == 'G' || expected == 'g' || expected == 'S' || expected == 's' || expected == 'N' || expected == 'n'); + }else + return false; + } + } + + /** + * Let searching all explicit declaration of coordinate systems. + * So, only {@link StringConstant} objects will be returned. + * + * @author Grégory Mantelet (ARI) + * @version 1.3 (10/2014) + * @since 1.3 + */ + private static class SearchCoordSysHandler extends SimpleSearchHandler { + @Override + protected boolean match(ADQLObject obj){ + if (obj instanceof PointFunction || obj instanceof BoxFunction || obj instanceof CircleFunction || obj instanceof PolygonFunction) + return (((GeometryFunction)obj).getCoordinateSystem() instanceof StringConstant); + else + return false; + } + + @Override + protected void addMatch(ADQLObject matchObj, ADQLIterator it){ + results.add(((GeometryFunction)matchObj).getCoordinateSystem()); + } + + } + + /** + * Let searching all {@link RegionFunction}s. + * + * @author Grégory Mantelet (ARI) + * @version 1.3 (10/2014) + * @since 1.3 + */ + private static class SearchRegionHandler extends SimpleSearchHandler { + @Override + protected boolean match(ADQLObject obj){ + if (obj instanceof RegionFunction) + return (((RegionFunction)obj).getParameter(0) instanceof StringConstant); + else + return false; + } + + } + + /** + *

Implement the binary search algorithm over a sorted array.

+ * + *

+ * The only difference with the standard implementation of Java is + * that this object lets perform research with a different type + * of object than the types of array items. + *

+ * + *

+ * For that reason, the "compare" function must always be implemented. + *

+ * + * @author Grégory Mantelet (ARI) + * @version 1.3 (10/2014) + * + * @param Type of items stored in the array. + * @param Type of the item to search. + * + * @since 1.3 + */ + protected static abstract class BinarySearch< T, S > { + private int s, e, m, comp; + + /** + *

Search the given item in the given array.

+ * + *

+ * In case the given object matches to several items of the array, + * this function will return the smallest index, pointing thus to the first + * of all matches. + *

+ * + * @param searchItem Object for which a corresponding array item must be searched. + * @param array Array in which the given object must be searched. + * + * @return The array index of the first item of all matches. + */ + public int search(final S searchItem, final T[] array){ + s = 0; + e = array.length - 1; + while(s < e){ + // middle of the sorted array: + m = s + ((e - s) / 2); + // compare the fct with the middle item of the array: + comp = compare(searchItem, array[m]); + // if the fct is after, trigger the inspection of the right part of the array: + if (comp > 0) + s = m + 1; + // otherwise, the left part: + else + e = m; + } + if (s != e || compare(searchItem, array[s]) != 0) + return -1; + else + return s; + } + + /** + * Compare the search item and the array item. + * + * @param searchItem Item whose a corresponding value must be found in the array. + * @param arrayItem An item of the array. + * + * @return Negative value if searchItem is less than arrayItem, 0 if they are equals, or a positive value if searchItem is greater. + */ + protected abstract int compare(final S searchItem, final T arrayItem); + } + } diff --git a/src/adql/db/DBColumn.java b/src/adql/db/DBColumn.java index a803717e449bb7d1bf8594a3aee51a1b632eb2f9..c987e062cc161f5b20488c8e4652accc00455463 100644 --- a/src/adql/db/DBColumn.java +++ b/src/adql/db/DBColumn.java @@ -16,7 +16,8 @@ package adql.db; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2011 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2011,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ /** @@ -27,8 +28,8 @@ package adql.db; * and corresponds to a real column in the "database" with its DB name ({@link #getDBName()}). *

* - * @author Grégory Mantelet (CDS) - * @version 08/2011 + * @author Grégory Mantelet (CDS;ARI) + * @version 1.3 (10/2014) */ public interface DBColumn { @@ -46,6 +47,19 @@ public interface DBColumn { */ public String getDBName(); + /** + *

Get the type of this column (as close as possible to the "database" type).

+ * + *

Note: + * The returned type should be as close as possible to a type listed by the IVOA in the UPLOAD section of the TAP protocol description.

+ * + * @return Its type. + * + * @since 1.3 + */ + public DBType getDatatype(); + /** * Gets the table which contains this {@link DBColumn}. * diff --git a/src/adql/db/DBCommonColumn.java b/src/adql/db/DBCommonColumn.java index fbbc73deaa2bf277b3a55ad77e25208f6dbb4274..44c6642ad34a01ddf654ec1d99c018136650d3d5 100644 --- a/src/adql/db/DBCommonColumn.java +++ b/src/adql/db/DBCommonColumn.java @@ -16,12 +16,13 @@ package adql.db; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2014 - Astronomishes Rechen Institute (ARI) + * Copyright 2014-2015 - Astronomisches Rechen Institut (ARI) */ import java.util.ArrayList; import java.util.Iterator; +import adql.db.exception.UnresolvedJoinException; import adql.query.ADQLQuery; /** @@ -33,7 +34,7 @@ import adql.query.ADQLQuery; * in case of several JOINs. * * @author Grégory Mantelet (ARI) - gmantele@ari.uni-heidelberg.de - * @version 1.2 (11/2013) + * @version 1.3 (05/2015) * @since 1.2 */ public class DBCommonColumn implements DBColumn { @@ -54,8 +55,13 @@ public class DBCommonColumn implements DBColumn { * * @param leftCol Column of the left join table. May be a {@link DBCommonColumn}. * @param rightCol Column of the right join table. May be a {@link DBCommonColumn}. + * + * @throws UnresolvedJoinException If the type of the two given columns are not roughly (just testing numeric, string or geometry) compatible. 
*/ - public DBCommonColumn(final DBColumn leftCol, final DBColumn rightCol){ + public DBCommonColumn(final DBColumn leftCol, final DBColumn rightCol) throws UnresolvedJoinException{ + // Test whether type of both columns are compatible: + if (leftCol.getDatatype() != null && rightCol.getDatatype() != null && !leftCol.getDatatype().isCompatible(rightCol.getDatatype())) + throw new UnresolvedJoinException("JOIN impossible: incompatible column types when trying to join the columns " + leftCol.getADQLName() + " (" + leftCol.getDatatype() + ") and " + rightCol.getADQLName() + " (" + rightCol.getDatatype() + ")!"); // LEFT COLUMN: if (leftCol instanceof DBCommonColumn){ @@ -83,7 +89,6 @@ public class DBCommonColumn implements DBColumn { // add the table to cover: addCoveredTable(rightCol.getTable()); } - } /** @@ -112,6 +117,11 @@ public class DBCommonColumn implements DBColumn { return generalColumnDesc.getDBName(); } + @Override + public final DBType getDatatype(){ + return generalColumnDesc.getDatatype(); + } + @Override public final DBTable getTable(){ return null; diff --git a/src/adql/db/DBTable.java b/src/adql/db/DBTable.java index 8e5d52b8dddb5979f874c97c3bdd2caa00e215dd..f72389f4c7b39cc88bd88155ae2673f91a3dd995 100644 --- a/src/adql/db/DBTable.java +++ b/src/adql/db/DBTable.java @@ -16,7 +16,8 @@ package adql.db; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ /** @@ -27,8 +28,8 @@ package adql.db; * and corresponds to a real table in the "database" with its DB name ({@link #getDBName()}). *

* - * @author Grégory Mantelet (CDS) - * @version 07/2011 + * @author Grégory Mantelet (CDS;ARI) + * @version 1.3 (09/2014) */ public interface DBTable extends Iterable { @@ -86,10 +87,25 @@ public interface DBTable extends Iterable { public DBColumn getColumn(String colName, boolean adqlName); /** - * Makes a copy of this instance of {@link DBTable}, with the possibility to change the DB and ADQL names. + *

Makes a copy of this instance of {@link DBTable}, with the possibility to change the DB and ADQL names.

+ * + *

IMPORTANT: + * The given DB and ADQL name may be NULL. If NULL, the copy will contain exactly the same full name (DB and/or ADQL).
+ * And they may be qualified (that's to say: prefixed by the schema name or by the catalog and schema name). It means that it is possible to + * change the catalog, schema and table name in the copy.
+ * For instance: + *

+ *
    + *
  • .copy(null, "foo") => a copy with the same full DB name, but with no ADQL catalog and schema name and with an ADQL table name equal to "foo"
  • + *
  • .copy("schema.table", NULL) => a copy with the same full ADQL name, but with no DB catalog name, with a DB schema name equal to "schema" and with a DB table name equal to "table"
  • + *
* * @param dbName Its new DB name. + * It may be qualified. + * It may also be NULL ; if so, the full DB name won't be different in the copy. * @param adqlName Its new ADQL name. + * It may be qualified. + * It may also be NULL ; if so, the full DB name won't be different in the copy. * * @return A modified copy of this {@link DBTable}. */ diff --git a/src/adql/db/DBType.java b/src/adql/db/DBType.java new file mode 100644 index 0000000000000000000000000000000000000000..87f6c05c81674a98bed60cc17ec67a7c2947eb0a --- /dev/null +++ b/src/adql/db/DBType.java @@ -0,0 +1,150 @@ +package adql.db; + +/* + * This file is part of ADQLLibrary. + * + * ADQLLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * ADQLLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with TAPLibrary. If not, see . + * + * Copyright 2014 - Astronomisches Rechen Institut (ARI) + */ + +/** + * + *

+ * Describe a full column type as it is described in the IVOA document of TAP. + * Thus, this object contains 2 attributes: type (or datatype) and length (or size). + *

+ * + *

The length/size may not be defined ; in this case, its value is set to {@link #NO_LENGTH} or is negative or zero.

+ * + *

All datatypes declared in the IVOA recommendation document of TAP are listed in an enumeration type: {@link DBDatatype}. + * It is used to set the attribute type/datatype of this class.

+ * + * @author Grégory Mantelet (ARI) + * @version 1.3 (10/2014) + * @since 1.3 + */ +public class DBType { + + /** + * List of all datatypes declared in the IVOA recommendation of TAP (in the section UPLOAD). + * + * @author Grégory Mantelet (ARI) + * @version 1.3 (10/2014) + * @since 1.3 + */ + public static enum DBDatatype{ + SMALLINT, INTEGER, BIGINT, REAL, DOUBLE, BINARY, VARBINARY, CHAR, VARCHAR, BLOB, CLOB, TIMESTAMP, POINT, REGION; + } + + /** Special value in case no length/size is specified. */ + public static final int NO_LENGTH = -1; + + /** Datatype of a column. */ + public final DBDatatype type; + + /** The length parameter (only few datatypes need this parameter: char, varchar, binary and varbinary). */ + public final int length; + + /** + * Build a TAP column type by specifying a datatype. + * + * @param datatype Column datatype. + */ + public DBType(final DBDatatype datatype){ + this(datatype, NO_LENGTH); + } + + /** + * Build a TAP column type by specifying a datatype and a length (needed only for datatypes like char, varchar, binary and varbinary). + * + * @param datatype Column datatype. + * @param length Length of the column value (needed only for datatypes like char, varchar, binary and varbinary). + */ + public DBType(final DBDatatype datatype, final int length){ + if (datatype == null) + throw new NullPointerException("Missing TAP column datatype !"); + this.type = datatype; + this.length = length; + } + + public boolean isNumeric(){ + switch(type){ + case SMALLINT: + case INTEGER: + case BIGINT: + case REAL: + case DOUBLE: + /* Note: binaries are also included here because they can also be considered as Numeric, + * but not for JOINs. 
*/ + case BINARY: + case VARBINARY: + case BLOB: + return true; + default: + return false; + } + } + + public boolean isBinary(){ + switch(type){ + case BINARY: + case VARBINARY: + case BLOB: + return true; + default: + return false; + } + } + + public boolean isString(){ + switch(type){ + case CHAR: + case VARCHAR: + case CLOB: + case TIMESTAMP: + return true; + default: + return false; + } + } + + public boolean isGeometry(){ + return (type == DBDatatype.POINT || type == DBDatatype.REGION); + } + + public boolean isCompatible(final DBType t){ + if (t == null) + return false; + else if (isBinary() == t.isBinary()) + return (type == DBDatatype.BLOB && t.type == DBDatatype.BLOB) || (type != DBDatatype.BLOB && t.type != DBDatatype.BLOB); + else if (isNumeric() == t.isNumeric()) + return true; + else if (isGeometry() == t.isGeometry()) + return (type == t.type); + else if (isString()) + return (type == DBDatatype.CLOB && t.type == DBDatatype.CLOB) || (type != DBDatatype.CLOB && t.type != DBDatatype.CLOB); + else + return (type == t.type); + } + + @Override + public String toString(){ + if (length > 0) + return type + "(" + length + ")"; + else + return type.toString(); + } + +} diff --git a/src/adql/db/DefaultDBColumn.java b/src/adql/db/DefaultDBColumn.java index 8496501aa3ee7ecc3fb15619c8bddecafb98e3ef..a4ed9e3bc557bc8cba391bc68c35e269dbf27ea9 100644 --- a/src/adql/db/DefaultDBColumn.java +++ b/src/adql/db/DefaultDBColumn.java @@ -16,20 +16,27 @@ package adql.db; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ /** * Default implementation of {@link DBColumn}. 
* - * @author Grégory Mantelet (CDS) - * @version 08/2011 + * @author Grégory Mantelet (CDS;ARI) + * @version 1.3 (10/2014) */ public class DefaultDBColumn implements DBColumn { + /** Name of the column in the "database". */ protected String dbName; + /** Type of the column in the "database". + * Note: This should be one of the types listed by the IVOA in the TAP description. + * @since 1.3 */ + protected DBType type; + /** Table in which this column exists. */ protected DBTable table; - + /** Name that this column must have in ADQL queries. */ protected String adqlName = null; /** @@ -40,10 +47,30 @@ public class DefaultDBColumn implements DBColumn { * if a whole column reference is given, no split will be done. * @param table DB table which contains this column. * - * @see #DefaultDBColumn(String, String, DBTable) + * @see #DefaultDBColumn(String, String, DBType, DBTable) */ public DefaultDBColumn(final String dbName, final DBTable table){ - this(dbName, dbName, table); + this(dbName, dbName, null, table); + } + + /** + * Builds a default {@link DBColumn} with the given DB name and DB table. + * + * @param dbName Database column name (it will be also used for the ADQL name). + * Only the column name is expected. Contrary to {@link DefaultDBTable}, + * if a whole column reference is given, no split will be done. + * @param type Type of the column. + * Note: there is no default value. Consequently if this parameter is NULL, + * the type should be considered as unknown. It means that any comparison with + * any type will always return 'true'. + * @param table DB table which contains this column. + * + * @see #DefaultDBColumn(String, String, DBType, DBTable) + * + * @since 1.3 + */ + public DefaultDBColumn(final String dbName, final DBType type, final DBTable table){ + this(dbName, dbName, type, table); } /** @@ -56,13 +83,38 @@ public class DefaultDBColumn implements DBColumn { * Only the column name is expected. 
Contrary to {@link DefaultDBTable}, * if a whole column reference is given, no split will be done. * @param table DB table which contains this column. + * + * @see #DefaultDBColumn(String, String, DBType, DBTable) */ public DefaultDBColumn(final String dbName, final String adqlName, final DBTable table){ + this(dbName, adqlName, null, table); + } + + /** + * Builds a default {@link DBColumn} with the given DB name, DB table and ADQL name. + * + * @param dbName Database column name. + * Only the column name is expected. Contrary to {@link DefaultDBTable}, + * if a whole column reference is given, no split will be done. + * @param adqlName Column name used in ADQL queries. + * Only the column name is expected. Contrary to {@link DefaultDBTable}, + * if a whole column reference is given, no split will be done. + * @param type Type of the column. + * Note: there is no default value. Consequently if this parameter is NULL, + * the type should be considered as unknown. It means that any comparison with + * any type will always return 'true'. + * @param table DB table which contains this column. + * + * @since 1.3 + */ + public DefaultDBColumn(final String dbName, final String adqlName, final DBType type, final DBTable table){ this.dbName = dbName; this.adqlName = adqlName; + this.type = type; this.table = table; } + @Override public final String getADQLName(){ return adqlName; } @@ -72,10 +124,38 @@ public class DefaultDBColumn implements DBColumn { this.adqlName = adqlName; } + @Override + public final DBType getDatatype(){ + return type; + } + + /** + *

Set the type of this column.

+ * + *

Note 1: + * The given type should be as close as possible to a type listed by the IVOA in the UPLOAD section of the TAP protocol description.

+ * + *

Note 2: + * there is no default value. Consequently if this parameter is NULL, + * the type should be considered as unknown. It means that any comparison with + * any type will always return 'true'. + *

+ * + * @param type New type of this column. + * + * @since 1.3 + */ + public final void setDatatype(final DBType type){ + this.type = type; + } + + @Override public final String getDBName(){ return dbName; } + @Override public final DBTable getTable(){ return table; } @@ -84,8 +164,9 @@ public class DefaultDBColumn implements DBColumn { this.table = table; } + @Override public DBColumn copy(final String dbName, final String adqlName, final DBTable dbTable){ - return new DefaultDBColumn(dbName, adqlName, dbTable); + return new DefaultDBColumn(dbName, adqlName, type, dbTable); } } diff --git a/src/adql/db/DefaultDBTable.java b/src/adql/db/DefaultDBTable.java index baf71400273ce0d32a193328d7b20833cf6aded6..ccc3752df7a56eb33715da0eda46ca0f5fc38d81 100644 --- a/src/adql/db/DefaultDBTable.java +++ b/src/adql/db/DefaultDBTable.java @@ -17,18 +17,19 @@ package adql.db; * along with ADQLLibrary. If not, see . * * Copyright 2012-2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomisches Rechen Institute (ARI) + * Astronomisches Rechen Institut (ARI) */ import java.util.Collection; -import java.util.HashMap; import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.Map; /** * Default implementation of {@link DBTable}. * * @author Grégory Mantelet (CDS;ARI) - * @version 1.2 (11/2013) + * @version 1.3 (11/2014) */ public class DefaultDBTable implements DBTable { @@ -40,7 +41,7 @@ public class DefaultDBTable implements DBTable { protected String adqlSchemaName = null; protected String adqlName = null; - protected HashMap columns = new HashMap(); + protected Map columns = new LinkedHashMap(); /** *

Builds a default {@link DBTable} with the given DB name.

@@ -247,8 +248,52 @@ public class DefaultDBTable implements DBTable { return splitRes; } + /** + *

Join the last 3 items of the given string array with a dot ('.'). + * These three parts should be: [0]=catalog name, [1]=schema name, [2]=table name.

+ * + *

+ * If the array contains less than 3 items, all the given items will still be joined. + * However, if it contains more than 3 items, only the last three items will be.

+ * + *

A null item will be written as an empty string (string of length 0 ; "").

+ * + *

+ * In the case the first and the third items are not null, but the second is null, the final string will contain two consecutive dots in the middle. + * Example: if the array is {"cat", NULL, "table"}, then the joined string will be: "cat..table".

+ * + * @param nameParts String items to join. + * + * @return A string joining the 3 last string items of the given array, + * or an empty string if the given array is NULL. + * + * @since 1.3 + */ + public static final String joinTableName(final String[] nameParts){ + if (nameParts == null) + return ""; + + StringBuffer str = new StringBuffer(); + boolean empty = true; + for(int i = (nameParts.length <= 3) ? 0 : (nameParts.length - 3); i < nameParts.length; i++){ + if (!empty) + str.append('.'); + + String part = (nameParts[i] == null) ? null : nameParts[i].trim(); + if (part != null && part.length() > 0){ + str.append(part); + empty = false; + } + } + return str.toString(); + } + @Override - public DBTable copy(final String dbName, final String adqlName){ + public DBTable copy(String dbName, String adqlName){ + dbName = (dbName == null) ? joinTableName(new String[]{dbCatalogName,dbSchemaName,this.dbName}) : dbName; + adqlName = (adqlName == null) ? joinTableName(new String[]{adqlCatalogName,adqlSchemaName,this.adqlName}) : adqlName; DefaultDBTable copy = new DefaultDBTable(dbName, adqlName); for(DBColumn col : this){ if (col instanceof DBCommonColumn) @@ -258,5 +303,4 @@ public class DefaultDBTable implements DBTable { } return copy; } - } diff --git a/src/adql/db/FunctionDef.java b/src/adql/db/FunctionDef.java new file mode 100644 index 0000000000000000000000000000000000000000..82107d0a232a33d2e1d113c795304261f1bfda66 --- /dev/null +++ b/src/adql/db/FunctionDef.java @@ -0,0 +1,541 @@ +package adql.db; + +/* + * This file is part of ADQLLibrary. + * + * ADQLLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
+ * + * ADQLLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with ADQLLibrary. If not, see . + * + * Copyright 2015 - Astronomisches Rechen Institut (ARI) + */ + +import java.lang.reflect.Constructor; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import adql.db.DBType.DBDatatype; +import adql.parser.ParseException; +import adql.query.operand.ADQLOperand; +import adql.query.operand.function.ADQLFunction; +import adql.query.operand.function.DefaultUDF; +import adql.query.operand.function.UserDefinedFunction; + +/** + *

Definition of any function that could be used in ADQL queries.

+ * + *

+ * Such a definition can be built manually thanks to the different constructors of this class, + * or by parsing a string function definition form using the static function {@link #parse(String)}.

+ * + *

+ * The syntax of the expression expected by {@link #parse(String)} is the same as the one used to build + * the string returned by {@link #toString()}. Here is this syntax: + *

+ *
{fctName}([{param1Name} {param1Type}, ...])[ -> {returnType}]
+ * + *

+ * A description of this function may be set thanks to the public class attribute {@link #description}. + *

+ * + * @author Grégory Mantelet (ARI) + * @version 1.3 (02/2015) + * + * @since 1.3 + */ +public class FunctionDef implements Comparable { + /** Regular expression for what should be a function or parameter name - a regular identifier. */ + protected final static String regularIdentifierRegExp = "[a-zA-Z]+[0-9a-zA-Z_]*"; + /** Rough regular expression for a function return type or a parameter type. + * The exact type is not checked here ; just the type name syntax is tested, not its value. + * This regular expression allows a type to have exactly one parameter (which is generally the length of a character or binary string. */ + protected final static String typeRegExp = "([a-zA-Z]+[0-9a-zA-Z]*)(\\(\\s*([0-9]+)\\s*\\))?"; + /** Rough regular expression for a function parameters' list. */ + protected final static String fctParamsRegExp = "\\s*[^,]+\\s*(,\\s*[^,]+\\s*)*"; + /** Rough regular expression for a function parameter: a name (see {@link #regularIdentifierRegExp}) and a type (see {@link #typeRegExp}). */ + protected final static String fctParamRegExp = "\\s*(" + regularIdentifierRegExp + ")\\s+" + typeRegExp + "\\s*"; + /** Rough regular expression for a whole function definition. */ + protected final static String fctDefRegExp = "\\s*(" + regularIdentifierRegExp + ")\\s*\\(([a-zA-Z0-9,() \r\n\t]*)\\)(\\s*->\\s*(" + typeRegExp + "))?\\s*"; + + /** Pattern of a function definition. This object has been compiled with {@link #fctDefRegExp}. */ + protected final static Pattern fctPattern = Pattern.compile(fctDefRegExp); + /** Pattern of a single parameter definition. This object has been compiled with {@link #fctParamRegExp}. */ + protected final static Pattern paramPattern = Pattern.compile(fctParamRegExp); + + /** Name of the function. */ + public final String name; + + /** Description of this function. */ + public String description = null; + + /** Type of the result returned by this function. 
*/ + public final DBType returnType; + /** Indicate whether the return type is a string. */ + protected final boolean isString; + /** Indicate whether the return type is a numeric. */ + protected final boolean isNumeric; + /** Indicate whether the return type is a geometry. */ + protected final boolean isGeometry; + + /** Total number of parameters. */ + public final int nbParams; + /** List of all the parameters of this function. */ + protected final FunctionParam[] params; + + /**

String representation of this function.

+ *

The syntax of this representation is the following (items between brackets are optional):

+ *
{fctName}([{param1Name} {param1Type}, ...])[ -> {returnType}]
*/ + private final String serializedForm; + + /**

String representation of this function dedicated to comparison with any function signature.

+ *

This form is different from the serialized form on the following points:

+ *
    + *
  • the function name is always in lower case.
  • + *
  • each parameter is represented by a string of 3 characters, one for each kind of type (in the order): numeric, string, geometry. + * Each character is either a 0 or 1, thus indicating whether the parameter is of that kind of type.
  • + *
  • no return type.
  • + *
+ *

So the syntax of this form is the following (items between brackets are optional ; xxx is a string of 3 characters, each being either 0 or 1):

+ *
{fctName}([xxx, ...])
*/ + private final String compareForm; + + /** + *

Class of the {@link UserDefinedFunction} which must represent the UDF defined by this {@link FunctionDef} in the ADQL tree.

+ *

This class MUST have a constructor with a single parameter of type {@link ADQLOperand}[].

+ *

If this {@link FunctionDef} is defining an ordinary ADQL function, this attribute must be NULL. It is used only for user defined functions.

+ */ + private Class udfClass = null; + + /** + *

Definition of a function parameter.

+ * + *

This definition is composed of two items: the name and the type of the parameter.

+ * + * @author Grégory Mantelet (ARI) + * @version 1.3 (10/2014) + * @since 1.3 + */ + public static final class FunctionParam { + /** Parameter name. Ensured not null */ + public final String name; + /** Parameter type. Ensured not null */ + public final DBType type; + + /** + * Create a function parameter. + * + * @param paramName Name of the parameter to create. MUST NOT be NULL + * @param paramType Type of the parameter to create. MUST NOT be NULL + */ + public FunctionParam(final String paramName, final DBType paramType){ + if (paramName == null) + throw new NullPointerException("Missing name! The function parameter can not be created."); + if (paramType == null) + throw new NullPointerException("Missing type! The function parameter can not be created."); + this.name = paramName; + this.type = paramType; + } + } + + /** + *

Create a function definition.

+ * + *

The created function will have no return type and no parameter.

+ * + * @param fctName Name of the function. + */ + public FunctionDef(final String fctName){ + this(fctName, null, null); + } + + /** + *

Create a function definition.

+ * + *

The created function will have a return type (if the provided one is not null) and no parameter.

+ * + * @param fctName Name of the function. + * @param returnType Return type of the function. If NULL, this function will have no return type + */ + public FunctionDef(final String fctName, final DBType returnType){ + this(fctName, returnType, null); + } + + /** + *

Create a function definition.

+ * + *

The created function will have no return type and some parameters (except if the given array is NULL or empty).

+ * + * @param fctName Name of the function. + * @param params Parameters of this function. If NULL or empty, this function will have no parameter. + */ + public FunctionDef(final String fctName, final FunctionParam[] params){ + this(fctName, null, params); + } + + public FunctionDef(final String fctName, final DBType returnType, final FunctionParam[] params){ + // Set the name: + if (fctName == null) + throw new NullPointerException("Missing name! Can not create this function definition."); + this.name = fctName; + + // Set the parameters: + this.params = (params == null || params.length == 0) ? null : params; + this.nbParams = (params == null) ? 0 : params.length; + + // Set the return type; + this.returnType = returnType; + if (returnType != null){ + isNumeric = returnType.isNumeric(); + isString = returnType.isString(); + isGeometry = returnType.isGeometry(); + }else + isNumeric = isString = isGeometry = false; + + // Serialize in Strings (serializedForm and compareForm) this function definition: + StringBuffer bufSer = new StringBuffer(name), bufCmp = new StringBuffer(name.toLowerCase()); + bufSer.append('('); + for(int i = 0; i < nbParams; i++){ + bufSer.append(params[i].name).append(' ').append(params[i].type); + bufCmp.append(params[i].type.isNumeric() ? '1' : '0').append(params[i].type.isString() ? '1' : '0').append(params[i].type.isGeometry() ? '1' : '0'); + if (i + 1 < nbParams) + bufSer.append(", "); + } + bufSer.append(')'); + if (returnType != null) + bufSer.append(" -> ").append(returnType); + serializedForm = bufSer.toString(); + compareForm = bufCmp.toString(); + } + + /** + * Tell whether this function returns a numeric. + * + * @return true if this function returns a numeric, false otherwise. + */ + public final boolean isNumeric(){ + return isNumeric; + } + + /** + * Tell whether this function returns a string. + * + * @return true if this function returns a string, false otherwise. 
+ */ + public final boolean isString(){ + return isString; + } + + /** + * Tell whether this function returns a geometry. + * + * @return true if this function returns a geometry, false otherwise. + */ + public final boolean isGeometry(){ + return isGeometry; + } + + /** + * Get the number of parameters required by this function. + * + * @return Number of required parameters. + */ + public final int getNbParams(){ + return nbParams; + } + + /** + * Get the definition of the indParam-th parameter of this function. + * + * @param indParam Index of the parameter whose the definition must be returned. + * + * @return Definition of the specified parameter. + * + * @throws ArrayIndexOutOfBoundsException If the given index is negative or bigger than the number of parameters. + */ + public final FunctionParam getParam(final int indParam) throws ArrayIndexOutOfBoundsException{ + if (indParam < 0 || indParam >= nbParams) + throw new ArrayIndexOutOfBoundsException(indParam); + else + return params[indParam]; + } + + /** + *

Get the class of the {@link UserDefinedFunction} able to represent the function defined here in an ADQL tree.

+ * + *

Note: + * This getter should return always NULL if the function defined here is not a user defined function. + *
+ * However, if this {@link FunctionDef} is defining a user defined function and this function returns NULL, + * the library will create on the fly a {@link DefaultUDF} corresponding to this definition when needed. + * Indeed this UDF class is useful only if the translation from ADQL (to SQL for instance) of the defined + * function has a different signature (e.g. a different name) in the target language (e.g. SQL). + *

+ * + * @return The corresponding {@link UserDefinedFunction}. MAY BE NULL + */ + public final Class getUDFClass(){ + return udfClass; + } + + /** + *

Set the class of the {@link UserDefinedFunction} able to represent the function defined here in an ADQL tree.

+ * + *

Note: + * If this {@link FunctionDef} defines an ordinary ADQL function - and not a user defined function - no class should be set here. + *
+ * However, if it defines a user defined function, there is no obligation to set a UDF class. It is useful only if the translation + * from ADQL (to SQL for instance) of the function has a different signature (e.g. a different name) in the target language (e.g. SQL). + * If the signature is the same, there is no need to set a UDF class ; a {@link DefaultUDF} will be created on the fly by the library + * when needed if it turns out that no UDF class is set. + *

+ * + * @param udfClass Class to use to represent in an ADQL tree the User Defined Function defined in this {@link FunctionDef}. + * + * @throws IllegalArgumentException If the given class does not provide any constructor with a single parameter of type ADQLOperand[]. + */ + public final < T extends UserDefinedFunction > void setUDFClass(final Class udfClass) throws IllegalArgumentException{ + try{ + + // Ensure that, if a class is provided, it contains a constructor with a single parameter of type ADQLOperand[]: + if (udfClass != null){ + Constructor constructor = udfClass.getConstructor(ADQLOperand[].class); + if (constructor == null) + throw new IllegalArgumentException("The given class (" + udfClass.getName() + ") does not provide any constructor with a single parameter of type ADQLOperand[]!"); + } + + // Set the new UDF class: + this.udfClass = udfClass; + + }catch(SecurityException e){ + throw new IllegalArgumentException("A security problem occurred while trying to get constructor from the class " + udfClass.getName() + ": " + e.getMessage()); + }catch(NoSuchMethodException e){ + throw new IllegalArgumentException("The given class (" + udfClass.getName() + ") does not provide any constructor with a single parameter of type ADQLOperand[]!"); + } + } + + /** + *

	 * Let parsing the serialized form of a function definition.
	 *
	 * <p>The expected syntax is (items between brackets are optional):
	 * {fctName}([{param1Name} {param1Type}, ...])[ -> {returnType}]</p>
	 *
	 * <p>Allowed parameter types and return types should be one the types listed by the UPLOAD section of the TAP recommendation document.
	 * These types are listed in the enumeration object {@link DBType}.
	 * However, other types should be accepted like the common database types...but it should be better to not rely on that
	 * since the conversion of those types to TAP types should not be exactly what is expected.</p>
	 *
	 * @param strDefinition	Serialized function definition to parse.
	 *
	 * @return	The object representation of the given string definition.
	 *
	 * @throws ParseException	If the given string has a wrong syntax or uses unknown types.
	 */
	public static FunctionDef parse(final String strDefinition) throws ParseException{
		if (strDefinition == null)
			throw new NullPointerException("Missing string definition to build a FunctionDef!");

		// Check the global syntax of the function definition:
		Matcher m = fctPattern.matcher(strDefinition);
		if (m.matches()){

			// Get the function name:
			String fctName = m.group(1);

			// Parse and get the return type:
			// (group 3 = whole optional return clause, group 5 = type name, group 7 = optional length)
			DBType returnType = null;
			if (m.group(3) != null){
				returnType = parseType(m.group(5), (m.group(7) == null) ? DBType.NO_LENGTH : Integer.parseInt(m.group(7)));
				if (returnType == null)
					throw new ParseException("Unknown return type: \"" + m.group(4).trim() + "\"!");
			}

			// Get the parameters, if any:
			String paramsList = m.group(2);
			FunctionParam[] params = null;
			if (paramsList != null && paramsList.trim().length() > 0){

				// Check the syntax of the parameters' list:
				// NOTE(review): the placeholders of this error message (and of the two below) look garbled —
				// the original text probably contained "<...>"-style tokens lost at extraction time ; TODO confirm.
				if (!paramsList.matches(fctParamsRegExp))
					throw new ParseException("Wrong parameters syntax! Expected syntax: \"( (, )*)\", where =\"[a-zA-Z]+[a-zA-Z0-9_]*\", should be one of the types described in the UPLOAD section of the TAP documentation. Examples of good syntax: \"()\", \"(param INTEGER)\", \"(param1 INTEGER, param2 DOUBLE)\"");

				// Split all the parameter definitions:
				String[] paramsSplit = paramsList.split(",");
				params = new FunctionParam[paramsSplit.length];
				DBType paramType;

				// For each parameter definition...
				// (the Matcher m is reused here for the per-parameter pattern)
				for(int i = 0; i < params.length; i++){
					m = paramPattern.matcher(paramsSplit[i]);
					if (m.matches()){

						// ...parse and get the parameter type:
						// (group 2 = type name, group 4 = optional length)
						paramType = parseType(m.group(2), (m.group(4) == null) ? DBType.NO_LENGTH : Integer.parseInt(m.group(4)));

						// ...build the parameter definition object:
						if (paramType == null)
							throw new ParseException("Unknown type for the parameter \"" + m.group(1) + "\": \"" + m.group(2) + ((m.group(3) == null) ? "" : m.group(3)) + "\"!");
						else
							params[i] = new FunctionParam(m.group(1), paramType);
					}else
						// note: should never happen because we have already check the syntax of the whole parameters list before parsing each individual parameter.
						throw new ParseException("Wrong syntax for the " + (i + 1) + "-th parameter: \"" + paramsSplit[i].trim() + "\"! Expected syntax: \"( (, )*)\", where =\"[a-zA-Z]+[a-zA-Z0-9_]*\", should be one of the types described in the UPLOAD section of the TAP documentation. Examples of good syntax: \"()\", \"(param INTEGER)\", \"(param1 INTEGER, param2 DOUBLE)\"");
				}
			}

			// Build the function definition object:
			return new FunctionDef(fctName, returnType, params);
		}else
			throw new ParseException("Wrong function definition syntax! Expected syntax: \"(?) ?\", where =\"[a-zA-Z]+[a-zA-Z0-9_]*\", =\" -> \", =\"( (, )*)\", should be one of the types described in the UPLOAD section of the TAP documentation. Examples of good syntax: \"foo()\", \"foo() -> VARCHAR\", \"foo(param INTEGER)\", \"foo(param1 INTEGER, param2 DOUBLE) -> DOUBLE\"");
	}

	/**
	 * Parse the given string representation of a datatype.
	 *
	 * @param datatype	String representation of a datatype.
	 *                	Note: This string must not contain the length parameter or any other parameter.
	 *                	These latter should have been separated from the datatype before calling this function.
	 * @param length	Length of this datatype.
	 *              	Note: This length will be used only for binary (BINARY and VARBINARY)
	 *              	and character (CHAR and VARCHAR) types.
	 *
	 * @return	The object representation of the specified datatype.
+ */ + private static DBType parseType(String datatype, int length){ + if (datatype == null) + return null; + + try{ + // Try to find a corresponding DBType item: + DBDatatype dbDatatype = DBDatatype.valueOf(datatype.toUpperCase()); + + // If there's a match, build the type object representation: + length = (length <= 0) ? DBType.NO_LENGTH : length; + switch(dbDatatype){ + case CHAR: + case VARCHAR: + case BINARY: + case VARBINARY: + return new DBType(dbDatatype, length); + default: + return new DBType(dbDatatype); + } + }catch(IllegalArgumentException iae){ + // If there's no corresponding DBType item, try to find a match among the most used DB types: + datatype = datatype.toLowerCase(); + if (datatype.equals("bool") || datatype.equals("boolean") || datatype.equals("short")) + return new DBType(DBDatatype.SMALLINT); + else if (datatype.equals("int2")) + return new DBType(DBDatatype.SMALLINT); + else if (datatype.equals("int") || datatype.equals("int4")) + return new DBType(DBDatatype.INTEGER); + else if (datatype.equals("long") || datatype.equals("number") || datatype.equals("bigint") || datatype.equals("int8")) + return new DBType(DBDatatype.BIGINT); + else if (datatype.equals("float") || datatype.equals("float4")) + return new DBType(DBDatatype.REAL); + else if (datatype.equals("numeric") || datatype.equals("float8")) + return new DBType(DBDatatype.DOUBLE); + else if (datatype.equals("byte") || datatype.equals("raw")) + return new DBType(DBDatatype.BINARY, length); + else if (datatype.equals("unsignedByte")) + return new DBType(DBDatatype.VARBINARY, length); + else if (datatype.equals("character")) + return new DBType(DBDatatype.CHAR, length); + else if (datatype.equals("string") || datatype.equals("varchar2")) + return new DBType(DBDatatype.VARCHAR, length); + else if (datatype.equals("bytea")) + return new DBType(DBDatatype.BLOB); + else if (datatype.equals("text")) + return new DBType(DBDatatype.CLOB); + else if (datatype.equals("date") || 
datatype.equals("time")) + return new DBType(DBDatatype.TIMESTAMP); + else if (datatype.equals("position")) + return new DBType(DBDatatype.POINT); + else if (datatype.equals("polygon") || datatype.equals("box") || datatype.equals("circle")) + return new DBType(DBDatatype.REGION); + else + return null; + } + } + + @Override + public String toString(){ + return serializedForm; + } + + @Override + public int compareTo(final FunctionDef def){ + return compareForm.compareTo(def.compareForm); + } + + /** + *

Compare this function definition with the given ADQL function item.

+ * + *

+ * The comparison is done only on the function name and on rough type of the parameters. + * "Rough type" means here that just the kind of type is tested: numeric, string or geometry. + * Anyway, the return type is never tested by this function, since such information is usually + * not part of a function signature. + *

+ * + *

The notion of "greater" and "less" are defined here according to the three following test steps:

+ *
    + *
  1. Name test: if the name of both function are equals, next steps are evaluated, otherwise the standard string comparison (case insensitive) result is returned.
  2. + *
  3. Parameters test: parameters are compared individually. Each time parameters (at the same position in both functions) are equals the next parameter can be tested, + * and so on until two parameters are different or the end of the parameters' list is reached. + * Just the kind of type is used for parameter comparison. Each kind of type is tested in the following order: numeric, string and geometry. + * When a kind of type is not equal for both parameters, the function exits with the appropriate value + * (1 if the parameter of this function definition is of the kind of type, -1 otherwise).
  4. + *
  5. Number of parameters test: in the case where this function definition has N parameters and the given ADQL function has M parameters, + * and that the L (= min(N,M)) first parameters have the same type in both functions, the value returns by this function + * will be N-M. Thus, if this function definition has more parameters than the given function, a positive value will be + * returned. Otherwise a negative value will be returned, or 0 if the number of parameters is the same.
  6. + *
+ * + * @param fct ADQL function item to compare with this function definition. + * + * @return A positive value if this function definition is "greater" than the given {@link ADQLFunction}, + * 0 if they are perfectly matching, + * or a negative value if this function definition is "less" than the given {@link ADQLFunction}. + */ + public int compareTo(final ADQLFunction fct){ + if (fct == null) + throw new NullPointerException("Missing ADQL function with which comparing this function definition!"); + + // Names comparison: + int comp = name.compareToIgnoreCase(fct.getName()); + + // If equals, compare the parameters' type: + if (comp == 0){ + for(int i = 0; comp == 0 && i < nbParams && i < fct.getNbParameters(); i++){ + if (params[i].type.isNumeric() == fct.getParameter(i).isNumeric()){ + if (params[i].type.isString() == fct.getParameter(i).isString()){ + if (params[i].type.isGeometry() == fct.getParameter(i).isGeometry()) + comp = 0; + else + comp = params[i].type.isGeometry() ? 1 : -1; + }else + comp = params[i].type.isString() ? 1 : -1; + }else + comp = params[i].type.isNumeric() ? 1 : -1; + } + + // If the first min(N,M) parameters are of the same type, do the last comparison on the number of parameters: + if (comp == 0 && nbParams != fct.getNbParameters()) + comp = nbParams - fct.getNbParameters(); + } + + return comp; + } +} diff --git a/src/adql/db/STCS.java b/src/adql/db/STCS.java new file mode 100644 index 0000000000000000000000000000000000000000..716ef580fee037003b78b52d33fb75a183732fa8 --- /dev/null +++ b/src/adql/db/STCS.java @@ -0,0 +1,1683 @@ +package adql.db; + +/* + * This file is part of ADQLLibrary. + * + * ADQLLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
+ * + * ADQLLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with ADQLLibrary. If not, see . + * + * Copyright 2014 - Astronomisches Rechen Institut (ARI) + */ + +import java.util.ArrayList; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import adql.parser.ADQLQueryFactory; +import adql.parser.ParseException; +import adql.query.TextPosition; +import adql.query.operand.ADQLOperand; +import adql.query.operand.NegativeOperand; +import adql.query.operand.NumericConstant; +import adql.query.operand.StringConstant; +import adql.query.operand.function.ADQLFunction; +import adql.query.operand.function.geometry.BoxFunction; +import adql.query.operand.function.geometry.CircleFunction; +import adql.query.operand.function.geometry.GeometryFunction; +import adql.query.operand.function.geometry.PointFunction; +import adql.query.operand.function.geometry.PolygonFunction; +import adql.query.operand.function.geometry.RegionFunction; + +/** + *

This class helps dealing with the subset of STC-S expressions described by the section "6 Use of STC-S in TAP (informative)" + * of the TAP Recommendation 1.0 (27th March 2010). This subset is limited to the most common coordinate systems and regions.

+ * + *

Note: + * No instance of this class can be created. Its usage is only limited to its static functions and classes. + *

+ * + *

Coordinate system

+ *

+ * The function {@link #parseCoordSys(String)} is able to parse a string containing only the STC-S expression of a coordinate system + * (or an empty string or null which would be interpreted as the default coordinate system - UNKNOWNFRAME UNKNOWNREFPOS SPHERICAL2). + * When successful, this parsing returns an object representation of the coordinate system: {@link CoordSys}. + *

+ *

+ * To serialize into STC-S a coordinate system, you have to create a {@link CoordSys} instance with the desired values + * and to call the function {@link CoordSys#toSTCS()}. The static function {@link #toSTCS(CoordSys)} is just calling the + * {@link CoordSys#toSTCS()} on the given coordinate system. + *

+ * + *

Geometrical region

+ *

+ * As for the coordinate system, there is a static function to parse the STC-S representation of a geometrical region: {@link #parseRegion(String)}. + * Here again, when the parsing is successful an object representation is returned: {@link Region}. + *

+ *

+ * This class lets also serializing into STC-S a region. The procedure is the same as with a coordinate system: create a {@link Region} and then + * call {@link Region#toString()}. + *

+ *

+ * The class {@link Region} lets also dealing with the {@link ADQLFunction} implementing a region. It is then possible to create a {@link Region} + * object from a such {@link ADQLFunction} and to get the corresponding STC-S representation. The static function {@link #toSTCS(GeometryFunction)} + * is a helpful function which do these both actions in once. + *

+ *

Note: + * The conversion from {@link ADQLFunction} to {@link Region} or STC-S is possible only if the {@link ADQLFunction} contains constants as parameter. + * Thus, a such function using a column, a concatenation, a math operation or using another function can not be converted into STC-S using this class. + *

 * @author Grégory Mantelet (ARI)
 * @version 1.3 (12/2014)
 * @since 1.3
 */
public final class STCS {

	/**
	 * Empty private constructor ; in order to prevent any instance creation.
	 */
	private STCS(){}

	/* ***************** */
	/* COORDINATE SYSTEM */
	/* ***************** */

	/** Regular expression for a STC-S representation of a coordinate system. It takes into account the fact that each part of
	 * a coordinate system is optional and so that a full coordinate system expression can be reduced to an empty string. */
	private final static String coordSysRegExp = Frame.regexp + "?\\s*" + RefPos.regexp + "?\\s*" + Flavor.regexp + "?";

	/** Regular expression of an expression exclusively limited to a coordinate system. */
	private final static String onlyCoordSysRegExp = "^\\s*" + coordSysRegExp + "\\s*$";

	/** Regular expression of a default coordinate system: either an empty string or a string containing only default values. */
	private final static String defaultCoordSysRegExp = "^\\s*" + Frame.DEFAULT + "?\\s*" + RefPos.DEFAULT + "?\\s*" + Flavor.DEFAULT + "?\\s*$";

	/** Regular expression of a pattern describing a set of allowed coordinate systems. See {@link #buildAllowedRegExp(String)} for more details. */
	/* With this regular expression, we get the following matching groups:
	 * 0: All the expression
	 * 1+(6*N): The N-th part of the coordinate system (N is an unsigned integer between 0 and 2 (included) ; it is reduced to '*' if the two following groups are NULL
	 * 2+(6*N): A single value for the N-th part
	 * 3+(6*N): A list of values for the N-th part
	 * 4+(6*N): First value of the list for the N-th part
	 * 5+(6*N): All the other values (starting with a |) of the list for the N-th part
	 * 6+(6*N): Last value of the list for the N-th part.
	 */
	private final static String allowedCoordSysRegExp = "^\\s*" + buildAllowedRegExp(Frame.regexp) + "\\s+" + buildAllowedRegExp(RefPos.regexp) + "\\s+" + buildAllowedRegExp(Flavor.regexp) + "\\s*$";

	/** Pattern of an allowed coordinate system pattern. This object has been compiled with {@link #allowedCoordSysRegExp}. */
	private final static Pattern allowedCoordSysPattern = Pattern.compile(allowedCoordSysRegExp);

	/** Human description of the syntax of a full coordinate system expression. */
	private final static String COORD_SYS_SYNTAX = "\"[" + Frame.regexp + "] [" + RefPos.regexp + "] [" + Flavor.regexp + "]\" ; an empty string is also allowed and will be interpreted as the coordinate system locally used";

	/**
	 * Build the regular expression of a string defining the allowed values for one part of the whole coordinate system.
	 *
	 * @param rootRegExp	All allowed part values.
	 *
	 * @return	The corresponding regular expression.
	 */
	private static String buildAllowedRegExp(final String rootRegExp){
		return "(" + rootRegExp + "|\\*|(\\(\\s*" + rootRegExp + "\\s*(\\|\\s*" + rootRegExp + "\\s*)*\\)))";
	}

	/**
	 *

	 * List of all possible frames in an STC expression.
	 *
	 * <p>When no value is specified, the default one is {@link #UNKNOWNFRAME}.
	 * The default value is also accessible through the attribute {@link #DEFAULT}
	 * and it is possible to test whether a frame is the default with the function {@link #isDefault()}.</p>
	 *
	 * <p>Note:
	 * The possible values listed in this enumeration are limited to the subset of STC-S described by the section "6 Use of STC-S in TAP (informative)"
	 * of the TAP Recommendation 1.0 (27th March 2010).</p>
	 *
	 * @author Grégory Mantelet (ARI)
	 * @version 1.3 (10/2014)
	 * @since 1.3
	 */
	public static enum Frame{
		ECLIPTIC, FK4, FK5, GALACTIC, ICRS, UNKNOWNFRAME;

		/** Default value for a frame: {@link #UNKNOWNFRAME}. */
		public static final Frame DEFAULT = UNKNOWNFRAME;

		/** Regular expression to test whether a string is a valid frame or not. This regular expression does not take into account
		 * the case of an empty string (which means "default frame"). */
		public static final String regexp = buildRegexp(Frame.class);

		/**
		 * Tell whether this frame is the default one.
		 *
		 * @return	true if this is the default frame, false otherwise.
		 */
		public final boolean isDefault(){
			return this == DEFAULT;
		}
	}

	/**
	 *

	 * List of all possible reference positions in an STC expression.
	 *
	 * <p>When no value is specified, the default one is {@link #UNKNOWNREFPOS}.
	 * The default value is also accessible through the attribute {@link #DEFAULT}
	 * and it is possible to test whether a reference position is the default with the function {@link #isDefault()}.</p>
	 *
	 * <p>Note:
	 * The possible values listed in this enumeration are limited to the subset of STC-S described by the section "6 Use of STC-S in TAP (informative)"
	 * of the TAP Recommendation 1.0 (27th March 2010).</p>
	 *
	 * @author Grégory Mantelet (ARI)
	 * @version 1.3 (10/2014)
	 * @since 1.3
	 */
	public static enum RefPos{
		BARYCENTER, GEOCENTER, HELIOCENTER, LSR, TOPOCENTER, RELOCATABLE, UNKNOWNREFPOS;

		/** Default value for a reference position: {@link #UNKNOWNREFPOS}. */
		public static final RefPos DEFAULT = UNKNOWNREFPOS;

		/** Regular expression to test whether a string is a valid reference position or not. This regular expression does not take into account
		 * the case of an empty string (which means "default reference position"). */
		public static final String regexp = buildRegexp(RefPos.class);

		/**
		 * Tell whether this reference position is the default one.
		 *
		 * @return	true if this is the default reference position, false otherwise.
		 */
		public final boolean isDefault(){
			return this == DEFAULT;
		}
	}

	/**
	 *

List of all possible flavors in an STC expression.

+ * + *

+ * When no value is specified, the default one is {@link #SPHERICAL2}. + * The default value is also accessible through the attribute {@link #DEFAULT} + * and it is possible to test whether a flavor is the default with the function {@link #isDefault()}. + *

+ * + *

Note: + * The possible values listed in this enumeration are limited to the subset of STC-S described by the section "6 Use of STC-S in TAP (informative)" + * of the TAP Recommendation 1.0 (27th March 2010). + *

+ * + * @author Grégory Mantelet (ARI) + * @version 1.3 (10/2014) + * @since 1.3 + */ + public static enum Flavor{ + CARTESIAN2, CARTESIAN3, SPHERICAL2; + + /** Default value for a flavor: {@link #SPHERICAL2}. */ + public static final Flavor DEFAULT = SPHERICAL2; + + /** Regular expression to test whether a string is a valid flavor or not. This regular expression does not take into account + * the case of an empty string (which means "default flavor"). */ + public static final String regexp = buildRegexp(Flavor.class); + + /** + * Tell whether this flavor is the default one. + * + * @return true if this is the default flavor, false + */ + public final boolean isDefault(){ + return this == DEFAULT; + } + } + + /** + * Build a regular expression covering all possible values of the given enumeration. + * + * @param enumType Class of an enumeration type. + * + * @return The build regular expression or "\s*" if the given enumeration contains no constants/values. + * + * @throws IllegalArgumentException If the given class is not an enumeration type. + */ + private static String buildRegexp(final Class enumType) throws IllegalArgumentException{ + // The given class must be an enumeration type: + if (!enumType.isEnum()) + throw new IllegalArgumentException("An enum class was expected, but a " + enumType.getName() + " has been given!"); + + // Get the enumeration constants/values: + Object[] constants = enumType.getEnumConstants(); + if (constants == null || constants.length == 0) + return "\\s*"; + + // Concatenate all constants with pipe to build a choice regular expression: + StringBuffer buf = new StringBuffer("("); + for(int i = 0; i < constants.length; i++){ + buf.append(constants[i]); + if ((i + 1) < constants.length) + buf.append('|'); + } + return buf.append(')').toString(); + } + + /** + *

	 * Object representation of an STC coordinate system.
	 *
	 * <p>A coordinate system is composed of three parts: a frame ({@link #frame}),
	 * a reference position ({@link #refpos}) and a flavor ({@link #flavor}).</p>
	 *
	 * <p>The default value - also corresponding to an empty string - should be:
	 * {@link Frame#UNKNOWNFRAME} {@link RefPos#UNKNOWNREFPOS} {@link Flavor#SPHERICAL2}.
	 * Once built, it is possible to know whether the coordinate system is the default one
	 * or not thanks to function {@link #isDefault()}.</p>
	 *
	 * <p>An instance of this class can be easily serialized into STC-S using {@link #toSTCS()}, {@link #toFullSTCS()}
	 * or {@link #toString()}. {@link #toFullSTCS()} will display default values explicitly
	 * on the contrary to {@link #toSTCS()} which will replace them by empty strings.</p>
	 *
	 * <p>Important note:
	 * The flavors CARTESIAN2 and CARTESIAN3 can not be used with other frame and reference position than
	 * UNKNOWNFRAME and UNKNOWNREFPOS. In the contrary case an {@link IllegalArgumentException} is throw.</p>
	 *
	 * @author Grégory Mantelet (ARI)
	 * @version 1.3 (10/2014)
	 * @since 1.3
	 */
	public static class CoordSys {
		/** First item of a coordinate system expression: the frame. */
		public final Frame frame;

		/** Second item of a coordinate system expression: the reference position. */
		public final RefPos refpos;

		/** Third and last item of a coordinate system expression: the flavor. */
		public final Flavor flavor;

		/** Indicate whether all parts of the coordinate system are set to their default value. */
		private final boolean isDefault;

		/** STC-S representation of this coordinate system. Default items are not written (that's to say, they are replaced by an empty string). */
		private final String stcs;

		/** STC-S representation of this coordinate system. Default items are explicitly written. */
		private final String fullStcs;

		/**
		 * Build a default coordinate system (UNKNOWNFRAME UNKNOWNREFPOS SPHERICAL2).
		 */
		public CoordSys(){
			this(null, null, null);
		}

		/**
		 * Build a coordinate system with the given parts.
		 *
		 * @param fr	Frame part.
		 * @param rp	Reference position part.
		 * @param fl	Flavor part.
		 *
		 * @throws IllegalArgumentException	If a cartesian flavor is used with a frame and reference position other than UNKNOWNFRAME and UNKNOWNREFPOS.
		 */
		public CoordSys(final Frame fr, final RefPos rp, final Flavor fl) throws IllegalArgumentException{
			// NULL parts are interpreted as their default value:
			frame = (fr == null) ? Frame.DEFAULT : fr;
			refpos = (rp == null) ? RefPos.DEFAULT : rp;
			flavor = (fl == null) ? Flavor.DEFAULT : fl;

			// Reject any non-spherical (i.e. cartesian) flavor combined with an explicit frame or refpos
			// (this matches the "Important note" of the class documentation):
			if (flavor != Flavor.SPHERICAL2 && (frame != Frame.UNKNOWNFRAME || refpos != RefPos.UNKNOWNREFPOS))
				throw new IllegalArgumentException("a coordinate system expressed with a cartesian flavor MUST have an UNKNOWNFRAME and UNKNOWNREFPOS!");

			isDefault = frame.isDefault() && refpos.isDefault() && flavor.isDefault();

			// Pre-compute both serialized forms (without and with default values):
			stcs = ((!frame.isDefault() ? frame + " " : "") + (!refpos.isDefault() ? refpos + " " : "") + (!flavor.isDefault() ? flavor : "")).trim();
			fullStcs = frame + " " + refpos + " " + flavor;
		}

		/**
		 * Build a coordinate system by parsing the given STC-S expression.
		 *
		 * @param coordsys	STC-S expression representing a coordinate system. Empty string and NULL are allowed values ; they correspond to a default coordinate system.
		 *
		 * @throws ParseException	If the syntax of the given STC-S expression is wrong or if it is not a coordinate system only.
		 */
		public CoordSys(final String coordsys) throws ParseException{
			// Delegate the parsing and copy every (final) field of the temporary result:
			CoordSys tmp = new STCSParser().parseCoordSys(coordsys);
			frame = tmp.frame;
			refpos = tmp.refpos;
			flavor = tmp.flavor;
			isDefault = tmp.isDefault;
			stcs = tmp.stcs;
			fullStcs = tmp.fullStcs;
		}

		/**
		 * Tell whether this is the default coordinate system (UNKNOWNFRAME UNKNOWNREFPOS SPHERICAL2).
		 *
		 * @return	true if it is the default coordinate system, false otherwise.
		 */
		public final boolean isDefault(){
			return isDefault;
		}

		/**
		 * Get the STC-S expression of this coordinate system,
		 * in which default values are not written (they are replaced by empty strings).
		 *
		 * @return	STC-S representation of this coordinate system.
		 */
		public String toSTCS(){
			return stcs;
		}

		/**
		 * Get the STC-S expression of this coordinate system,
		 * in which default values are explicitly written.
		 *
		 * @return	STC-S representation of this coordinate system.
		 */
		public String toFullSTCS(){
			return fullStcs;
		}

		/**
		 * Convert this coordinate system into a STC-S expression.
		 *
		 * @see java.lang.Object#toString()
		 * @see #toSTCS()
		 */
		@Override
		public String toString(){
			return stcs;
		}
	}

	/**
	 * Parse the given STC-S representation of a coordinate system.
	 *
	 * @param stcs	STC-S expression of a coordinate system. Note: a NULL or empty string will be interpreted as a default coordinate system.
	 *
	 * @return	The object representation of the specified coordinate system.
+ * + * @throws ParseException If the given expression has a wrong STC-S syntax. + */ + public static CoordSys parseCoordSys(final String stcs) throws ParseException{ + return (new STCSParser().parseCoordSys(stcs)); + } + + /** + *

Convert an object representation of a coordinate system into an STC-S expression.

+ * + *

Note: + * A NULL object will be interpreted as the default coordinate system and so an empty string will be returned. + * Otherwise, this function is equivalent to {@link CoordSys#toSTCS()} (in which default values for each + * coordinate system part is not displayed). + *

+ * + * @param coordSys The object representation of the coordinate system to convert into STC-S. + * + * @return The corresponding STC-S expression. + * + * @see CoordSys#toSTCS() + * @see CoordSys#toFullSTCS() + */ + public static String toSTCS(final CoordSys coordSys){ + if (coordSys == null) + return ""; + else + return coordSys.toSTCS(); + } + + /** + *

Build a big regular expression gathering all of the given coordinate system syntaxes.

+ * + *

+ * Each item of the given list must respect a strict syntax. Each part of the coordinate system + * may be a single value, a list of values or a '*' (meaning all values are allowed). + * A list of values must have the following syntax: ({value1}|{value2}|...). + * An empty string is NOT here accepted. + *

+ * + *

Example: + * (ICRS|FK4|FK5) * SPHERICAL2 is OK, + * but (ICRS|FK4|FK5) * is not valid because the flavor value is not defined. + *

+ * + *

+ * Since the default value of each part of a coordinate system should always be possible, + * this function ensure these default values are always possible in the returned regular expression. + * Thus, if some values except the default one are specified, the default value is automatically appended. + *

+ * + *

Note: + * If the given array is NULL, all coordinate systems are allowed. + * But if the given array is empty, none except an empty string or the default value will be allowed. + *

+ * + * @param allowedCoordSys List of all coordinate systems that are allowed. + * + * @return The corresponding regular expression. + * + * @throws ParseException If the syntax of one of the given allowed coordinate system is wrong. + */ + public static String buildCoordSysRegExp(final String[] allowedCoordSys) throws ParseException{ + // NULL array => all coordinate systems are allowed: + if (allowedCoordSys == null) + return onlyCoordSysRegExp; + // Empty array => no coordinate system (except the default one) is allowed: + else if (allowedCoordSys.length == 0) + return defaultCoordSysRegExp; + + // The final regular expression must be reduced to a coordinate system and nothing else before: + StringBuffer finalRegExp = new StringBuffer("^\\s*("); + + // For each allowed coordinate system: + Matcher m; + int nbCoordSys = 0; + for(int i = 0; i < allowedCoordSys.length; i++){ + + // NULL item => skipped! + if (allowedCoordSys[i] == null) + continue; + else{ + if (nbCoordSys > 0) + finalRegExp.append('|'); + nbCoordSys++; + } + + // Check its syntax and identify all of its parts: + m = allowedCoordSysPattern.matcher(allowedCoordSys[i].toUpperCase()); + if (m.matches()){ + finalRegExp.append('('); + for(int g = 0; g < 3; g++){ // See the comment after the Javadoc of #allowedCoordSysRegExp for a complete list of available groups returned by the pattern. 
+ + // SINGLE VALUE: + if (m.group(2 + (6 * g)) != null) + finalRegExp.append('(').append(defaultChoice(g, m.group(2 + (6 * g)))).append(m.group(2 + (6 * g))).append(')'); + + // LIST OF VALUES: + else if (m.group(3 + (6 * g)) != null) + finalRegExp.append('(').append(defaultChoice(g, m.group(3 + (6 * g)))).append(m.group(3 + (6 * g)).replaceAll("\\s", "").substring(1)); + + // JOKER (*): + else{ + switch(g){ + case 0: + finalRegExp.append(Frame.regexp); + break; + case 1: + finalRegExp.append(RefPos.regexp); + break; + case 2: + finalRegExp.append(Flavor.regexp); + break; + } + finalRegExp.append('?'); + } + finalRegExp.append("\\s*"); + } + finalRegExp.append(')'); + }else + throw new ParseException("Wrong allowed coordinate system syntax for the " + (i + 1) + "-th item: \"" + allowedCoordSys[i] + "\"! Expected: \"frameRegExp refposRegExp flavorRegExp\" ; where each xxxRegExp = (xxx | '*' | '('xxx ('|' xxx)*')'), frame=\"" + Frame.regexp + "\", refpos=\"" + RefPos.regexp + "\" and flavor=\"" + Flavor.regexp + "\" ; an empty string is also allowed and will be interpreted as '*' (so all possible values)."); + } + + // The final regular expression must be reduced to a coordinate system and nothing else after: + finalRegExp.append(")\\s*$"); + + return (nbCoordSys > 0) ? finalRegExp.toString() : defaultCoordSysRegExp; + } + + /** + * Get the default value appended by a '|' character, ONLY IF the given value does not already contain the default value. + * + * @param g Index of the coordinate system part (0: Frame, 1: RefPos, 2: Flavor, another value will return an empty string). + * @param value Value in which the default value must prefix. + * + * @return A prefix for the given value (the default value and a '|' if the default value is not already in the given value, "" otherwise). + */ + private static String defaultChoice(final int g, final String value){ + switch(g){ + case 0: + return value.contains(Frame.DEFAULT.toString()) ? 
"" : Frame.DEFAULT + "|"; + case 1: + return value.contains(RefPos.DEFAULT.toString()) ? "" : RefPos.DEFAULT + "|"; + case 2: + return value.contains(Flavor.DEFAULT.toString()) ? "" : Flavor.DEFAULT + "|"; + default: + return ""; + } + } + + /* ****** */ + /* REGION */ + /* ****** */ + + /** + *
	 * List all possible region types allowed in an STC-S expression.
	 *
	 * <p><i><b>Note:</b>
	 * 	The possible values listed in this enumeration are limited to the subset of STC-S described by the section
	 * 	"6 Use of STC-S in TAP (informative)" of the TAP Recommendation 1.0 (27th March 2010).
	 * </i></p>
	 *
	 * @author Gr&eacute;gory Mantelet (ARI)
	 * @version 1.3 (10/2014)
	 * @since 1.3
	 */
	public static enum RegionType{
		POSITION, CIRCLE, BOX, POLYGON, UNION, INTERSECTION, NOT;
	}

	/**
	 *

Object representation of an STC region.

+ * + *

+ * This class contains a field for each possible parameter of a region. Depending of the region type + * some are not used. In such case, these unused fields are set to NULL. + *

+ * + *

+ * An instance of this class can be easily serialized into STC-S using {@link #toSTCS()}, {@link #toFullSTCS()} + * or {@link #toString()}. {@link #toFullSTCS()} will display default value explicit + * on the contrary to {@link #toSTCS()} which will replace them by empty strings. + *

+ * + * @author Grégory Mantelet (ARI) + * @version 1.3 (10/2014) + * @since 1.3 + */ + public static class Region { + /** Type of the region. */ + public final RegionType type; + + /** Coordinate system used by this region. + * Note: only the NOT region does not declare a coordinate system ; so only for this region this field is NULL. */ + public final CoordSys coordSys; + + /** List of coordinates' pairs. The second dimension of this array represents a pair of coordinates ; it is then an array of two elements. + * Note: this field is used by POINT, BOX, CIRCLE and POLYGON. */ + public final double[][] coordinates; + + /** Width of the BOX region. */ + public final double width; + + /** Height of the BOX region. */ + public final double height; + + /** Radius of the CIRCLE region. */ + public final double radius; + + /** List of regions unified (UNION), intersected (INTERSECTION) or avoided (NOT). */ + public final Region[] regions; + + /** STC-S representation of this region, in which default values of the coordinate system (if any) are not written (they are replaced by empty strings). + * Note: This attribute is NULL until the first call of the function {@link #toSTCS()} where it is built. */ + private String stcs = null; + + /** STC-S representation of this region, in which default values of the coordinate system (if any) are explicitly written. + * Note: This attribute is NULL until the first call of the function {@link #toFullSTCS()} where it is built. */ + private String fullStcs = null; + + /** The ADQL function object representing this region. + * Note: this attribute is NULL until the first call of the function {@link #toGeometry()} or {@link #toGeometry(ADQLQueryFactory)}. */ + private GeometryFunction geometry = null; + + /** + *

Constructor for a POINT/POSITION region.

+ * + *

Important note: + * The array of coordinates is used like that. No copy is done. + *

+ * + * @param coordSys Coordinate system. note: It MAY BE null ; if so, the default coordinate system will be chosen + * @param coordinates A pair of coordinates ; coordinates[0] and coordinates[1]. + */ + public Region(final CoordSys coordSys, final double[] coordinates){ + this(coordSys, new double[][]{coordinates}); + } + + /** + *

Constructor for a POINT/POSITION or a POLYGON region.

+ * + *

Whether it is a polygon or a point depends on the number of given coordinates:

+ *
    + *
  • 1 item => POINT/POSITION
  • + *
  • more items => POLYGON
  • + *
+ * + *

Important note: + * The array of coordinates is used like that. No copy is done. + *

+ * + * @param coordSys Coordinate system. note: It MAY BE null ; if so, the default coordinate system will be chosen + * @param coordinates List of coordinates' pairs ; coordinates[n] = 1 pair = 2 items (coordinates[n][0] and coordinates[n][1]) ; if 1 pair, it is a POINT/POSITION, but if more, it is a POLYGON. + */ + public Region(final CoordSys coordSys, final double[][] coordinates){ + // Check roughly the coordinates: + if (coordinates == null || coordinates.length == 0) + throw new NullPointerException("Missing coordinates!"); + else if (coordinates[0].length != 2) + throw new IllegalArgumentException("Wrong number of coordinates! Expected at least 2 pairs of coordinates (so coordinates[0], coordinates[1] and coordinates[n].length = 2)."); + + // Decide of the region type in function of the number of coordinates' pairs: + type = (coordinates.length > 1) ? RegionType.POLYGON : RegionType.POSITION; + + // Set the coordinate system (if NULL, choose the default one): + this.coordSys = (coordSys == null ? new CoordSys() : coordSys); + + // Set the coordinates: + this.coordinates = coordinates; + + // Set the other fields as not used: + width = Double.NaN; + height = Double.NaN; + radius = Double.NaN; + regions = null; + } + + /** + *

Constructor for a CIRCLE region.

+ * + *

Important note: + * The array of coordinates is used like that. No copy is done. + *

+ * + * @param coordSys Coordinate system. note: It MAY BE null ; if so, the default coordinate system will be chosen + * @param coordinates A pair of coordinates ; coordinates[0] and coordinates[1]. + * @param radius The circle radius. + */ + public Region(final CoordSys coordSys, final double[] coordinates, final double radius){ + // Check roughly the coordinates: + if (coordinates == null || coordinates.length == 0) + throw new NullPointerException("Missing coordinates!"); + else if (coordinates.length != 2) + throw new IllegalArgumentException("Wrong number of coordinates! Expected exactly 2 values."); + + // Set the region type: + type = RegionType.CIRCLE; + + // Set the coordinate system (if NULL, choose the default one): + this.coordSys = (coordSys == null ? new CoordSys() : coordSys); + + // Set the coordinates: + this.coordinates = new double[][]{coordinates}; + + // Set the radius: + this.radius = radius; + + // Set the other fields as not used: + width = Double.NaN; + height = Double.NaN; + regions = null; + } + + /** + *

Constructor for a BOX region.

+ * + *

Important note: + * The array of coordinates is used like that. No copy is done. + *

+ * + * @param coordSys Coordinate system. note: It MAY BE null ; if so, the default coordinate system will be chosen + * @param coordinates A pair of coordinates ; coordinates[0] and coordinates[1]. + * @param width Width of the box. + * @param height Height of the box. + */ + public Region(final CoordSys coordSys, final double[] coordinates, final double width, final double height){ + // Check roughly the coordinates: + if (coordinates == null || coordinates.length == 0) + throw new NullPointerException("Missing coordinates!"); + else if (coordinates.length != 2) + throw new IllegalArgumentException("Wrong number of coordinates! Expected exactly 2 values."); + + // Set the region type: + type = RegionType.BOX; + + // Set the coordinate system (if NULL, choose the default one): + this.coordSys = (coordSys == null ? new CoordSys() : coordSys); + + // Set the coordinates: + this.coordinates = new double[][]{coordinates}; + + // Set the size of the box: + this.width = width; + this.height = height; + + // Set the other fields as not used: + radius = Double.NaN; + regions = null; + } + + /** + *

Constructor for a UNION or INTERSECTION region.

+ * + *

Important note: + * The array of regions is used like that. No copy is done. + *

+ * + * @param unionOrIntersection Type of the region to create. Note: It can be ONLY a UNION or INTERSECTION. Another value will throw an IllegalArgumentException). + * @param coordSys Coordinate system. note: It MAY BE null ; if so, the default coordinate system will be chosen + * @param regions Regions to unite or to intersect. Note: At least two regions must be provided. + */ + public Region(final RegionType unionOrIntersection, final CoordSys coordSys, final Region[] regions){ + // Check the type: + if (unionOrIntersection == null) + throw new NullPointerException("Missing type of region (UNION or INTERSECTION here)!"); + else if (unionOrIntersection != RegionType.UNION && unionOrIntersection != RegionType.INTERSECTION) + throw new IllegalArgumentException("Wrong region type: \"" + unionOrIntersection + "\"! This constructor lets create only an UNION or INTERSECTION region."); + + // Check the list of regions: + if (regions == null || regions.length == 0) + throw new NullPointerException("Missing regions to " + (unionOrIntersection == RegionType.UNION ? "unite" : "intersect") + "!"); + else if (regions.length < 2) + throw new IllegalArgumentException("Wrong number of regions! Expected at least 2 regions."); + + // Set the region type: + type = unionOrIntersection; + + // Set the coordinate system (if NULL, choose the default one): + this.coordSys = (coordSys == null ? new CoordSys() : coordSys); + + // Set the regions: + this.regions = regions; + + // Set the other fields as not used: + coordinates = null; + radius = Double.NaN; + width = Double.NaN; + height = Double.NaN; + } + + /** + * Constructor for a NOT region. + * + * @param region Any region to not select. 
+ */ + public Region(final Region region){ + // Check the region parameter: + if (region == null) + throw new NullPointerException("Missing region to NOT select!"); + + // Set the region type: + type = RegionType.NOT; + + // Set the regions: + this.regions = new Region[]{region}; + + // Set the other fields as not used: + coordSys = null; + coordinates = null; + radius = Double.NaN; + width = Double.NaN; + height = Double.NaN; + } + + /** + *

Build a Region from the given ADQL representation.

+ * + *

Note: + * Only {@link PointFunction}, {@link CircleFunction}, {@link BoxFunction}, {@link PolygonFunction} and {@link RegionFunction} + * are accepted here. Other extensions of {@link GeometryFunction} will throw an {@link IllegalArgumentException}. + *

+ * + * @param geometry The ADQL representation of the region to create here. + * + * @throws IllegalArgumentException If the given geometry is neither of {@link PointFunction}, {@link BoxFunction}, {@link PolygonFunction} and {@link RegionFunction}. + * @throws ParseException If the declared coordinate system, the coordinates or the STC-S definition has a wrong syntax. + */ + public Region(final GeometryFunction geometry) throws IllegalArgumentException, ParseException{ + if (geometry == null) + throw new NullPointerException("Missing geometry to convert into STCS.Region!"); + + if (geometry instanceof PointFunction){ + type = RegionType.POSITION; + coordSys = STCS.parseCoordSys(extractString(geometry.getCoordinateSystem())); + coordinates = new double[][]{{extractNumeric(((PointFunction)geometry).getCoord1()),extractNumeric(((PointFunction)geometry).getCoord2())}}; + width = Double.NaN; + height = Double.NaN; + radius = Double.NaN; + regions = null; + }else if (geometry instanceof CircleFunction){ + type = RegionType.CIRCLE; + coordSys = STCS.parseCoordSys(extractString(geometry.getCoordinateSystem())); + coordinates = new double[][]{{extractNumeric(((CircleFunction)geometry).getCoord1()),extractNumeric(((CircleFunction)geometry).getCoord2())}}; + radius = extractNumeric(((CircleFunction)geometry).getRadius()); + width = Double.NaN; + height = Double.NaN; + regions = null; + }else if (geometry instanceof BoxFunction){ + type = RegionType.BOX; + coordSys = STCS.parseCoordSys(extractString(geometry.getCoordinateSystem())); + coordinates = new double[][]{{extractNumeric(((BoxFunction)geometry).getCoord1()),extractNumeric(((BoxFunction)geometry).getCoord2())}}; + width = extractNumeric(((BoxFunction)geometry).getWidth()); + height = extractNumeric(((BoxFunction)geometry).getHeight()); + radius = Double.NaN; + regions = null; + }else if (geometry instanceof PolygonFunction){ + PolygonFunction poly = (PolygonFunction)geometry; + type = RegionType.POLYGON; + coordSys = 
STCS.parseCoordSys(extractString(poly.getCoordinateSystem())); + coordinates = new double[(poly.getNbParameters() - 1) / 2][2]; + for(int i = 0; i < coordinates.length; i++) + coordinates[i] = new double[]{extractNumeric(poly.getParameter(1 + i * 2)),extractNumeric(poly.getParameter(2 + i * 2))}; + width = Double.NaN; + height = Double.NaN; + radius = Double.NaN; + regions = null; + }else if (geometry instanceof RegionFunction){ + Region r = STCS.parseRegion(extractString(((RegionFunction)geometry).getParameter(0))); + type = r.type; + coordSys = r.coordSys; + coordinates = r.coordinates; + width = r.width; + height = r.height; + radius = r.radius; + regions = r.regions; + }else + throw new IllegalArgumentException("Unknown region type! Only geometrical function PointFunction, CircleFunction, BoxFunction, PolygonFunction and RegionFunction are allowed."); + } + + /** + * Extract a string value from the given {@link ADQLOperand} + * which is expected to be a {@link StringConstant} instance. + * + * @param op A string operand. + * + * @return The string value embedded in the given operand. + * + * @throws ParseException If the given operand is not an instance of {@link StringConstant}. + */ + private static String extractString(final ADQLOperand op) throws ParseException{ + if (op == null) + throw new NullPointerException("Missing operand!"); + else if (op instanceof StringConstant) + return ((StringConstant)op).getValue(); + else + throw new ParseException("Can not convert into STC-S a non string argument (including ADQLColumn and Concatenation)!"); + } + + /** + * Extract a numeric value from the given {@link ADQLOperand} + * which is expected to be a {@link NumericConstant} instance + * or a {@link NegativeOperand} embedding a {@link NumericConstant}. + * + * @param op A numeric operand. + * + * @return The numeric value embedded in the given operand. 
+ * + * @throws ParseException If the given operand is not an instance of {@link NumericConstant} or a {@link NegativeOperand}. + */ + private static double extractNumeric(final ADQLOperand op) throws ParseException{ + if (op == null) + throw new NullPointerException("Missing operand!"); + else if (op instanceof NumericConstant) + return Double.parseDouble(((NumericConstant)op).getValue()); + else if (op instanceof NegativeOperand) + return extractNumeric(((NegativeOperand)op).getOperand()) * -1; + else + throw new ParseException("Can not convert into STC-S a non numeric argument (including ADQLColumn and Operation)!"); + } + + /** + *

Get the STC-S representation of this region (in which default values + * of the coordinate system are not written ; they are replaced by empty strings).

+ * + *

Note: + * This function build the STC-S just once and store it in a class attribute. + * The value of this attribute is then returned at next calls of this function. + *

+ * + * @return Its STC-S representation. + */ + public String toSTCS(){ + if (stcs != null) + return stcs; + else{ + // Write the region type: + StringBuffer buf = new StringBuffer(type.toString()); + + // Write the coordinate system (except for NOT): + if (type != RegionType.NOT){ + String coordSysStr = coordSys.toSTCS(); + if (coordSysStr != null && coordSysStr.length() > 0) + buf.append(' ').append(coordSysStr); + buf.append(' '); + } + + // Write the other parameters (coordinates, regions, ...): + switch(type){ + case POSITION: + case POLYGON: + appendCoordinates(buf, coordinates); + break; + case CIRCLE: + appendCoordinates(buf, coordinates); + buf.append(' ').append(radius); + break; + case BOX: + appendCoordinates(buf, coordinates); + buf.append(' ').append(width).append(' ').append(height); + break; + case UNION: + case INTERSECTION: + case NOT: + buf.append('('); + appendRegions(buf, regions, false); + buf.append(')'); + break; + } + + // Return the built STC-S: + return (stcs = buf.toString()); + } + } + + /** + *

Get the STC-S representation of this region (in which default values + * of the coordinate system are explicitly written).

+ * + *

Note: + * This function build the STC-S just once and store it in a class attribute. + * The value of this attribute is then returned at next calls of this function. + *

+ * + * @return Its STC-S representation. + */ + public String toFullSTCS(){ + if (fullStcs != null) + return fullStcs; + else{ + // Write the region type: + StringBuffer buf = new StringBuffer(type.toString()); + + // Write the coordinate system (except for NOT): + if (type != RegionType.NOT){ + String coordSysStr = coordSys.toFullSTCS(); + if (coordSysStr != null && coordSysStr.length() > 0) + buf.append(' ').append(coordSysStr); + buf.append(' '); + } + + // Write the other parameters (coordinates, regions, ...): + switch(type){ + case POSITION: + case POLYGON: + appendCoordinates(buf, coordinates); + break; + case CIRCLE: + appendCoordinates(buf, coordinates); + buf.append(' ').append(radius); + break; + case BOX: + appendCoordinates(buf, coordinates); + buf.append(' ').append(width).append(' ').append(height); + break; + case UNION: + case INTERSECTION: + case NOT: + buf.append('('); + appendRegions(buf, regions, true); + buf.append(')'); + break; + } + + // Return the built STC-S: + return (fullStcs = buf.toString()); + } + } + + /** + * Append all the given coordinates to the given buffer. + * + * @param buf Buffer in which coordinates must be appended. + * @param coords Coordinates to append. + */ + private static void appendCoordinates(final StringBuffer buf, final double[][] coords){ + for(int i = 0; i < coords.length; i++){ + if (i > 0) + buf.append(' '); + buf.append(coords[i][0]).append(' ').append(coords[i][1]); + } + } + + /** + * Append all the given regions in the given buffer. + * + * @param buf Buffer in which regions must be appended. + * @param regions Regions to append. + * @param fullCoordSys Indicate whether the coordinate system of the regions must explicitly display the default values. 
+ */ + private static void appendRegions(final StringBuffer buf, final Region[] regions, final boolean fullCoordSys){ + for(int i = 0; i < regions.length; i++){ + if (i > 0) + buf.append(' '); + if (fullCoordSys) + buf.append(regions[i].toFullSTCS()); + else + buf.append(regions[i].toSTCS()); + } + } + + @Override + public String toString(){ + return toSTCS(); + } + + /** + *

Convert this region into its corresponding ADQL representation.

+ * + *
    + *
  • POSITION: {@link PointFunction}
  • + *
  • CIRCLE: {@link CircleFunction}
  • + *
  • BOX: {@link BoxFunction}
  • + *
  • POLYGON: {@link PolygonFunction}
  • + *
  • UNION, INTERSECTION, NOT: {@link RegionFunction}
  • + *
+ * + *

Note: + * This function is using the default ADQL factory, built using {@link ADQLQueryFactory#ADQLQueryFactory()}. + *

+ * + * @return The corresponding ADQL representation. + * + * @see #toGeometry(ADQLQueryFactory) + */ + public GeometryFunction toGeometry(){ + return toGeometry(null); + } + + /** + *

Convert this region into its corresponding ADQL representation.

+ * + *
    + *
  • POSITION: {@link PointFunction}
  • + *
  • CIRCLE: {@link CircleFunction}
  • + *
  • BOX: {@link BoxFunction}
  • + *
  • POLYGON: {@link PolygonFunction}
  • + *
  • UNION, INTERSECTION, NOT: {@link RegionFunction}
  • + *
+ * + *

Note: + * This function build the ADQL representation just once and store it in a class attribute. + * The value of this attribute is then returned at next calls of this function. + *

+ * + * @param factory The factory of ADQL objects to use. + * + * @return The corresponding ADQL representation. + */ + public GeometryFunction toGeometry(ADQLQueryFactory factory){ + if (factory == null) + factory = new ADQLQueryFactory(); + + try{ + if (geometry != null) + return geometry; + else{ + StringConstant coordSysObj = factory.createStringConstant(coordSys == null ? "" : coordSys.toString()); + switch(type){ + case POSITION: + return (geometry = factory.createPoint(coordSysObj, toNumericObj(coordinates[0][0], factory), toNumericObj(coordinates[0][1], factory))); + case CIRCLE: + return (geometry = factory.createCircle(coordSysObj, toNumericObj(coordinates[0][0], factory), toNumericObj(coordinates[0][1], factory), toNumericObj(radius, factory))); + case BOX: + return (geometry = factory.createBox(coordSysObj, toNumericObj(coordinates[0][0], factory), toNumericObj(coordinates[0][1], factory), toNumericObj(width, factory), toNumericObj(height, factory))); + case POLYGON: + ArrayList coords = new ArrayList(coordinates.length * 2); + for(int i = 0; i < coordinates.length; i++){ + coords.add(toNumericObj(coordinates[i][0], factory)); + coords.add(toNumericObj(coordinates[i][1], factory)); + } + return (geometry = factory.createPolygon(coordSysObj, coords)); + default: + return (geometry = factory.createRegion(factory.createStringConstant(toString()))); + } + } + }catch(Exception pe){ + return null; + } + } + + /** + *

Convert a numeric value into an ADQL representation:

+ * + *
    + *
  • If negative: NegativeOperand(NumericConstant(val))
  • + *
  • Otherwise: NumericConstant(val)
  • + *
+ * + * @param val The value to embed in an ADQL object. + * @param factory The factory to use to created ADQL objects. + * + * @return The representing ADQL representation. + * + * @throws Exception If an error occurs while creating the ADQL object. + */ + private ADQLOperand toNumericObj(final double val, final ADQLQueryFactory factory) throws Exception{ + if (val >= 0) + return factory.createNumericConstant("" + val); + else + return factory.createNegativeOperand(factory.createNumericConstant("" + (val * -1))); + } + } + + /** + * Parse the given STC-S expression representing a geometrical region. + * + * @param stcsRegion STC-S expression of a region. Note: MUST be different from NULL. + * + * @return The object representation of the specified geometrical region. + * + * @throws ParseException If the given expression is NULL, empty string or if the STC-S syntax is wrong. + */ + public static Region parseRegion(final String stcsRegion) throws ParseException{ + if (stcsRegion == null || stcsRegion.trim().length() == 0) + throw new ParseException("Missing STC-S expression to parse!"); + return (new STCSParser().parseRegion(stcsRegion)); + } + + /** + * Convert into STC-S the given object representation of a geometrical region. + * + * @param region Region to convert into STC-S. + * + * @return The corresponding STC-S expression. + */ + public static String toSTCS(final Region region){ + if (region == null) + throw new NullPointerException("Missing region to serialize into STC-S!"); + return region.toSTCS(); + } + + /** + *

Convert into STC-S the given ADQL representation of a geometrical function.

+ * + *

Important note: + * Only {@link PointFunction}, {@link CircleFunction}, {@link BoxFunction}, {@link PolygonFunction} + * and {@link RegionFunction} are accepted here. Other extensions of {@link GeometryFunction} will + * throw an {@link IllegalArgumentException}. + *

+ * + * @param region ADQL representation of the region to convert into STC-S. + * + * @return The corresponding STC-S expression. + * + * @throws ParseException If the given object is NULL or not of the good type. + */ + public static String toSTCS(final GeometryFunction region) throws ParseException{ + if (region == null) + throw new NullPointerException("Missing region to serialize into STC-S!"); + return (new Region(region)).toSTCS(); + } + + /* *************************** */ + /* PARSER OF STC-S EXPRESSIONS */ + /* *************************** */ + + /** + * Let parse any STC-S expression. + * + * @author Grégory Mantelet (ARI) + * @version 1.3 (11/2014) + * @since 1.3 + */ + private static class STCSParser { + /** Regular expression of a numerical value. */ + private final static String numericRegExp = "(\\+|-)?(\\d+(\\.\\d*)?|\\.\\d+)([Ee](\\+|-)?\\d+)?"; + + /** Position of the next characters to read in the STC-S expression to parse. */ + private int pos; + /** Full STC-S expression to parse. */ + private String stcs; + /** Last read token (can be a numeric, a string, a region type, ...). */ + private String token; + /** Buffer used to read tokens. */ + private StringBuffer buffer; + + /** + * Exception sent when the end of the expression + * (EOE = End Of Expression) is reached. + * + * @author Grégory Mantelet (ARI) + * @version 1.3 (10/2014) + * @since 1.3 + */ + private static class EOEException extends ParseException { + private static final long serialVersionUID = 1L; + + /** Build a simple EOEException. */ + public EOEException(){ + super("Unexpected End Of Expression!"); + } + } + + /** + * Build the STC-S parser. + */ + public STCSParser(){} + + /** + * Parse the given STC-S expression, expected as a coordinate system. + * + * @param stcs The STC-S expression to parse. + * + * @return The corresponding object representation of the specified coordinate system. 
+ * + * @throws ParseException If the syntax of the given STC-S expression is wrong or if it is not a coordinate system. + */ + public CoordSys parseCoordSys(final String stcs) throws ParseException{ + init(stcs); + CoordSys coordsys = null; + try{ + coordsys = coordSys(); + end(COORD_SYS_SYNTAX); + return coordsys; + }catch(EOEException ex){ + ex.printStackTrace(); + return new CoordSys(); + } + } + + /** + * Parse the given STC-S expression, expected as a geometrical region. + * + * @param stcs The STC-S expression to parse. + * + * @return The corresponding object representation of the specified geometrical region. + * + * @throws ParseException If the syntax of the given STC-S expression is wrong or if it is not a geometrical region. + */ + public Region parseRegion(final String stcs) throws ParseException{ + init(stcs); + Region region = region(); + end("\"POSITION \", \"CIRCLE \", \"BOX \", \"POLYGON [ ...]\", \"UNION ( [ ...] )\", \"INTERSECTION [] ( [ ...] )\" or \"NOT ( )\""); + return region; + } + + /** + * Prepare the parser in order to read the given STC-S expression. + * + * @param newStcs New STC-S expression to parse from now. + */ + private void init(final String newStcs){ + stcs = (newStcs == null) ? "" : newStcs; + token = null; + buffer = new StringBuffer(); + pos = 0; + } + + /** + * Finalize the parsing. + * No more characters (except eventually some space characters) should remain in the STC-S expression to parse. + * + * @param expectedSyntax Description of the good syntax expected. This description is used only to write the + * {@link ParseException} in case other non-space characters are found among the remaining characters. + * + * @throws ParseException If other non-space characters remains. 
+ */ + private void end(final String expectedSyntax) throws ParseException{ + // Skip all spaces: + skipSpaces(); + + // If there is still some characters, they are not expected, and so throw an exception: + if (stcs.length() > 0 && pos < stcs.length()) + throw new ParseException("Incorrect syntax: \"" + stcs.substring(pos) + "\" was unexpected! Expected syntax: " + expectedSyntax + ".", new TextPosition(1, pos, 1, stcs.length())); + + // Reset the buffer, token and the STC-S expression to parse: + buffer = null; + stcs = null; + token = null; + } + + /** + * Tool function which skip all next space characters until the next meaningful characters. + */ + private void skipSpaces(){ + while(pos < stcs.length() && Character.isWhitespace(stcs.charAt(pos))) + pos++; + } + + /** + *

Get the next meaningful word. This word can be a numeric, any string constant or a region type.

+ * + *

+ * In case the end of the expression is reached before getting any meaningful character, an {@link EOEException} is thrown. + *

+ * + * @return The full read word/token. + * + * @throws EOEException If the end of the STC-S expression is reached before getting any meaningful character. + */ + private String nextToken() throws EOEException{ + // Skip all spaces: + skipSpaces(); + + // Fetch all characters until word separator (a space or a open/close parenthesis): + while(pos < stcs.length() && !Character.isWhitespace(stcs.charAt(pos)) && stcs.charAt(pos) != '(' && stcs.charAt(pos) != ')') + buffer.append(stcs.charAt(pos++)); + + // If no character has been fetched while at least one was expected, throw an exception: + if (buffer.length() == 0) + throw new EOEException(); + + // Save the read token and reset the buffer: + token = buffer.toString(); + buffer.delete(0, token.length()); + + return token; + } + + /** + * Read the next token as a numeric. + * If not a numeric, a {@link ParseException} is thrown. + * + * @return The read numerical value. + * + * @throws ParseException If the next token is not a numerical expression. + */ + private double numeric() throws ParseException{ + if (nextToken().matches(numericRegExp)) + return Double.parseDouble(token); + else + throw new ParseException("a numeric was expected!", new TextPosition(1, pos - token.length(), 1, pos)); // TODO Check the begin and end! + } + + /** + * Read the next 2 tokens as a coordinate pairs (so as 2 numerical values). + * If not 2 numeric, a {@link ParseException} is thrown. + * + * @return The read coordinate pairs. + * + * @throws ParseException If the next 2 tokens are not 2 numerical expressions. + */ + private double[] coordPair() throws ParseException{ + skipSpaces(); + int startPos = pos; + try{ + return new double[]{numeric(),numeric()}; + }catch(ParseException pe){ + if (pe instanceof EOEException) + throw pe; + else + throw new ParseException("a coordinates pair (2 numerics separated by one or more spaces) was expected!", new TextPosition(1, startPos, 1, pos)); // TODO Check the begin and end! 
+ } + } + + /** + * Read and parse the next tokens as a coordinate system expression. + * If they do not match, a {@link ParseException} is thrown. + * + * @return The object representation of the read coordinate system. + * + * @throws ParseException If the next tokens are not representing a valid coordinate system. + */ + private CoordSys coordSys() throws ParseException{ + // Skip all spaces: + skipSpaces(); + + // Backup the current position: + /* (because every parts of a coordinate system are optional ; + * like this, it will be possible to go back in the expression + * to parse if optional parts are not written) */ + String oldToken = token; + int startPos = pos; + + Frame fr = null; + RefPos rp = null; + Flavor fl = null; + + try{ + // Read the token: + nextToken(); + // Try to parse it as a frame: + if ((fr = frame()) != null){ + // if success, go the next token: + startPos = pos; + oldToken = token; + nextToken(); + } + // Try to parse the last read token as a reference position: + if ((rp = refpos()) != null){ + // if success, go the next token: + startPos = pos; + oldToken = token; + nextToken(); + } + // Try to parse the last read token as a flavor: + if ((fl = flavor()) == null){ + // if NOT a success, go back "in time" (go back to the position before reading the token): + pos = startPos; + token = oldToken; + } + }catch(EOEException ex){ + /* End Of Expression may happen here since all parts of a coordinate system are optional. + * So, there is no need to treat the error. */ + } + + // Build the object representation of the read coordinate system: + /* Note: if nothing has been read for one or all parts of the coordinate system, + * the NULL value will be replaced automatically in the constructor + * by the default value of the corresponding part(s). 
*/ + try{ + return new CoordSys(fr, rp, fl); + }catch(IllegalArgumentException iae){ + throw new ParseException(iae.getMessage(), new TextPosition(1, startPos, 1, pos)); + } + } + + /** + * Parse the last read token as FRAME. + * + * @return The corresponding enumeration item, or NULL if the last token is not a valid FRAME item. + */ + private Frame frame(){ + try{ + return Frame.valueOf(token.toUpperCase()); + }catch(IllegalArgumentException iae){ + return null; + } + } + + /** + * Parse the last read token as REFERENCE POSITION. + * + * @return The corresponding enumeration item, or NULL if the last token is not a valid REFERENCE POSITION item. + */ + private RefPos refpos(){ + try{ + return RefPos.valueOf(token.toUpperCase()); + }catch(IllegalArgumentException iae){ + return null; + } + } + + /** + * Parse the last read token as FLAVOR. + * + * @return The corresponding enumeration item, or NULL if the last token is not a valid FLAVOR item. + */ + private Flavor flavor(){ + try{ + return Flavor.valueOf(token.toUpperCase()); + }catch(IllegalArgumentException iae){ + return null; + } + } + + /** + * Read and parse the next tokens as a geometrical region. + * If they do not match, a {@link ParseException} is thrown. + * + * @return The object representation of the read geometrical region. + * + * @throws ParseException If the next tokens are not representing a valid geometrical region. 
+ */ + private Region region() throws ParseException{ + // Skip all spaces: + skipSpaces(); + + // Read the next token (it should be the region type): + int startPos = pos; + token = nextToken().toUpperCase(); + + /* Identify the region type, next the expected parameters and finally build the corresponding object representation */ + // POSITION case: + if (token.equals("POSITION")){ + try{ + CoordSys coordSys = coordSys(); + double[] coords = coordPair(); + return new Region(coordSys, coords); + }catch(Exception e){ + throw buildException(e, "\"POSITION \", where coordPair=\" \" and coordSys=" + COORD_SYS_SYNTAX, startPos); + } + } + // CIRCLE case: + else if (token.equals("CIRCLE")){ + try{ + CoordSys coordSys = coordSys(); + double[] coords = coordPair(); + double radius = numeric(); + return new Region(coordSys, coords, radius); + }catch(Exception e){ + throw buildException(e, "\"CIRCLE \", where coordPair=\" \", radius=\"\" and coordSys=" + COORD_SYS_SYNTAX, startPos); + } + } + // BOX case: + else if (token.equals("BOX")){ + try{ + CoordSys coordSys = coordSys(); + double[] coords = coordPair(); + double width = numeric(), height = numeric(); + return new Region(coordSys, coords, width, height); + }catch(Exception e){ + throw buildException(e, "\"BOX \", where coordPair=\" \", width and height=\"\" and coordSys=" + COORD_SYS_SYNTAX, startPos); + } + } + // POLYGON case: + else if (token.equals("POLYGON")){ + try{ + CoordSys coordSys = coordSys(); + ArrayList coordinates = new ArrayList(6); + double[] coords; + for(int i = 0; i < 3; i++){ + coords = coordPair(); + coordinates.add(coords[0]); + coordinates.add(coords[1]); + } + boolean moreCoord = true; + int posBackup; + do{ + posBackup = pos; + try{ + coords = coordPair(); + coordinates.add(coords[0]); + coordinates.add(coords[1]); + }catch(ParseException pe){ + moreCoord = false; + pos = posBackup; + } + }while(moreCoord); + double[][] allCoords = new double[coordinates.size() / 2][2]; + for(int i = 0; i < 
coordinates.size() && i + 1 < coordinates.size(); i += 2) + allCoords[i / 2] = new double[]{coordinates.get(i),coordinates.get(i + 1)}; + return new Region(coordSys, allCoords); + }catch(Exception e){ + throw buildException(e, "\"POLYGON [ ...]\", where coordPair=\" \" and coordSys=" + COORD_SYS_SYNTAX, startPos); + } + } + // UNION & INTERSECTION cases: + else if (token.equals("UNION") || token.equals("INTERSECTION")){ + RegionType type = (token.equals("UNION") ? RegionType.UNION : RegionType.INTERSECTION); + try{ + CoordSys coordSys = coordSys(); + ArrayList regions = new ArrayList(2); + + skipSpaces(); + if (stcs.charAt(pos) != '(') + throw buildException(new ParseException("a opening parenthesis - ( - was expected!", new TextPosition(1, pos, 1, pos + 1)), "\"" + type + " ( [ ...] )\", where coordSys=" + COORD_SYS_SYNTAX, startPos); + else + pos++; + + // parse and add the FIRST region: + regions.add(region()); + + // parse and add the SECOND region: + regions.add(region()); + + skipSpaces(); + while(stcs.charAt(pos) != ')'){ + regions.add(region()); + skipSpaces(); + } + pos++; + + return new Region(type, coordSys, regions.toArray(new Region[regions.size()])); + }catch(Exception e){ + if (e instanceof ParseException && e.getMessage().startsWith("Incorrect syntax: \"")) + throw (ParseException)e; + else + throw buildException(e, "\"" + type + " ( [ ...] 
)\", where coordSys=" + COORD_SYS_SYNTAX, startPos); + } + } + // NOT case: + else if (token.equals("NOT")){ + try{ + skipSpaces(); + if (stcs.charAt(pos) != '(') + throw buildException(new ParseException("an opening parenthesis - ( - was expected!", new TextPosition(1, pos, 1, pos + 1)), "\"NOT ( )\"", startPos); + else + pos++; + Region region = region(); + skipSpaces(); + if (stcs.charAt(pos) != ')') + throw buildException(new ParseException("a closing parenthesis - ) - was expected!", new TextPosition(1, pos, 1, pos + 1)), "\"NOT ( )\"", startPos); + else + pos++; + return new Region(region); + }catch(Exception e){ + if (e instanceof ParseException && e.getMessage().startsWith("Incorrect syntax: ")) + throw (ParseException)e; + else + throw buildException(e, "\"NOT ( )\"", startPos); + } + } + // Otherwise, the region type is not known and so a ParseException is thrown: + else + throw new ParseException("Unknown STC region type: \"" + token + "\"!", new TextPosition(1, startPos, 1, pos)); + } + + /** + * Build a {@link ParseException} based on the given one and by adding the human description of what was expected, if needed. + * + * @param ex Root exception. + * @param expectedSyntax Human description of what was expected. + * @param startPos Position of the first character of the wrong part of expression. + * + * @return The build exception. + */ + private ParseException buildException(final Exception ex, final String expectedSyntax, int startPos){ + if (ex instanceof EOEException) + return new ParseException("Unexpected End Of Expression! Expected syntax: " + expectedSyntax + ".", new TextPosition(1, startPos, 1, pos)); + else if (ex instanceof ParseException) + return new ParseException("Incorrect syntax: " + ex.getMessage() + " Expected syntax: " + expectedSyntax + ".", (((ParseException)ex).getPosition() != null ? 
((ParseException)ex).getPosition() : new TextPosition(1, startPos, 1, pos))); + else + return new ParseException(ex.getMessage(), new TextPosition(1, startPos, 1, pos)); + } + } +} diff --git a/src/adql/db/SearchColumnList.java b/src/adql/db/SearchColumnList.java index dbb9b97ce2847b579de5389cb6ce0de0c4814246..b3117d6664ded3589e514f1eefe2b52ccf3fb34d 100644 --- a/src/adql/db/SearchColumnList.java +++ b/src/adql/db/SearchColumnList.java @@ -17,7 +17,7 @@ package adql.db; * along with ADQLLibrary. If not, see . * * Copyright 2012-2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Astronomisches Rechen Institut (ARI) */ import java.util.ArrayList; @@ -46,7 +46,7 @@ import cds.utils.TextualSearchList; *

* * @author Grégory Mantelet (CDS;ARI) - * @version 1.2 (11/2013) + * @version 1.3 (02/2015) */ public class SearchColumnList extends TextualSearchList { private static final long serialVersionUID = 1L; @@ -284,7 +284,7 @@ public class SearchColumnList extends TextualSearchList { } // test the schema name: - if (schema != null){ + if (schema != null && matchTable.getADQLSchemaName() != null){ if (IdentifierField.SCHEMA.isCaseSensitive(caseSensitivity)){ if (!matchTable.getADQLSchemaName().equals(schema)) continue; @@ -307,7 +307,6 @@ public class SearchColumnList extends TextualSearchList { // if here, all prefixes are matching and so the column is a good match: DBColumn goodMatch = matchTable.getColumn(match.getADQLName(), true); - System.out.println("Good match for \"" + catalog + "." + schema + "." + table + "." + column + "\" found: " + goodMatch); result.add(goodMatch); } } diff --git a/src/adql/db/SearchTableList.java b/src/adql/db/SearchTableList.java index 649b4fd60b6962c2d304353bada4506704506988..7de6ec59098b7b6f6516627aed856aa64b850cf4 100644 --- a/src/adql/db/SearchTableList.java +++ b/src/adql/db/SearchTableList.java @@ -16,13 +16,14 @@ package adql.db; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Astronomisches Rechen Institut (ARI) */ import java.util.ArrayList; import java.util.Collection; -import adql.query.IdentifierField; +import adql.query.IdentifierField; import adql.query.from.ADQLTable; import cds.utils.TextualSearchList; @@ -34,8 +35,8 @@ import cds.utils.TextualSearchList; * These last information will be used only if the ADQL table name is ambiguous, otherwise all matching elements are returned. *

* - * @author Grégory Mantelet (CDS) - * @version 09/2011 + * @author Grégory Mantelet (CDS;ARI) + * @version 1.3 (02/2015) */ public class SearchTableList extends TextualSearchList { private static final long serialVersionUID = 1L; @@ -58,7 +59,7 @@ public class SearchTableList extends TextualSearchList { * * @param collection Collection of {@link DBTable} to copy. */ - public SearchTableList(final Collection collection){ + public SearchTableList(final Collection collection){ super(collection, new DBTableKeyExtractor()); } @@ -155,22 +156,24 @@ public class SearchTableList extends TextualSearchList { ArrayList result = new ArrayList(); for(DBTable match : tmpResult){ - if (IdentifierField.SCHEMA.isCaseSensitive(caseSensitivity)){ - if (!match.getADQLSchemaName().equals(schema)) - continue; - }else{ - if (!match.getADQLSchemaName().equalsIgnoreCase(schema)) - continue; - } - - if (catalog != null){ - if (IdentifierField.CATALOG.isCaseSensitive(caseSensitivity)){ - if (!match.getADQLCatalogName().equals(catalog)) + if (match.getADQLSchemaName() != null){ + if (IdentifierField.SCHEMA.isCaseSensitive(caseSensitivity)){ + if (!match.getADQLSchemaName().equals(schema)) continue; }else{ - if (!match.getADQLCatalogName().equalsIgnoreCase(catalog)) + if (!match.getADQLSchemaName().equalsIgnoreCase(schema)) continue; } + + if (catalog != null && match.getADQLCatalogName() != null){ + if (IdentifierField.CATALOG.isCaseSensitive(caseSensitivity)){ + if (!match.getADQLCatalogName().equals(catalog)) + continue; + }else{ + if (!match.getADQLCatalogName().equalsIgnoreCase(catalog)) + continue; + } + } } result.add(match); @@ -199,6 +202,7 @@ public class SearchTableList extends TextualSearchList { * @version 09/2011 */ private static class DBTableKeyExtractor implements KeyExtractor { + @Override public String getKey(DBTable obj){ return obj.getADQLName(); } diff --git a/src/adql/db/exception/UnresolvedFunctionException.java 
b/src/adql/db/exception/UnresolvedFunctionException.java new file mode 100644 index 0000000000000000000000000000000000000000..befb5565061b0b9997916427e3cdba020eaf086b --- /dev/null +++ b/src/adql/db/exception/UnresolvedFunctionException.java @@ -0,0 +1,124 @@ +package adql.db.exception; + +/* + * This file is part of ADQLLibrary. + * + * ADQLLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * ADQLLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with ADQLLibrary. If not, see . + * + * Copyright 2014-2015 - Astronomisches Rechen Institut (ARI) + */ + +import adql.parser.ParseException; +import adql.query.operand.function.ADQLFunction; + +/** + * Exception thrown when a function can not be resolved by the library. + * + * @author Grégory Mantelet (ARI) + * @version 1.3 (05/2015) + * @since 1.3 + */ +public class UnresolvedFunctionException extends ParseException { + private static final long serialVersionUID = 1L; + + /** Function which can not be resolved. */ + protected final ADQLFunction functionInError; + + /** + * Build the exception with just a message. + * + * @param message Description of the error. + */ + public UnresolvedFunctionException(final String message){ + super(message); + functionInError = null; + } + + /** + * Build the exception with the unresolved function in parameter. + * The position of this function in the ADQL query can be retrieved and used afterwards. + * + * @param fct The unresolved function. 
+ */ + public UnresolvedFunctionException(final ADQLFunction fct){ + super("Unresolved function: \"" + fct.toADQL() + "\"! No UDF has been defined or found with the signature: " + getFctSignature(fct) + "."); // TODO Add the position of the function in the ADQL query! + functionInError = fct; + } + + /** + * Build the exception with a message but also with the unresolved function in parameter. + * The position of this function in the ADQL query can be retrieved and used afterwards. + * + * @param message Description of the error. + * @param fct The unresolved function. + */ + public UnresolvedFunctionException(final String message, final ADQLFunction fct){ + super(message); // TODO Add the position of the function in the ADQL query! + functionInError = fct; + } + + /** + * Get the unresolved function at the origin of this exception. + * + * @return The unresolved function. Note: MAY be NULL + */ + public final ADQLFunction getFunction(){ + return functionInError; + } + + /** + *

Get the signature of the function given in parameter.

+ * + *

+ * In this signature, just the name and the type of all the parameters are written. + * The return type is never part of a function signature. + *

+ * + *

Note 1: + * A parameter type can be either "NUMERIC", "STRING" or "GEOMETRY". In order to be the most generic has possible, + * no more precision about a type is returned here. If the parameter is none of these type kinds, "???" is returned. + *

+ * + *

Note 2: + * If the given object is NULL, an empty string is returned. + *

+ * + * @param fct Function whose the signature must be returned. + * + * @return The corresponding signature. + */ + public static String getFctSignature(final ADQLFunction fct){ + if (fct == null) + return ""; + + StringBuffer buf = new StringBuffer(fct.getName().toLowerCase()); + buf.append('('); + for(int i = 0; i < fct.getNbParameters(); i++){ + if (fct.getParameter(i).isNumeric()) + buf.append("NUMERIC"); + else if (fct.getParameter(i).isString()) + buf.append("STRING"); + else if (fct.getParameter(i).isGeometry()) + buf.append("GEOMETRY"); + else + buf.append("???"); + + if ((i + 1) < fct.getNbParameters()) + buf.append(", "); + } + buf.append(')'); + return buf.toString(); + } + +} diff --git a/src/adql/db/exception/UnresolvedIdentifiersException.java b/src/adql/db/exception/UnresolvedIdentifiersException.java index b141befb55700ffa2a51f0d39fcba06d58d61d0f..446ae7933b57555b89a9e97500d6744c103a2c69 100644 --- a/src/adql/db/exception/UnresolvedIdentifiersException.java +++ b/src/adql/db/exception/UnresolvedIdentifiersException.java @@ -16,7 +16,8 @@ package adql.db.exception; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.util.ArrayList; @@ -36,8 +37,8 @@ import adql.parser.ParseException; * on this {@link UnresolvedIdentifiersException} (method {@link #iterator()}). *

* - * @author Grégory Mantelet (CDS) - * @version 06/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 1.4 (06/2015) * * @see DBChecker */ @@ -65,12 +66,16 @@ public class UnresolvedIdentifiersException extends ParseException implements It exceptions.add(pe); if (pe instanceof UnresolvedColumnException){ String colName = ((UnresolvedColumnException)pe).getColumnName(); - if (colName != null && !colName.trim().isEmpty()) + if (colName != null && colName.trim().length() > 0) addIdentifierName(colName + " " + pe.getPosition()); }else if (pe instanceof UnresolvedTableException){ String tableName = ((UnresolvedTableException)pe).getTableName(); - if (tableName != null && !tableName.trim().isEmpty()) + if (tableName != null && tableName.trim().length() > 0) addIdentifierName(tableName + " " + pe.getPosition()); + }else if (pe instanceof UnresolvedFunctionException){ + String fctName = (((UnresolvedFunctionException)pe).getFunction() == null) ? null : ((UnresolvedFunctionException)pe).getFunction().getName() + "(...)"; + if (fctName != null && fctName.trim().length() > 0) + addIdentifierName(fctName + " " + pe.getPosition()); }else if (pe instanceof UnresolvedIdentifiersException) addIdentifierName(((UnresolvedIdentifiersException)pe).unresolvedIdentifiers); } @@ -82,7 +87,7 @@ public class UnresolvedIdentifiersException extends ParseException implements It * @param name Name (or description) of the identifier to add. 
*/ private final void addIdentifierName(final String name){ - if (name != null && !name.trim().isEmpty()){ + if (name != null && name.trim().length() > 0){ if (unresolvedIdentifiers == null) unresolvedIdentifiers = ""; else @@ -109,6 +114,7 @@ public class UnresolvedIdentifiersException extends ParseException implements It return exceptions.iterator(); } + @Override public final Iterator iterator(){ return getErrors(); } @@ -120,7 +126,11 @@ public class UnresolvedIdentifiersException extends ParseException implements It */ @Override public String getMessage(){ - return exceptions.size() + " unresolved identifiers" + ((unresolvedIdentifiers != null) ? (": " + unresolvedIdentifiers) : "") + " !"; + StringBuffer buf = new StringBuffer(); + buf.append(exceptions.size()).append(" unresolved identifiers").append(((unresolvedIdentifiers != null) ? (": " + unresolvedIdentifiers) : "")).append('!'); + for(ParseException pe : exceptions) + buf.append("\n - ").append(pe.getMessage()); + return buf.toString(); } } diff --git a/src/adql/db/exception/UnresolvedJoin.java b/src/adql/db/exception/UnresolvedJoinException.java similarity index 85% rename from src/adql/db/exception/UnresolvedJoin.java rename to src/adql/db/exception/UnresolvedJoinException.java index ac47cfdd4e24bc11658c1e456865139b6879bc91..162bcd03c05b07284b4e6387f1daa1b2da340f64 100644 --- a/src/adql/db/exception/UnresolvedJoin.java +++ b/src/adql/db/exception/UnresolvedJoinException.java @@ -16,7 +16,7 @@ package adql.db.exception; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2013-2014 - Astronomishes Rechen Institute (ARI) + * Copyright 2013-2015 - Astronomisches Rechen Institut (ARI) */ import adql.parser.ParseException; @@ -27,9 +27,10 @@ import adql.query.TextPosition; * and particularly because of the join condition (i.e. column names not found, ...). 
* * @author Grégory Mantelet (ARI) - gmantele@ari.uni-heidelberg.de - * @version 1.2 (11/2013) + * @version 1.3 (05/2015) + * @since 1.2 */ -public class UnresolvedJoin extends ParseException { +public class UnresolvedJoinException extends ParseException { private static final long serialVersionUID = 1L; /** @@ -38,7 +39,7 @@ public class UnresolvedJoin extends ParseException { * * @param message Message to display explaining why the join can't be resolved. */ - public UnresolvedJoin(String message){ + public UnresolvedJoinException(String message){ super(message); } @@ -48,7 +49,7 @@ public class UnresolvedJoin extends ParseException { * @param message Message to display explaining why the join can't be resolved. * @param errorPosition Position of the wrong part of the join. */ - public UnresolvedJoin(String message, TextPosition errorPosition){ + public UnresolvedJoinException(String message, TextPosition errorPosition){ super(message, errorPosition); } diff --git a/src/adql/parser/.gitignore b/src/adql/parser/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..5e75933c6a3ba2a311cd3cd309c6159f5c3e1845 --- /dev/null +++ b/src/adql/parser/.gitignore @@ -0,0 +1,3 @@ +/ADQLParser.java +/ADQLParserConstants.java +/ADQLParserTokenManager.java diff --git a/src/adql/parser/ADQLParser.java b/src/adql/parser/ADQLParser.java index f5f1e54d1a949ad4d2061bc700674a78216de4b6..08f3482a5d08bacba4ec07d607293f960a5340c3 100644 --- a/src/adql/parser/ADQLParser.java +++ b/src/adql/parser/ADQLParser.java @@ -4,7 +4,6 @@ package adql.parser; import java.io.FileReader; import java.io.IOException; import java.util.ArrayList; -import java.util.Collection; import java.util.Stack; import java.util.Vector; @@ -68,7 +67,7 @@ import adql.translator.TranslationException; * @see ADQLQueryFactory * * @author Grégory Mantelet (CDS;ARI) - gmantele@ari.uni-heidelberg.de -* @version 1.2 (03/2014) +* @version 1.4 (06/2015) */ public class ADQLParser implements 
ADQLParserConstants { @@ -87,9 +86,6 @@ public class ADQLParser implements ADQLParserConstants { /** The first token of a table/column name. This token is extracted by {@link #Identifier()}. */ private Token currentIdentifierToken = null; - /** List of all allowed coordinate systems. */ - private ArrayList allowedCoordSys = new ArrayList(); - /** * Builds an ADQL parser without a query to parse. */ @@ -327,24 +323,6 @@ public class ADQLParser implements ADQLParserConstants { return Query(); } - public final void addCoordinateSystem(final String coordSys){ - allowedCoordSys.add(coordSys); - } - - public final void setCoordinateSystems(final Collection coordSys){ - allowedCoordSys.clear(); - if (coordSys != null) - allowedCoordSys.addAll(coordSys); - } - - public final boolean isAllowedCoordSys(final String coordSys){ - for(String cs : allowedCoordSys){ - if (cs.equalsIgnoreCase(coordSys)) - return true; - } - return false; - } - public final void setDebug(boolean debug){ if (debug) enable_tracing(); @@ -500,1057 +478,1367 @@ public class ADQLParser implements ADQLParserConstants { * @throws ParseException If the query syntax is incorrect. */ final public ADQLQuery Query() throws ParseException{ - trace_call("Query"); + ADQLQuery q = null; + q = QueryExpression(); + switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ + case 0: + jj_consume_token(0); + break; + case EOQ: + jj_consume_token(EOQ); + break; + default: + jj_la1[0] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + // check the query: + if (queryChecker != null) + queryChecker.check(q); + + { + if (true) + return q; + } + throw new Error("Missing return statement in function"); + } + + final public ADQLQuery QueryExpression() throws ParseException{ + TextPosition endPos = null; try{ - ADQLQuery q = null; - q = QueryExpression(); + // create the query: + query = queryFactory.createQuery(); + stackQuery.push(query); + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); + } + } + Select(); + From(); + endPos = query.getFrom().getPosition(); + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case WHERE: + Where(); + endPos = query.getWhere().getPosition(); + break; + default: + jj_la1[1] = jj_gen; + ; + } + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case GROUP_BY: + GroupBy(); + endPos = query.getGroupBy().getPosition(); + break; + default: + jj_la1[2] = jj_gen; + ; + } + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case HAVING: + Having(); + endPos = query.getHaving().getPosition(); + break; + default: + jj_la1[3] = jj_gen; + ; + } + switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ + case ORDER_BY: + OrderBy(); + endPos = query.getOrderBy().getPosition(); + break; + default: + jj_la1[4] = jj_gen; + ; + } + // set the position of the query: + query.setPosition(new TextPosition(query.getSelect().getPosition(), endPos)); + + // get the previous query (!= null if the current query is a sub-query): + ADQLQuery previousQuery = stackQuery.pop(); + if (stackQuery.isEmpty()) + query = null; + else + query = stackQuery.peek(); + + { + if (true) + return previousQuery; + } + throw new Error("Missing return statement in function"); + } + + final public ADQLQuery SubQueryExpression() throws ParseException{ + ADQLQuery q = null; + Token start, end; + start = jj_consume_token(LEFT_PAR); + q = QueryExpression(); + end = jj_consume_token(RIGHT_PAR); + q.setPosition(new TextPosition(start, end)); + { + if (true) + return q; + } + throw new Error("Missing return statement in function"); + } + + final public void Select() throws ParseException{ + ClauseSelect select = query.getSelect(); + SelectItem item = null; + Token start, t = null; + start = jj_consume_token(SELECT); + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case QUANTIFIER: + t = jj_consume_token(QUANTIFIER); + select.setDistinctColumns(t.image.equalsIgnoreCase("DISTINCT")); + break; + default: + jj_la1[5] = jj_gen; + ; + } + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case TOP: + jj_consume_token(TOP); + t = jj_consume_token(UNSIGNED_INTEGER); + try{ + select.setLimit(Integer.parseInt(t.image)); + }catch(NumberFormatException nfe){ + { + if (true) + throw new ParseException("[l." + t.beginLine + ";c." + t.beginColumn + "] The TOP limit (\u005c"" + t.image + "\u005c") isn't a regular unsigned integer !"); + } + } + break; + default: + jj_la1[6] = jj_gen; + ; + } + item = SelectItem(); + select.add(item); + label_1: while(true){ switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case 0: - jj_consume_token(0); - break; - case EOQ: - jj_consume_token(EOQ); + case COMMA: + ; break; default: - jj_la1[0] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); + jj_la1[7] = jj_gen; + break label_1; } - // check the query: - if (queryChecker != null) - queryChecker.check(q); + jj_consume_token(COMMA); + item = SelectItem(); + select.add(item); + } + TextPosition lastItemPos = query.getSelect().get(query.getSelect().size() - 1).getPosition(); + select.setPosition(new TextPosition(start.beginLine, start.beginColumn, lastItemPos.endLine, lastItemPos.endColumn)); + } + final public SelectItem SelectItem() throws ParseException{ + IdentifierItems identifiers = new IdentifierItems(true); + IdentifierItem id = null, label = null; + ADQLOperand op = null; + SelectItem item; + Token starToken; + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case ASTERISK: + starToken = jj_consume_token(ASTERISK); + item = new SelectAllColumns(query); + item.setPosition(new TextPosition(starToken)); + { + if (true) + return item; + } + break; + default: + jj_la1[11] = jj_gen; + if (jj_2_1(7)){ + id = Identifier(); + jj_consume_token(DOT); + identifiers.append(id); + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case DELIMITED_IDENTIFIER: + case REGULAR_IDENTIFIER: + id = Identifier(); + jj_consume_token(DOT); + identifiers.append(id); + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case DELIMITED_IDENTIFIER: + case REGULAR_IDENTIFIER: + id = Identifier(); + jj_consume_token(DOT); + identifiers.append(id); + break; + default: + jj_la1[8] = jj_gen; + ; + } + break; + default: + jj_la1[9] = jj_gen; + ; + } + starToken = jj_consume_token(ASTERISK); + try{ + item = new SelectAllColumns(queryFactory.createTable(identifiers, null)); + TextPosition firstPos = identifiers.get(0).position; + item.setPosition(new TextPosition(firstPos.beginLine, firstPos.beginColumn, starToken.endLine, (starToken.endColumn < 0) ? 
-1 : (starToken.endColumn + 1))); + { + if (true) + return item; + } + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); + } + } + }else{ + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case LEFT_PAR: + case PLUS: + case MINUS: + case AVG: + case MAX: + case MIN: + case SUM: + case COUNT: + case BOX: + case CENTROID: + case CIRCLE: + case POINT: + case POLYGON: + case REGION: + case CONTAINS: + case INTERSECTS: + case AREA: + case COORD1: + case COORD2: + case COORDSYS: + case DISTANCE: + case ABS: + case CEILING: + case DEGREES: + case EXP: + case FLOOR: + case LOG: + case LOG10: + case MOD: + case PI: + case POWER: + case RADIANS: + case RAND: + case ROUND: + case SQRT: + case TRUNCATE: + case ACOS: + case ASIN: + case ATAN: + case ATAN2: + case COS: + case COT: + case SIN: + case TAN: + case STRING_LITERAL: + case DELIMITED_IDENTIFIER: + case REGULAR_IDENTIFIER: + case SCIENTIFIC_NUMBER: + case UNSIGNED_FLOAT: + case UNSIGNED_INTEGER: + op = ValueExpression(); + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case AS: + jj_consume_token(AS); + label = Identifier(); + break; + default: + jj_la1[10] = jj_gen; + ; + } + break; + default: + jj_la1[12] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + } + } + try{ + item = queryFactory.createSelectItem(op, (label == null) ? 
null : label.identifier); + if (label != null){ + item.setCaseSensitive(label.caseSensitivity); + item.setPosition(new TextPosition(op.getPosition(), label.position)); + }else + item.setPosition(new TextPosition(op.getPosition())); { if (true) - return q; + return item; + } + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("Query"); } + throw new Error("Missing return statement in function"); } - final public ADQLQuery QueryExpression() throws ParseException{ - trace_call("QueryExpression"); + final public void From() throws ParseException{ + FromContent content = null, content2 = null; try{ - TextPosition endPos = null; - try{ - // create the query: - query = queryFactory.createQuery(); - stackQuery.push(query); - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); + jj_consume_token(FROM); + content = TableRef(); + label_2: while(true){ + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case COMMA: + ; + break; + default: + jj_la1[13] = jj_gen; + break label_2; } + jj_consume_token(COMMA); + content2 = TableRef(); + TextPosition startPos = content.getPosition(), endPos = content2.getPosition(); + content = queryFactory.createJoin(JoinType.CROSS, content, content2); + content.setPosition(new TextPosition(startPos, endPos)); } - Select(); - From(); - endPos = query.getFrom().getPosition(); - switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case WHERE: - Where(); - endPos = query.getWhere().getPosition(); - break; - default: - jj_la1[1] = jj_gen; - ; + query.setFrom(content); + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); } + } + } + + final public void Where() throws ParseException{ + ClauseConstraints where = query.getWhere(); + ADQLConstraint condition; + Token start; + start = jj_consume_token(WHERE); + ConditionsList(where); + TextPosition endPosition = where.getPosition(); + where.setPosition(new TextPosition(start.beginLine, start.beginColumn, endPosition.endLine, endPosition.endColumn)); + } + + final public void GroupBy() throws ParseException{ + ClauseADQL groupBy = query.getGroupBy(); + ColumnReference colRef = null; + Token start; + start = jj_consume_token(GROUP_BY); + colRef = ColumnRef(); + groupBy.add(colRef); + label_3: while(true){ switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case GROUP_BY: - GroupBy(); - endPos = query.getGroupBy().getPosition(); - break; - default: - jj_la1[2] = jj_gen; + case COMMA: ; - } - switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case HAVING: - Having(); - endPos = query.getHaving().getPosition(); break; default: - jj_la1[3] = jj_gen; - ; + jj_la1[14] = jj_gen; + break label_3; } + jj_consume_token(COMMA); + colRef = ColumnRef(); + groupBy.add(colRef); + } + groupBy.setPosition(new TextPosition(start.beginLine, start.beginColumn, colRef.getPosition().endLine, colRef.getPosition().endColumn)); + } + + final public void Having() throws ParseException{ + ClauseConstraints having = query.getHaving(); + Token start; + start = jj_consume_token(HAVING); + ConditionsList(having); + TextPosition endPosition = having.getPosition(); + having.setPosition(new TextPosition(start.beginLine, start.beginColumn, endPosition.endLine, endPosition.endColumn)); + } + + final public void OrderBy() throws ParseException{ + ClauseADQL orderBy = query.getOrderBy(); + ADQLOrder order = null; + Token start; + start = jj_consume_token(ORDER_BY); + order = OrderItem(); + orderBy.add(order); + label_4: while(true){ switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case ORDER_BY: - OrderBy(); - endPos = query.getOrderBy().getPosition(); + case COMMA: + ; break; default: - jj_la1[4] = jj_gen; - ; + jj_la1[15] = jj_gen; + break label_4; } - // set the position of the query: - query.setPosition(new TextPosition(query.getSelect().getPosition(), endPos)); + jj_consume_token(COMMA); + order = OrderItem(); + orderBy.add(order); + } + orderBy.setPosition(new TextPosition(start.beginLine, start.beginColumn, order.getPosition().endLine, order.getPosition().endColumn)); + } - // get the previous query (!= null if the current query is a sub-query): - ADQLQuery previousQuery = stackQuery.pop(); - if (stackQuery.isEmpty()) - query = null; - else - query = stackQuery.peek(); + /* *************************** */ + /* COLUMN AND TABLE REFERENCES */ + /* *************************** */ + final public IdentifierItem Identifier() throws ParseException{ + Token t; + switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ + case REGULAR_IDENTIFIER: + t = jj_consume_token(REGULAR_IDENTIFIER); + { + if (true) + return new IdentifierItem(t, false); + } + break; + case DELIMITED_IDENTIFIER: + t = jj_consume_token(DELIMITED_IDENTIFIER); + { + if (true) + return new IdentifierItem(t, true); + } + break; + default: + jj_la1[16] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + throw new Error("Missing return statement in function"); + } - { - if (true) - return previousQuery; - } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("QueryExpression"); + /** + * Extracts the name of a table with its possible catalog and schema prefixes. + * + * @return A {@link IdentifierItems} which contains at most three items: catalogName, schemaName and tableName. + */ + final public IdentifierItems TableName() throws ParseException{ + IdentifierItems identifiers = new IdentifierItems(true); + IdentifierItem id = null; + id = Identifier(); + identifiers.append(id); + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case DOT: + jj_consume_token(DOT); + id = Identifier(); + identifiers.append(id); + break; + default: + jj_la1[17] = jj_gen; + ; + } + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case DOT: + jj_consume_token(DOT); + id = Identifier(); + identifiers.append(id); + break; + default: + jj_la1[18] = jj_gen; + ; + } + { + if (true) + return identifiers; } + throw new Error("Missing return statement in function"); } - final public ADQLQuery SubQueryExpression() throws ParseException{ - trace_call("SubQueryExpression"); + /** + * Extracts the name of a column with its possible catalog, schema and table prefixes. + * + * @return A {@link IdentifierItems} which contains at most four items: catalogName, schemaName, tableName and columnName. 
+ */ + final public IdentifierItems ColumnName() throws ParseException{ + IdentifierItem id; + IdentifierItems table = null, identifiers = new IdentifierItems(false); + id = Identifier(); + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case DOT: + jj_consume_token(DOT); + table = TableName(); + break; + default: + jj_la1[19] = jj_gen; + ; + } + identifiers.append(id); + if (table != null){ + for(int i = 0; i < table.size(); i++) + identifiers.append(table.get(i)); + } + { + if (true) + return identifiers; + } + throw new Error("Missing return statement in function"); + } + + final public ADQLColumn Column() throws ParseException{ + IdentifierItems identifiers; + identifiers = ColumnName(); try{ - ADQLQuery q = null; - Token start, end; - start = jj_consume_token(LEFT_PAR); - q = QueryExpression(); - end = jj_consume_token(RIGHT_PAR); - q.setPosition(new TextPosition(start, end)); { if (true) - return q; + return queryFactory.createColumn(identifiers); + } + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("SubQueryExpression"); } + throw new Error("Missing return statement in function"); } - final public void Select() throws ParseException{ - trace_call("Select"); + final public ColumnReference ColumnRef() throws ParseException{ + IdentifierItems identifiers = null; + Token ind = null; + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case DELIMITED_IDENTIFIER: + case REGULAR_IDENTIFIER: + identifiers = ColumnName(); + break; + case UNSIGNED_INTEGER: + ind = jj_consume_token(UNSIGNED_INTEGER); + break; + default: + jj_la1[20] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } try{ - ClauseSelect select = query.getSelect(); - SelectItem item = null; - Token start, t = null; - start = jj_consume_token(SELECT); - switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case QUANTIFIER: - t = jj_consume_token(QUANTIFIER); - select.setDistinctColumns(t.image.equalsIgnoreCase("DISTINCT")); - break; - default: - jj_la1[5] = jj_gen; - ; + ColumnReference colRef = null; + if (identifiers != null) + colRef = queryFactory.createColRef(identifiers); + else + colRef = queryFactory.createColRef(Integer.parseInt(ind.image), new TextPosition(ind)); + { + if (true) + return colRef; } - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case TOP: - jj_consume_token(TOP); - t = jj_consume_token(UNSIGNED_INTEGER); - try{ - select.setLimit(Integer.parseInt(t.image)); - }catch(NumberFormatException nfe){ - { - if (true) - throw new ParseException("[l." + t.beginLine + ";c." + t.beginColumn + "] The TOP limit (\u005c"" + t.image + "\u005c") isn't a regular unsigned integer !"); - } - } - break; - default: - jj_la1[6] = jj_gen; - ; + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); } - item = SelectItem(); - select.add(item); - label_1: while(true){ + } + throw new Error("Missing return statement in function"); + } + + final public ADQLOrder OrderItem() throws ParseException{ + IdentifierItems identifiers = null; + Token ind = null, desc = null; + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case DELIMITED_IDENTIFIER: + case REGULAR_IDENTIFIER: + identifiers = ColumnName(); + break; + case UNSIGNED_INTEGER: + ind = jj_consume_token(UNSIGNED_INTEGER); + break; + default: + jj_la1[21] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case ASC: + case DESC: switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case COMMA: - ; + case ASC: + jj_consume_token(ASC); + break; + case DESC: + desc = jj_consume_token(DESC); break; default: - jj_la1[7] = jj_gen; - break label_1; + jj_la1[22] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); } - jj_consume_token(COMMA); - item = SelectItem(); - select.add(item); + break; + default: + jj_la1[23] = jj_gen; + ; + } + try{ + ADQLOrder order = null; + if (identifiers != null){ + order = queryFactory.createOrder(identifiers, desc != null); + order.setPosition(identifiers.getPosition()); + }else{ + order = queryFactory.createOrder(Integer.parseInt(ind.image), desc != null); + order.setPosition(new TextPosition(ind)); + } + { + if (true) + return order; + } + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); } - TextPosition lastItemPos = query.getSelect().get(query.getSelect().size() - 1).getPosition(); - select.setPosition(new TextPosition(start.beginLine, start.beginColumn, lastItemPos.endLine, lastItemPos.endColumn)); - }finally{ - trace_return("Select"); } + throw new Error("Missing return statement in function"); } - final public SelectItem SelectItem() throws ParseException{ - trace_call("SelectItem"); + final public FromContent SimpleTableRef() throws ParseException{ + IdentifierItem alias = null; + IdentifierItems identifiers = null; + ADQLQuery subQuery = null; + FromContent content = null; + Token start, end; try{ - IdentifierItems identifiers = new IdentifierItems(true); - IdentifierItem id = null, label = null; - ADQLOperand op = null; - SelectItem item; - Token starToken; switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case ASTERISK: - starToken = jj_consume_token(ASTERISK); - item = new SelectAllColumns(query); - item.setPosition(new TextPosition(starToken)); + case DELIMITED_IDENTIFIER: + case REGULAR_IDENTIFIER: + identifiers = TableName(); + switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ + case AS: + case DELIMITED_IDENTIFIER: + case REGULAR_IDENTIFIER: + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case AS: + jj_consume_token(AS); + break; + default: + jj_la1[24] = jj_gen; + ; + } + alias = Identifier(); + break; + default: + jj_la1[25] = jj_gen; + ; + } + content = queryFactory.createTable(identifiers, alias); + if (alias == null) + content.setPosition(new TextPosition(identifiers.get(0).position, identifiers.get(identifiers.size() - 1).position)); + else + content.setPosition(new TextPosition(identifiers.get(0).position, alias.position)); { if (true) - return item; + return content; } break; default: - jj_la1[11] = jj_gen; - if (jj_2_1(7)){ - id = Identifier(); - jj_consume_token(DOT); - identifiers.append(id); + jj_la1[27] = jj_gen; + if (jj_2_2(2)){ + subQuery = SubQueryExpression(); switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case DELIMITED_IDENTIFIER: - case REGULAR_IDENTIFIER: - id = Identifier(); - jj_consume_token(DOT); - identifiers.append(id); - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case DELIMITED_IDENTIFIER: - case REGULAR_IDENTIFIER: - id = Identifier(); - jj_consume_token(DOT); - identifiers.append(id); - break; - default: - jj_la1[8] = jj_gen; - ; - } + case AS: + jj_consume_token(AS); break; default: - jj_la1[9] = jj_gen; + jj_la1[26] = jj_gen; ; } - starToken = jj_consume_token(ASTERISK); - try{ - item = new SelectAllColumns(queryFactory.createTable(identifiers, null)); - TextPosition firstPos = identifiers.get(0).position; - item.setPosition(new TextPosition(firstPos.beginLine, firstPos.beginColumn, starToken.endLine, (starToken.endColumn < 0) ? 
-1 : (starToken.endColumn + 1))); - { - if (true) - return item; - } - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); - } + alias = Identifier(); + content = queryFactory.createTable(subQuery, alias); + if (alias == null) + content.setPosition(new TextPosition(subQuery.getPosition())); + else + content.setPosition(new TextPosition(subQuery.getPosition(), alias.position)); + { + if (true) + return content; } }else{ switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ case LEFT_PAR: - case PLUS: - case MINUS: - case AVG: - case MAX: - case MIN: - case SUM: - case COUNT: - case BOX: - case CENTROID: - case CIRCLE: - case POINT: - case POLYGON: - case REGION: - case CONTAINS: - case INTERSECTS: - case AREA: - case COORD1: - case COORD2: - case COORDSYS: - case DISTANCE: - case ABS: - case CEILING: - case DEGREES: - case EXP: - case FLOOR: - case LOG: - case LOG10: - case MOD: - case PI: - case POWER: - case RADIANS: - case RAND: - case ROUND: - case SQRT: - case TRUNCATE: - case ACOS: - case ASIN: - case ATAN: - case ATAN2: - case COS: - case COT: - case SIN: - case TAN: - case STRING_LITERAL: - case DELIMITED_IDENTIFIER: - case REGULAR_IDENTIFIER: - case SCIENTIFIC_NUMBER: - case UNSIGNED_FLOAT: - case UNSIGNED_INTEGER: - op = ValueExpression(); - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case AS: - jj_consume_token(AS); - label = Identifier(); - break; - default: - jj_la1[10] = jj_gen; - ; + start = jj_consume_token(LEFT_PAR); + content = JoinedTable(); + end = jj_consume_token(RIGHT_PAR); + content.setPosition(new TextPosition(start, end)); + { + if (true) + return content; } break; default: - jj_la1[12] = jj_gen; + jj_la1[28] = jj_gen; jj_consume_token(-1); throw new ParseException(); } } } - try{ - item = queryFactory.createSelectItem(op, (label == null) ? 
null : label.identifier); - if (label != null){ - item.setCaseSensitive(label.caseSensitivity); - item.setPosition(new TextPosition(op.getPosition(), label.position)); - }else - item.setPosition(new TextPosition(op.getPosition())); - { - if (true) - return item; - } - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); - } + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("SelectItem"); } + throw new Error("Missing return statement in function"); } - final public void From() throws ParseException{ - trace_call("From"); - try{ - FromContent content = null, content2 = null; - try{ - jj_consume_token(FROM); - content = TableRef(); - label_2: while(true){ - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case COMMA: - ; - break; - default: - jj_la1[13] = jj_gen; - break label_2; - } - jj_consume_token(COMMA); - content2 = TableRef(); - TextPosition startPos = content.getPosition(), endPos = content2.getPosition(); - content = queryFactory.createJoin(JoinType.CROSS, content, content2); - content.setPosition(new TextPosition(startPos, endPos)); - } - query.setFrom(content); - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); - } + final public FromContent TableRef() throws ParseException{ + FromContent content; + content = SimpleTableRef(); + label_5: while(true){ + if (jj_2_3(2)){ + ; + }else{ + break label_5; } - }finally{ - trace_return("From"); + content = JoinSpecification(content); } - } - - final public void Where() throws ParseException{ - trace_call("Where"); - try{ - ClauseConstraints where = query.getWhere(); - ADQLConstraint condition; - Token start; - start = jj_consume_token(WHERE); - ConditionsList(where); - TextPosition endPosition = where.getPosition(); - where.setPosition(new TextPosition(start.beginLine, start.beginColumn, endPosition.endLine, endPosition.endColumn)); - }finally{ - 
trace_return("Where"); + { + if (true) + return content; } + throw new Error("Missing return statement in function"); } - final public void GroupBy() throws ParseException{ - trace_call("GroupBy"); - try{ - ClauseADQL groupBy = query.getGroupBy(); - ColumnReference colRef = null; - Token start; - start = jj_consume_token(GROUP_BY); - colRef = ColumnRef(); - groupBy.add(colRef); - label_3: while(true){ - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case COMMA: - ; - break; - default: - jj_la1[14] = jj_gen; - break label_3; - } - jj_consume_token(COMMA); - colRef = ColumnRef(); - groupBy.add(colRef); + final public FromContent JoinedTable() throws ParseException{ + FromContent content; + content = SimpleTableRef(); + label_6: while(true){ + content = JoinSpecification(content); + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case NATURAL: + case INNER: + case RIGHT: + case LEFT: + case FULL: + case JOIN: + ; + break; + default: + jj_la1[29] = jj_gen; + break label_6; } - groupBy.setPosition(new TextPosition(start.beginLine, start.beginColumn, colRef.getPosition().endLine, colRef.getPosition().endColumn)); - }finally{ - trace_return("GroupBy"); - } - } - - final public void Having() throws ParseException{ - trace_call("Having"); - try{ - ClauseConstraints having = query.getHaving(); - Token start; - start = jj_consume_token(HAVING); - ConditionsList(having); - TextPosition endPosition = having.getPosition(); - having.setPosition(new TextPosition(start.beginLine, start.beginColumn, endPosition.endLine, endPosition.endColumn)); - }finally{ - trace_return("Having"); } - } - - final public void OrderBy() throws ParseException{ - trace_call("OrderBy"); - try{ - ClauseADQL orderBy = query.getOrderBy(); - ADQLOrder order = null; - Token start; - start = jj_consume_token(ORDER_BY); - order = OrderItem(); - orderBy.add(order); - label_4: while(true){ - switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case COMMA: - ; - break; - default: - jj_la1[15] = jj_gen; - break label_4; - } - jj_consume_token(COMMA); - order = OrderItem(); - orderBy.add(order); - } - orderBy.setPosition(new TextPosition(start.beginLine, start.beginColumn, order.getPosition().endLine, order.getPosition().endColumn)); - }finally{ - trace_return("OrderBy"); + { + if (true) + return content; } + throw new Error("Missing return statement in function"); } - /* *************************** */ - /* COLUMN AND TABLE REFERENCES */ - /* *************************** */ - final public IdentifierItem Identifier() throws ParseException{ - trace_call("Identifier"); + final public ADQLJoin JoinSpecification(FromContent leftTable) throws ParseException{ + boolean natural = false; + JoinType type = JoinType.INNER; + ClauseConstraints condition = new ClauseConstraints("ON"); + ArrayList lstColumns = new ArrayList(); + IdentifierItem id; + FromContent rightTable; + ADQLJoin join; + Token lastPar; try{ - Token t; switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case REGULAR_IDENTIFIER: - t = jj_consume_token(REGULAR_IDENTIFIER); + case NATURAL: + jj_consume_token(NATURAL); + natural = true; + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case INNER: + case RIGHT: + case LEFT: + case FULL: + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case INNER: + jj_consume_token(INNER); + break; + case RIGHT: + case LEFT: + case FULL: + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case LEFT: + jj_consume_token(LEFT); + type = JoinType.OUTER_LEFT; + break; + case RIGHT: + jj_consume_token(RIGHT); + type = JoinType.OUTER_RIGHT; + break; + case FULL: + jj_consume_token(FULL); + type = JoinType.OUTER_FULL; + break; + default: + jj_la1[30] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ + case OUTER: + jj_consume_token(OUTER); + break; + default: + jj_la1[31] = jj_gen; + ; + } + break; + default: + jj_la1[32] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + break; + default: + jj_la1[33] = jj_gen; + ; + } + jj_consume_token(JOIN); + rightTable = TableRef(); + join = queryFactory.createJoin(type, leftTable, rightTable); + join.setPosition(new TextPosition(leftTable.getPosition(), rightTable.getPosition())); { if (true) - return new IdentifierItem(t, false); + return join; } break; - case DELIMITED_IDENTIFIER: - t = jj_consume_token(DELIMITED_IDENTIFIER); - { - if (true) - return new IdentifierItem(t, true); + case INNER: + case RIGHT: + case LEFT: + case FULL: + case JOIN: + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case INNER: + case RIGHT: + case LEFT: + case FULL: + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case INNER: + jj_consume_token(INNER); + break; + case RIGHT: + case LEFT: + case FULL: + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case LEFT: + jj_consume_token(LEFT); + type = JoinType.OUTER_LEFT; + break; + case RIGHT: + jj_consume_token(RIGHT); + type = JoinType.OUTER_RIGHT; + break; + case FULL: + jj_consume_token(FULL); + type = JoinType.OUTER_FULL; + break; + default: + jj_la1[34] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case OUTER: + jj_consume_token(OUTER); + break; + default: + jj_la1[35] = jj_gen; + ; + } + break; + default: + jj_la1[36] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + break; + default: + jj_la1[37] = jj_gen; + ; + } + jj_consume_token(JOIN); + rightTable = TableRef(); + switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ + case ON: + jj_consume_token(ON); + ConditionsList(condition); + join = queryFactory.createJoin(type, leftTable, rightTable, condition); + join.setPosition(new TextPosition(leftTable.getPosition(), condition.getPosition())); + { + if (true) + return join; + } + break; + case USING: + jj_consume_token(USING); + jj_consume_token(LEFT_PAR); + id = Identifier(); + lstColumns.add(queryFactory.createColumn(id)); + label_7: while(true){ + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case COMMA: + ; + break; + default: + jj_la1[38] = jj_gen; + break label_7; + } + jj_consume_token(COMMA); + id = Identifier(); + lstColumns.add(queryFactory.createColumn(id)); + } + lastPar = jj_consume_token(RIGHT_PAR); + join = queryFactory.createJoin(type, leftTable, rightTable, lstColumns); + join.setPosition(new TextPosition(leftTable.getPosition().beginLine, leftTable.getPosition().beginColumn, lastPar.endLine, (lastPar.endColumn < 0) ? -1 : (lastPar.endColumn + 1))); + { + if (true) + return join; + } + break; + default: + jj_la1[39] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); } break; default: - jj_la1[16] = jj_gen; + jj_la1[40] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("Identifier"); + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); + } } + throw new Error("Missing return statement in function"); } - /** - * Extracts the name of a table with its possible catalog and schema prefixes. - * - * @return A {@link IdentifierItems} which contains at most three items: catalogName, schemaName and tableName. 
- */ - final public IdentifierItems TableName() throws ParseException{ - trace_call("TableName"); - try{ - IdentifierItems identifiers = new IdentifierItems(true); - IdentifierItem id = null; - id = Identifier(); - identifiers.append(id); + /* ****** */ + /* STRING */ + /* ****** */ + final public StringConstant String() throws ParseException{ + Token t; + String str = ""; + StringConstant cst; + label_8: while(true){ + t = jj_consume_token(STRING_LITERAL); + str += t.image; switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case DOT: - jj_consume_token(DOT); - id = Identifier(); - identifiers.append(id); - break; - default: - jj_la1[17] = jj_gen; + case STRING_LITERAL: ; - } - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case DOT: - jj_consume_token(DOT); - id = Identifier(); - identifiers.append(id); break; default: - jj_la1[18] = jj_gen; - ; + jj_la1[41] = jj_gen; + break label_8; } + } + try{ + str = (str != null) ? str.substring(1, str.length() - 1) : str; + cst = queryFactory.createStringConstant(str); + cst.setPosition(new TextPosition(t)); { if (true) - return identifiers; + return cst; + } + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("TableName"); } + throw new Error("Missing return statement in function"); } - /** - * Extracts the name of a column with its possible catalog, schema and table prefixes. - * - * @return A {@link IdentifierItems} which contains at most four items: catalogName, schemaName, tableName and columnName. - */ - final public IdentifierItems ColumnName() throws ParseException{ - trace_call("ColumnName"); + /* ************* */ + /* NUMERIC TYPES */ + /* ************* */ + final public NumericConstant UnsignedNumeric() throws ParseException{ + Token t; + NumericConstant cst; + switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ + case SCIENTIFIC_NUMBER: + t = jj_consume_token(SCIENTIFIC_NUMBER); + break; + case UNSIGNED_FLOAT: + t = jj_consume_token(UNSIGNED_FLOAT); + break; + case UNSIGNED_INTEGER: + t = jj_consume_token(UNSIGNED_INTEGER); + break; + default: + jj_la1[42] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } try{ - IdentifierItem id; - IdentifierItems table = null, identifiers = new IdentifierItems(false); - id = Identifier(); - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case DOT: - jj_consume_token(DOT); - table = TableName(); - break; - default: - jj_la1[19] = jj_gen; - ; - } - identifiers.append(id); - if (table != null){ - for(int i = 0; i < table.size(); i++) - identifiers.append(table.get(i)); + cst = queryFactory.createNumericConstant(t.image); + cst.setPosition(new TextPosition(t)); + { + if (true) + return cst; } + }catch(Exception ex){ { if (true) - return identifiers; + throw generateParseException(ex); } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("ColumnName"); } + throw new Error("Missing return statement in function"); } - final public ADQLColumn Column() throws ParseException{ - trace_call("Column"); + final public NumericConstant UnsignedFloat() throws ParseException{ + Token t; + NumericConstant cst; + switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ + case UNSIGNED_INTEGER: + t = jj_consume_token(UNSIGNED_INTEGER); + break; + case UNSIGNED_FLOAT: + t = jj_consume_token(UNSIGNED_FLOAT); + break; + default: + jj_la1[43] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } try{ - IdentifierItems identifiers; - identifiers = ColumnName(); - try{ - { - if (true) - return queryFactory.createColumn(identifiers); - } - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); + cst = queryFactory.createNumericConstant(t.image); + cst.setPosition(new TextPosition(t)); + { + if (true) + return cst; + } + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); + } + } + throw new Error("Missing return statement in function"); + } + + final public NumericConstant SignedInteger() throws ParseException{ + Token sign = null, number; + NumericConstant cst; + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case PLUS: + case MINUS: + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case PLUS: + sign = jj_consume_token(PLUS); + break; + case MINUS: + sign = jj_consume_token(MINUS); + break; + default: + jj_la1[44] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); } + break; + default: + jj_la1[45] = jj_gen; + ; + } + number = jj_consume_token(UNSIGNED_INTEGER); + try{ + if (sign == null){ + cst = queryFactory.createNumericConstant(number.image); + cst.setPosition(new TextPosition(number)); + }else{ + cst = queryFactory.createNumericConstant(sign.image + number.image); + cst.setPosition(new TextPosition(sign, number)); + } + { + if (true) + return cst; + } + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("Column"); } + throw new Error("Missing return statement in function"); } - final public ColumnReference ColumnRef() throws ParseException{ - trace_call("ColumnRef"); + /* *********** */ + /* EXPRESSIONS */ + /* *********** */ + final 
public ADQLOperand NumericValueExpressionPrimary() throws ParseException{ + String expr; + ADQLColumn column; + ADQLOperand op; + Token left, right; try{ - IdentifierItems identifiers = null; - Token ind = null; switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case SCIENTIFIC_NUMBER: + case UNSIGNED_FLOAT: + case UNSIGNED_INTEGER: + // unsigned_value_specification + op = UnsignedNumeric(); + { + if (true) + return op; + } + break; case DELIMITED_IDENTIFIER: case REGULAR_IDENTIFIER: - identifiers = ColumnName(); + column = Column(); + column.setExpectedType('N'); + { + if (true) + return column; + } break; - case UNSIGNED_INTEGER: - ind = jj_consume_token(UNSIGNED_INTEGER); + case AVG: + case MAX: + case MIN: + case SUM: + case COUNT: + op = SqlFunction(); + { + if (true) + return op; + } + break; + case LEFT_PAR: + left = jj_consume_token(LEFT_PAR); + op = NumericExpression(); + right = jj_consume_token(RIGHT_PAR); + WrappedOperand wop = queryFactory.createWrappedOperand(op); + wop.setPosition(new TextPosition(left, right)); + { + if (true) + return wop; + } break; default: - jj_la1[20] = jj_gen; + jj_la1[46] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - try{ - ColumnReference colRef = null; - if (identifiers != null) - colRef = queryFactory.createColRef(identifiers); - else - colRef = queryFactory.createColRef(Integer.parseInt(ind.image), new TextPosition(ind)); - { - if (true) - return colRef; - } - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); - } + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("ColumnRef"); } + throw new Error("Missing return statement in function"); } - final public ADQLOrder OrderItem() throws ParseException{ - trace_call("OrderItem"); + final public ADQLOperand StringValueExpressionPrimary() throws ParseException{ + StringConstant expr; + ADQLColumn column; + ADQLOperand op; try{ - 
IdentifierItems identifiers = null; - Token ind = null, desc = null; switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case STRING_LITERAL: + // string + expr = String(); + { + if (true) + return expr; + } + break; case DELIMITED_IDENTIFIER: case REGULAR_IDENTIFIER: - identifiers = ColumnName(); + column = Column(); + column.setExpectedType('S'); + { + if (true) + return column; + } break; - case UNSIGNED_INTEGER: - ind = jj_consume_token(UNSIGNED_INTEGER); + case LEFT_PAR: + jj_consume_token(LEFT_PAR); + op = StringExpression(); + jj_consume_token(RIGHT_PAR); + { + if (true) + return queryFactory.createWrappedOperand(op); + } break; default: - jj_la1[21] = jj_gen; + jj_la1[47] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case ASC: - case DESC: - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case ASC: - jj_consume_token(ASC); - break; - case DESC: - desc = jj_consume_token(DESC); - break; - default: - jj_la1[22] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - break; - default: - jj_la1[23] = jj_gen; - ; - } - try{ - ADQLOrder order = null; - if (identifiers != null){ - order = queryFactory.createOrder(identifiers, desc != null); - order.setPosition(identifiers.getPosition()); - }else{ - order = queryFactory.createOrder(Integer.parseInt(ind.image), desc != null); - order.setPosition(new TextPosition(ind)); - } - { - if (true) - return order; - } - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); - } + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("OrderItem"); } + throw new Error("Missing return statement in function"); } - final public FromContent SimpleTableRef() throws ParseException{ - trace_call("SimpleTableRef"); + final public ADQLOperand ValueExpression() throws ParseException{ + ADQLOperand valueExpr = null; try{ - IdentifierItem alias = 
null; - IdentifierItems identifiers = null; - ADQLQuery subQuery = null; - FromContent content = null; - Token start, end; - try{ + if (jj_2_4(2147483647)){ + valueExpr = NumericExpression(); + }else if (jj_2_5(2147483647)){ + valueExpr = StringExpression(); + }else if (jj_2_6(2147483647)){ + jj_consume_token(LEFT_PAR); + valueExpr = ValueExpression(); + jj_consume_token(RIGHT_PAR); + valueExpr = queryFactory.createWrappedOperand(valueExpr); + }else if (jj_2_7(2147483647)){ + valueExpr = UserDefinedFunction(); + }else{ switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case DELIMITED_IDENTIFIER: - case REGULAR_IDENTIFIER: - identifiers = TableName(); - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case AS: - case DELIMITED_IDENTIFIER: - case REGULAR_IDENTIFIER: - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case AS: - jj_consume_token(AS); - break; - default: - jj_la1[24] = jj_gen; - ; - } - alias = Identifier(); - break; - default: - jj_la1[25] = jj_gen; - ; - } - content = queryFactory.createTable(identifiers, alias); - if (alias == null) - content.setPosition(new TextPosition(identifiers.get(0).position, identifiers.get(identifiers.size() - 1).position)); - else - content.setPosition(new TextPosition(identifiers.get(0).position, alias.position)); - { - if (true) - return content; - } + case BOX: + case CENTROID: + case CIRCLE: + case POINT: + case POLYGON: + case REGION: + valueExpr = GeometryValueFunction(); break; default: - jj_la1[27] = jj_gen; - if (jj_2_2(2)){ - subQuery = SubQueryExpression(); - switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case AS: - jj_consume_token(AS); - break; - default: - jj_la1[26] = jj_gen; - ; - } - alias = Identifier(); - content = queryFactory.createTable(subQuery, alias); - if (alias == null) - content.setPosition(new TextPosition(subQuery.getPosition())); - else - content.setPosition(new TextPosition(subQuery.getPosition(), alias.position)); - { - if (true) - return content; - } + jj_la1[48] = jj_gen; + if (jj_2_8(2147483647)){ + valueExpr = Column(); + }else if (jj_2_9(2147483647)){ + valueExpr = StringFactor(); }else{ switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ case LEFT_PAR: - start = jj_consume_token(LEFT_PAR); - content = JoinedTable(); - end = jj_consume_token(RIGHT_PAR); - content.setPosition(new TextPosition(start, end)); - { - if (true) - return content; - } + case PLUS: + case MINUS: + case AVG: + case MAX: + case MIN: + case SUM: + case COUNT: + case CONTAINS: + case INTERSECTS: + case AREA: + case COORD1: + case COORD2: + case DISTANCE: + case ABS: + case CEILING: + case DEGREES: + case EXP: + case FLOOR: + case LOG: + case LOG10: + case MOD: + case PI: + case POWER: + case RADIANS: + case RAND: + case ROUND: + case SQRT: + case TRUNCATE: + case ACOS: + case ASIN: + case ATAN: + case ATAN2: + case COS: + case COT: + case SIN: + case TAN: + case DELIMITED_IDENTIFIER: + case REGULAR_IDENTIFIER: + case SCIENTIFIC_NUMBER: + case UNSIGNED_FLOAT: + case UNSIGNED_INTEGER: + valueExpr = Factor(); break; default: - jj_la1[28] = jj_gen; + jj_la1[49] = jj_gen; jj_consume_token(-1); throw new ParseException(); } } } - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); - } - } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("SimpleTableRef"); - } - } - - final public FromContent TableRef() throws ParseException{ - trace_call("TableRef"); - try{ - FromContent content; - content = SimpleTableRef(); - label_5: while(true){ - if (jj_2_3(2)){ - ; - }else{ - break label_5; - } - content = 
JoinSpecification(content); } { if (true) - return content; - } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("TableRef"); - } - } - - final public FromContent JoinedTable() throws ParseException{ - trace_call("JoinedTable"); - try{ - FromContent content; - content = SimpleTableRef(); - label_6: while(true){ - content = JoinSpecification(content); - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case NATURAL: - case INNER: - case RIGHT: - case LEFT: - case FULL: - case JOIN: - ; - break; - default: - jj_la1[29] = jj_gen; - break label_6; - } + return valueExpr; } + }catch(Exception ex){ { if (true) - return content; + throw generateParseException(ex); } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("JoinedTable"); } + throw new Error("Missing return statement in function"); } - final public ADQLJoin JoinSpecification(FromContent leftTable) throws ParseException{ - trace_call("JoinSpecification"); - try{ - boolean natural = false; - JoinType type = JoinType.INNER; - ClauseConstraints condition = new ClauseConstraints("ON"); - ArrayList lstColumns = new ArrayList(); - IdentifierItem id; - FromContent rightTable; - ADQLJoin join; - Token lastPar; - try{ + final public ADQLOperand NumericExpression() throws ParseException{ + Token sign = null; + ADQLOperand leftOp, rightOp = null; + leftOp = NumericTerm(); + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case PLUS: + case MINUS: switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case NATURAL: - jj_consume_token(NATURAL); - natural = true; - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case INNER: - case RIGHT: - case LEFT: - case FULL: - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case INNER: - jj_consume_token(INNER); - break; - case RIGHT: - case LEFT: - case FULL: - switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case LEFT: - jj_consume_token(LEFT); - type = JoinType.OUTER_LEFT; - break; - case RIGHT: - jj_consume_token(RIGHT); - type = JoinType.OUTER_RIGHT; - break; - case FULL: - jj_consume_token(FULL); - type = JoinType.OUTER_FULL; - break; - default: - jj_la1[30] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case OUTER: - jj_consume_token(OUTER); - break; - default: - jj_la1[31] = jj_gen; - ; - } - break; - default: - jj_la1[32] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - break; - default: - jj_la1[33] = jj_gen; - ; - } - jj_consume_token(JOIN); - rightTable = TableRef(); - join = queryFactory.createJoin(type, leftTable, rightTable); - join.setPosition(new TextPosition(leftTable.getPosition(), rightTable.getPosition())); - { - if (true) - return join; - } + case PLUS: + sign = jj_consume_token(PLUS); break; - case INNER: - case RIGHT: - case LEFT: - case FULL: - case JOIN: - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case INNER: - case RIGHT: - case LEFT: - case FULL: - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case INNER: - jj_consume_token(INNER); - break; - case RIGHT: - case LEFT: - case FULL: - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case LEFT: - jj_consume_token(LEFT); - type = JoinType.OUTER_LEFT; - break; - case RIGHT: - jj_consume_token(RIGHT); - type = JoinType.OUTER_RIGHT; - break; - case FULL: - jj_consume_token(FULL); - type = JoinType.OUTER_FULL; - break; - default: - jj_la1[34] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case OUTER: - jj_consume_token(OUTER); - break; - default: - jj_la1[35] = jj_gen; - ; - } - break; - default: - jj_la1[36] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - break; - default: - jj_la1[37] = jj_gen; - ; - } - jj_consume_token(JOIN); - rightTable = TableRef(); - switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case ON: - jj_consume_token(ON); - ConditionsList(condition); - join = queryFactory.createJoin(type, leftTable, rightTable, condition); - join.setPosition(new TextPosition(leftTable.getPosition(), condition.getPosition())); - { - if (true) - return join; - } - break; - case USING: - jj_consume_token(USING); - jj_consume_token(LEFT_PAR); - id = Identifier(); - lstColumns.add(queryFactory.createColumn(id)); - label_7: while(true){ - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case COMMA: - ; - break; - default: - jj_la1[38] = jj_gen; - break label_7; - } - jj_consume_token(COMMA); - id = Identifier(); - lstColumns.add(queryFactory.createColumn(id)); - } - lastPar = jj_consume_token(RIGHT_PAR); - join = queryFactory.createJoin(type, leftTable, rightTable, lstColumns); - join.setPosition(new TextPosition(leftTable.getPosition().beginLine, leftTable.getPosition().beginColumn, lastPar.endLine, (lastPar.endColumn < 0) ? -1 : (lastPar.endColumn + 1))); - { - if (true) - return join; - } - break; - default: - jj_la1[39] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } + case MINUS: + sign = jj_consume_token(MINUS); break; default: - jj_la1[40] = jj_gen; + jj_la1[50] = jj_gen; jj_consume_token(-1); throw new ParseException(); } + rightOp = NumericExpression(); + break; + default: + jj_la1[51] = jj_gen; + ; + } + if (sign == null){ + if (true) + return leftOp; + }else{ + try{ + Operation operation = queryFactory.createOperation(leftOp, OperationType.getOperator(sign.image), rightOp); + operation.setPosition(new TextPosition(leftOp.getPosition(), rightOp.getPosition())); + { + if (true) + return operation; + } }catch(Exception ex){ { if (true) throw generateParseException(ex); } } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("JoinSpecification"); } + throw new Error("Missing return statement in function"); } - /* ****** */ - /* STRING */ - /* ****** */ - final public StringConstant String() 
throws ParseException{ - trace_call("String"); - try{ - Token t; - String str = ""; - StringConstant cst; - label_8: while(true){ - t = jj_consume_token(STRING_LITERAL); - str += t.image; + final public ADQLOperand NumericTerm() throws ParseException{ + Token sign = null; + ADQLOperand leftOp, rightOp = null; + leftOp = Factor(); + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case ASTERISK: + case DIVIDE: switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case STRING_LITERAL: - ; + case ASTERISK: + sign = jj_consume_token(ASTERISK); + break; + case DIVIDE: + sign = jj_consume_token(DIVIDE); break; default: - jj_la1[41] = jj_gen; - break label_8; + jj_la1[52] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); } - } + rightOp = NumericTerm(); + break; + default: + jj_la1[53] = jj_gen; + ; + } + if (sign == null){ + if (true) + return leftOp; + }else{ try{ - str = (str != null) ? str.substring(1, str.length() - 1) : str; - cst = queryFactory.createStringConstant(str); - cst.setPosition(new TextPosition(t)); + Operation operation = queryFactory.createOperation(leftOp, OperationType.getOperator(sign.image), rightOp); + operation.setPosition(new TextPosition(leftOp.getPosition(), rightOp.getPosition())); { if (true) - return cst; + return operation; } }catch(Exception ex){ { @@ -1558,236 +1846,367 @@ public class ADQLParser implements ADQLParserConstants { throw generateParseException(ex); } } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("String"); } + throw new Error("Missing return statement in function"); } - /* ************* */ - /* NUMERIC TYPES */ - /* ************* */ - final public NumericConstant UnsignedNumeric() throws ParseException{ - trace_call("UnsignedNumeric"); - try{ - Token t; - NumericConstant cst; + final public ADQLOperand Factor() throws ParseException{ + boolean negative = false; + Token minusSign = null; + ADQLOperand op; + switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ + case PLUS: + case MINUS: + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case PLUS: + jj_consume_token(PLUS); + break; + case MINUS: + jj_consume_token(MINUS); + negative = true; + break; + default: + jj_la1[54] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + break; + default: + jj_la1[55] = jj_gen; + ; + } + if (jj_2_10(2)){ + op = NumericFunction(); + }else{ switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case LEFT_PAR: + case AVG: + case MAX: + case MIN: + case SUM: + case COUNT: + case DELIMITED_IDENTIFIER: + case REGULAR_IDENTIFIER: case SCIENTIFIC_NUMBER: - t = jj_consume_token(SCIENTIFIC_NUMBER); - break; case UNSIGNED_FLOAT: - t = jj_consume_token(UNSIGNED_FLOAT); - break; case UNSIGNED_INTEGER: - t = jj_consume_token(UNSIGNED_INTEGER); + op = NumericValueExpressionPrimary(); break; default: - jj_la1[42] = jj_gen; + jj_la1[56] = jj_gen; jj_consume_token(-1); throw new ParseException(); } + } + if (negative){ try{ - cst = queryFactory.createNumericConstant(t.image); - cst.setPosition(new TextPosition(t)); - { - if (true) - return cst; - } + op = queryFactory.createNegativeOperand(op); + NegativeOperand negativeOp = (NegativeOperand)op; + negativeOp.setPosition(new TextPosition(minusSign.beginLine, minusSign.beginColumn, negativeOp.getPosition().endLine, negativeOp.getPosition().endColumn)); }catch(Exception ex){ { if (true) throw generateParseException(ex); } } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("UnsignedNumeric"); } + + { + if (true) + return op; + } + throw new Error("Missing return statement in function"); } - final public NumericConstant UnsignedFloat() throws ParseException{ - trace_call("UnsignedFloat"); - try{ - Token t; - NumericConstant cst; + final public ADQLOperand StringExpression() throws ParseException{ + ADQLOperand leftOp; + ADQLOperand rightOp = null; + leftOp = StringFactor(); + label_9: while(true){ switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case UNSIGNED_INTEGER: - t = jj_consume_token(UNSIGNED_INTEGER); - break; - case UNSIGNED_FLOAT: - t = jj_consume_token(UNSIGNED_FLOAT); + case CONCAT: + ; break; default: - jj_la1[43] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); + jj_la1[57] = jj_gen; + break label_9; } - try{ - cst = queryFactory.createNumericConstant(t.image); - cst.setPosition(new TextPosition(t)); - { - if (true) - return cst; - } - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); + jj_consume_token(CONCAT); + rightOp = StringFactor(); + if (!(leftOp instanceof Concatenation)){ + try{ + ADQLOperand temp = leftOp; + leftOp = queryFactory.createConcatenation(); + ((Concatenation)leftOp).add(temp); + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); + } } } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("UnsignedFloat"); + ((Concatenation)leftOp).add(rightOp); + } + if (leftOp instanceof Concatenation){ + Concatenation concat = (Concatenation)leftOp; + concat.setPosition(new TextPosition(concat.get(0).getPosition(), concat.get(concat.size() - 1).getPosition())); + } + { + if (true) + return leftOp; } + throw new Error("Missing return statement in function"); } - final public NumericConstant SignedInteger() throws ParseException{ - trace_call("SignedInteger"); - try{ - Token sign = null, number; - NumericConstant cst; - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case PLUS: - case MINUS: + final public ADQLOperand StringFactor() throws ParseException{ + ADQLOperand op; + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case COORDSYS: + op = ExtractCoordSys(); + break; + default: + jj_la1[58] = jj_gen; + if (jj_2_11(2)){ + op = UserDefinedFunction(); + ((UserDefinedFunction)op).setExpectedType('S'); + }else{ switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case PLUS: - sign = jj_consume_token(PLUS); - break; - case MINUS: - sign = jj_consume_token(MINUS); + case LEFT_PAR: + case STRING_LITERAL: + case DELIMITED_IDENTIFIER: + case REGULAR_IDENTIFIER: + op = StringValueExpressionPrimary(); break; default: - jj_la1[44] = jj_gen; + jj_la1[59] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - break; - default: - jj_la1[45] = jj_gen; - ; - } - number = jj_consume_token(UNSIGNED_INTEGER); - try{ - if (sign == null){ - cst = queryFactory.createNumericConstant(number.image); - cst.setPosition(new TextPosition(number)); - }else{ - cst = queryFactory.createNumericConstant(sign.image + number.image); - cst.setPosition(new TextPosition(sign, number)); - } - { - if (true) - return cst; - } - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); } + } + { + if (true) + return op; + } + throw new Error("Missing return statement in function"); + } + + final public GeometryValue GeometryExpression() throws ParseException{ + ADQLColumn col = null; + GeometryFunction gf = null; + switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ + case DELIMITED_IDENTIFIER: + case REGULAR_IDENTIFIER: + col = Column(); + break; + case BOX: + case CENTROID: + case CIRCLE: + case POINT: + case POLYGON: + case REGION: + gf = GeometryValueFunction(); + break; + default: + jj_la1[60] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + if (col != null){ + col.setExpectedType('G'); + { + if (true) + return new GeometryValue(col); } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("SignedInteger"); + }else{ + if (true) + return new GeometryValue(gf); } + throw new Error("Missing return statement in function"); } - /* *********** */ - /* EXPRESSIONS */ - /* *********** */ - final public ADQLOperand ValueExpressionPrimary() throws ParseException{ - trace_call("ValueExpressionPrimary"); + /* ********************************** */ + /* BOOLEAN EXPRESSIONS (WHERE clause) */ + /* ********************************** */ + final public ClauseConstraints ConditionsList(ClauseConstraints clause) throws ParseException{ + ADQLConstraint constraint = null; + Token op = null; + boolean notOp = false; try{ - ADQLColumn column; - ADQLOperand op; - Token left, right; - try{ + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case NOT: + op = jj_consume_token(NOT); + notOp = true; + break; + default: + jj_la1[61] = jj_gen; + ; + } + constraint = Constraint(); + if (notOp){ + TextPosition oldPos = constraint.getPosition(); + constraint = queryFactory.createNot(constraint); + ((NotConstraint)constraint).setPosition(new TextPosition(op.beginLine, op.beginColumn, oldPos.endLine, oldPos.endColumn)); + } + notOp = false; + + if (clause instanceof ADQLConstraint) + clause.add(constraint); + else + clause.add(constraint); + label_10: while(true){ switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case SCIENTIFIC_NUMBER: - case UNSIGNED_FLOAT: - case UNSIGNED_INTEGER: - // unsigned_value_specification - op = UnsignedNumeric(); - { - if (true) - return op; - } - break; - case STRING_LITERAL: - op = String(); - { - if (true) - return op; - } - break; - case DELIMITED_IDENTIFIER: - case REGULAR_IDENTIFIER: - column = Column(); - { - if (true) - return column; - } + case AND: + case OR: + ; break; - case AVG: - case MAX: - case MIN: - case SUM: - case COUNT: - op = SqlFunction(); - { - if (true) - return op; - } + default: + jj_la1[62] = jj_gen; + break label_10; + } + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case AND: + op = jj_consume_token(AND); break; - case LEFT_PAR: - left = jj_consume_token(LEFT_PAR); - op = ValueExpression(); - right = jj_consume_token(RIGHT_PAR); - WrappedOperand wop = queryFactory.createWrappedOperand(op); - wop.setPosition(new TextPosition(left, right)); - { - if (true) - return wop; - } + case OR: + op = jj_consume_token(OR); break; default: - jj_la1[46] = jj_gen; + jj_la1[63] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); + switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ + case NOT: + jj_consume_token(NOT); + notOp = true; + break; + default: + jj_la1[64] = jj_gen; + ; } - } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("ValueExpressionPrimary"); - } - } - - final public ADQLOperand ValueExpression() throws ParseException{ - trace_call("ValueExpression"); + constraint = Constraint(); + if (notOp){ + TextPosition oldPos = constraint.getPosition(); + constraint = queryFactory.createNot(constraint); + ((NotConstraint)constraint).setPosition(new TextPosition(op.beginLine, op.beginColumn, oldPos.endLine, oldPos.endColumn)); + } + notOp = false; + + if (clause instanceof ADQLConstraint) + clause.add(op.image, constraint); + else + clause.add(op.image, constraint); + } + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); + } + } + if (!clause.isEmpty()){ + TextPosition start = clause.get(0).getPosition(); + TextPosition end = clause.get(clause.size() - 1).getPosition(); + clause.setPosition(new TextPosition(start, end)); + } + { + if (true) + return clause; + } + throw new Error("Missing return statement in function"); + } + + final public ADQLConstraint Constraint() throws ParseException{ + ADQLConstraint constraint = null; + Token start, end; + if (jj_2_12(2147483647)){ + constraint = Predicate(); + }else{ + switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ + case LEFT_PAR: + start = jj_consume_token(LEFT_PAR); + try{ + constraint = queryFactory.createGroupOfConstraints(); + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); + } + } + ConditionsList((ConstraintsGroup)constraint); + end = jj_consume_token(RIGHT_PAR); + ((ConstraintsGroup)constraint).setPosition(new TextPosition(start, end)); + break; + default: + jj_la1[65] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + } + { + if (true) + return constraint; + } + throw new Error("Missing return statement in function"); + } + + final public ADQLConstraint Predicate() throws ParseException{ + ADQLQuery q = null; + ADQLColumn column = null; + ADQLOperand strExpr1 = null, strExpr2 = null; + ADQLOperand op; + Token start, notToken = null, end; + ADQLConstraint constraint = null; try{ - ADQLOperand valueExpr = null; switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case BOX: - case CENTROID: - case CIRCLE: - case POINT: - case POLYGON: - case REGION: - valueExpr = GeometryValueFunction(); + case EXISTS: + start = jj_consume_token(EXISTS); + q = SubQueryExpression(); + Exists e = queryFactory.createExists(q); + e.setPosition(new TextPosition(start.beginLine, start.beginColumn, q.getPosition().endLine, q.getPosition().endColumn)); + { + if (true) + return e; + } break; default: - jj_la1[47] = jj_gen; - if (jj_2_4(2147483647)){ - valueExpr = NumericExpression(); - }else if (jj_2_5(2147483647)){ - valueExpr = StringExpression(); - }else if (jj_2_6(2147483647)){ - valueExpr = StringExpression(); + jj_la1[70] = jj_gen; + if (jj_2_14(2147483647)){ + column = Column(); + jj_consume_token(IS); + switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ + case NOT: + notToken = jj_consume_token(NOT); + break; + default: + jj_la1[66] = jj_gen; + ; + } + end = jj_consume_token(NULL); + IsNull in = queryFactory.createIsNull((notToken != null), column); + in.setPosition(new TextPosition(column.getPosition().beginLine, column.getPosition().beginColumn, end.endLine, (end.endColumn < 0) ? -1 : (end.endColumn + 1))); + { + if (true) + return in; + } + }else if (jj_2_15(2147483647)){ + strExpr1 = StringExpression(); + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case NOT: + notToken = jj_consume_token(NOT); + break; + default: + jj_la1[67] = jj_gen; + ; + } + jj_consume_token(LIKE); + strExpr2 = StringExpression(); + Comparison comp = queryFactory.createComparison(strExpr1, (notToken == null) ? ComparisonOperator.LIKE : ComparisonOperator.NOTLIKE, strExpr2); + comp.setPosition(new TextPosition(strExpr1.getPosition(), strExpr2.getPosition())); + { + if (true) + return comp; + } }else{ switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ case LEFT_PAR: @@ -1798,11 +2217,18 @@ public class ADQLParser implements ADQLParserConstants { case MIN: case SUM: case COUNT: + case BOX: + case CENTROID: + case CIRCLE: + case POINT: + case POLYGON: + case REGION: case CONTAINS: case INTERSECTS: case AREA: case COORD1: case COORD2: + case COORDSYS: case DISTANCE: case ABS: case CEILING: @@ -1833,1742 +2259,1099 @@ public class ADQLParser implements ADQLParserConstants { case SCIENTIFIC_NUMBER: case UNSIGNED_FLOAT: case UNSIGNED_INTEGER: - valueExpr = NumericExpression(); + op = ValueExpression(); + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case EQUAL: + case NOT_EQUAL: + case LESS_THAN: + case LESS_EQUAL_THAN: + case GREATER_THAN: + case GREATER_EQUAL_THAN: + constraint = ComparisonEnd(op); + break; + default: + jj_la1[68] = jj_gen; + if (jj_2_13(2)){ + constraint = BetweenEnd(op); + }else{ + switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ + case NOT: + case IN: + constraint = InEnd(op); + break; + default: + jj_la1[69] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + } + } break; default: - jj_la1[48] = jj_gen; + jj_la1[71] = jj_gen; jj_consume_token(-1); throw new ParseException(); } } } + }catch(Exception ex){ { if (true) - return valueExpr; + throw generateParseException(ex); } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("ValueExpression"); } + { + if (true) + return constraint; + } + throw new Error("Missing return statement in function"); } - final public ADQLOperand NumericExpression() throws ParseException{ - trace_call("NumericExpression"); + final public Comparison ComparisonEnd(ADQLOperand leftOp) throws ParseException{ + Token comp; + ADQLOperand rightOp; + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case EQUAL: + comp = jj_consume_token(EQUAL); + break; + case NOT_EQUAL: + comp = jj_consume_token(NOT_EQUAL); + break; + case LESS_THAN: + comp = jj_consume_token(LESS_THAN); + break; + case LESS_EQUAL_THAN: + comp = jj_consume_token(LESS_EQUAL_THAN); + break; + case GREATER_THAN: + comp = jj_consume_token(GREATER_THAN); + break; + case GREATER_EQUAL_THAN: + comp = jj_consume_token(GREATER_EQUAL_THAN); + break; + default: + jj_la1[72] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + rightOp = ValueExpression(); + try{ + Comparison comparison = queryFactory.createComparison(leftOp, ComparisonOperator.getOperator(comp.image), rightOp); + comparison.setPosition(new TextPosition(leftOp.getPosition(), rightOp.getPosition())); + { + if (true) + return comparison; + } + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); + } + } + throw new Error("Missing return statement in function"); + } + + final public Between BetweenEnd(ADQLOperand leftOp) throws ParseException{ + Token start, notToken = null; + ADQLOperand min, max; + switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ + case NOT: + notToken = jj_consume_token(NOT); + break; + default: + jj_la1[73] = jj_gen; + ; + } + start = jj_consume_token(BETWEEN); + min = ValueExpression(); + jj_consume_token(AND); + max = ValueExpression(); try{ - Token sign = null; - ADQLOperand leftOp, rightOp = null; - leftOp = NumericTerm(); + Between bet = queryFactory.createBetween((notToken != null), leftOp, min, max); + if (notToken != null) + start = notToken; + bet.setPosition(new TextPosition(start.beginLine, start.beginColumn, max.getPosition().endLine, max.getPosition().endColumn)); + { + if (true) + return bet; + } + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); + } + } + throw new Error("Missing return statement in function"); + } + + final public In InEnd(ADQLOperand leftOp) throws ParseException{ + Token not = null, start; + ADQLQuery q = null; + ADQLOperand item; + Vector items = new Vector(); + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case NOT: + not = jj_consume_token(NOT); + break; + default: + jj_la1[74] = jj_gen; + ; + } + start = jj_consume_token(IN); + if (jj_2_16(2)){ + q = SubQueryExpression(); + }else{ switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case PLUS: - case MINUS: - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case PLUS: - sign = jj_consume_token(PLUS); - break; - case MINUS: - sign = jj_consume_token(MINUS); - break; - default: - jj_la1[49] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); + case LEFT_PAR: + jj_consume_token(LEFT_PAR); + item = ValueExpression(); + items.add(item); + label_11: while(true){ + switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ + case COMMA: + ; + break; + default: + jj_la1[75] = jj_gen; + break label_11; + } + jj_consume_token(COMMA); + item = ValueExpression(); + items.add(item); } - rightOp = NumericExpression(); + jj_consume_token(RIGHT_PAR); break; default: - jj_la1[50] = jj_gen; - ; + jj_la1[76] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); } - if (sign == null){ - if (true) - return leftOp; + } + try{ + In in; + start = (not != null) ? not : start; + if (q != null){ + in = queryFactory.createIn(leftOp, q, not != null); + in.setPosition(new TextPosition(start.beginLine, start.beginColumn, q.getPosition().endLine, q.getPosition().endColumn)); }else{ - try{ - Operation operation = queryFactory.createOperation(leftOp, OperationType.getOperator(sign.image), rightOp); - operation.setPosition(new TextPosition(leftOp.getPosition(), rightOp.getPosition())); - { - if (true) - return operation; - } - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); - } - } + ADQLOperand[] list = new ADQLOperand[items.size()]; + int i = 0; + for(ADQLOperand op : items) + list[i++] = op; + in = queryFactory.createIn(leftOp, list, not != null); + in.setPosition(new TextPosition(start.beginLine, start.beginColumn, list[list.length - 1].getPosition().endLine, list[list.length - 1].getPosition().endColumn)); + } + { + if (true) + return in; + } + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("NumericExpression"); } + throw new Error("Missing return statement in function"); } - final public ADQLOperand NumericTerm() throws ParseException{ - trace_call("NumericTerm"); + /* ************* */ + /* SQL FUNCTIONS */ + /* ************* */ + final public SQLFunction SqlFunction() throws ParseException{ + Token fct, all = null, distinct = null, end; + ADQLOperand op = null; + SQLFunction funct = null; try{ - Token sign = null; - ADQLOperand leftOp, 
rightOp = null; - leftOp = Factor(); switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case ASTERISK: - case DIVIDE: + case COUNT: + fct = jj_consume_token(COUNT); + jj_consume_token(LEFT_PAR); + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case QUANTIFIER: + distinct = jj_consume_token(QUANTIFIER); + break; + default: + jj_la1[77] = jj_gen; + ; + } switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ case ASTERISK: - sign = jj_consume_token(ASTERISK); + all = jj_consume_token(ASTERISK); break; - case DIVIDE: - sign = jj_consume_token(DIVIDE); + case LEFT_PAR: + case PLUS: + case MINUS: + case AVG: + case MAX: + case MIN: + case SUM: + case COUNT: + case BOX: + case CENTROID: + case CIRCLE: + case POINT: + case POLYGON: + case REGION: + case CONTAINS: + case INTERSECTS: + case AREA: + case COORD1: + case COORD2: + case COORDSYS: + case DISTANCE: + case ABS: + case CEILING: + case DEGREES: + case EXP: + case FLOOR: + case LOG: + case LOG10: + case MOD: + case PI: + case POWER: + case RADIANS: + case RAND: + case ROUND: + case SQRT: + case TRUNCATE: + case ACOS: + case ASIN: + case ATAN: + case ATAN2: + case COS: + case COT: + case SIN: + case TAN: + case STRING_LITERAL: + case DELIMITED_IDENTIFIER: + case REGULAR_IDENTIFIER: + case SCIENTIFIC_NUMBER: + case UNSIGNED_FLOAT: + case UNSIGNED_INTEGER: + op = ValueExpression(); break; default: - jj_la1[51] = jj_gen; + jj_la1[78] = jj_gen; jj_consume_token(-1); throw new ParseException(); } - rightOp = NumericTerm(); + end = jj_consume_token(RIGHT_PAR); + funct = queryFactory.createSQLFunction((all != null) ? 
SQLFunctionType.COUNT_ALL : SQLFunctionType.COUNT, op, distinct != null && distinct.image.equalsIgnoreCase("distinct")); + funct.setPosition(new TextPosition(fct, end)); break; - default: - jj_la1[52] = jj_gen; - ; - } - if (sign == null){ - if (true) - return leftOp; - }else{ - try{ - Operation operation = queryFactory.createOperation(leftOp, OperationType.getOperator(sign.image), rightOp); - operation.setPosition(new TextPosition(leftOp.getPosition(), rightOp.getPosition())); - { - if (true) - return operation; - } - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); - } - } - } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("NumericTerm"); - } - } - - final public ADQLOperand Factor() throws ParseException{ - trace_call("Factor"); - try{ - boolean negative = false; - Token minusSign = null; - ADQLOperand op; - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case PLUS: - case MINUS: + case AVG: + case MAX: + case MIN: + case SUM: switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case PLUS: - jj_consume_token(PLUS); + case AVG: + fct = jj_consume_token(AVG); break; - case MINUS: - minusSign = jj_consume_token(MINUS); - negative = true; + case MAX: + fct = jj_consume_token(MAX); + break; + case MIN: + fct = jj_consume_token(MIN); + break; + case SUM: + fct = jj_consume_token(SUM); break; default: - jj_la1[53] = jj_gen; + jj_la1[79] = jj_gen; jj_consume_token(-1); throw new ParseException(); } + jj_consume_token(LEFT_PAR); + switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ + case QUANTIFIER: + distinct = jj_consume_token(QUANTIFIER); + break; + default: + jj_la1[80] = jj_gen; + ; + } + op = ValueExpression(); + end = jj_consume_token(RIGHT_PAR); + funct = queryFactory.createSQLFunction(SQLFunctionType.valueOf(fct.image.toUpperCase()), op, distinct != null && distinct.image.equalsIgnoreCase("distinct")); + funct.setPosition(new TextPosition(fct, end)); break; default: - jj_la1[54] = jj_gen; - ; - } - if (jj_2_7(2)){ - op = NumericFunction(); - }else{ - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case LEFT_PAR: - case AVG: - case MAX: - case MIN: - case SUM: - case COUNT: - case STRING_LITERAL: - case DELIMITED_IDENTIFIER: - case REGULAR_IDENTIFIER: - case SCIENTIFIC_NUMBER: - case UNSIGNED_FLOAT: - case UNSIGNED_INTEGER: - op = ValueExpressionPrimary(); - break; - default: - jj_la1[55] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } - if (negative){ - try{ - op = queryFactory.createNegativeOperand(op); - NegativeOperand negativeOp = (NegativeOperand)op; - negativeOp.setPosition(new TextPosition(minusSign.beginLine, minusSign.beginColumn, negativeOp.getPosition().endLine, negativeOp.getPosition().endColumn)); - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); - } - } + jj_la1[81] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); } - + }catch(Exception ex){ { if (true) - return op; + throw generateParseException(ex); } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("Factor"); } + { + if (true) + return funct; + } + throw new Error("Missing return statement in function"); } - final public ADQLOperand StringExpression() throws ParseException{ - trace_call("StringExpression"); + /* ************** */ + /* ADQL FUNCTIONS */ + /* ************** */ + final public ADQLOperand[] Coordinates() throws ParseException{ + ADQLOperand[] ops = new ADQLOperand[2]; + ops[0] = NumericExpression(); + jj_consume_token(COMMA); + ops[1] 
= NumericExpression(); + { + if (true) + return ops; + } + throw new Error("Missing return statement in function"); + } + + final public GeometryFunction GeometryFunction() throws ParseException{ + Token fct = null, end; + GeometryValue gvf1, gvf2; + GeometryValue gvp1, gvp2; + GeometryFunction gf = null; + PointFunction p1 = null, p2 = null; + ADQLColumn col1 = null, col2 = null; try{ - ADQLOperand leftOp; - ADQLOperand rightOp = null; - leftOp = StringFactor(); - label_9: while(true){ - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case CONCAT: - ; - break; - default: - jj_la1[56] = jj_gen; - break label_9; - } - jj_consume_token(CONCAT); - rightOp = StringFactor(); - if (!(leftOp instanceof Concatenation)){ - try{ - ADQLOperand temp = leftOp; - leftOp = queryFactory.createConcatenation(); - ((Concatenation)leftOp).add(temp); - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); - } - } - } - ((Concatenation)leftOp).add(rightOp); - } - if (leftOp instanceof Concatenation){ - Concatenation concat = (Concatenation)leftOp; - concat.setPosition(new TextPosition(concat.get(0).getPosition(), concat.get(concat.size() - 1).getPosition())); - } - { - if (true) - return leftOp; - } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("StringExpression"); - } - } - - final public ADQLOperand StringFactor() throws ParseException{ - trace_call("StringFactor"); - try{ - ADQLOperand op; switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case COORDSYS: - op = ExtractCoordSys(); - break; - default: - jj_la1[57] = jj_gen; - if (jj_2_8(2)){ - op = UserDefinedFunction(); - }else{ - switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case LEFT_PAR: - case AVG: - case MAX: - case MIN: - case SUM: - case COUNT: - case STRING_LITERAL: - case DELIMITED_IDENTIFIER: - case REGULAR_IDENTIFIER: - case SCIENTIFIC_NUMBER: - case UNSIGNED_FLOAT: - case UNSIGNED_INTEGER: - op = ValueExpressionPrimary(); - break; - default: - jj_la1[58] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } + case CONTAINS: + case INTERSECTS: + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case CONTAINS: + fct = jj_consume_token(CONTAINS); + break; + case INTERSECTS: + fct = jj_consume_token(INTERSECTS); + break; + default: + jj_la1[82] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); } - } - { - if (true) - return op; - } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("StringFactor"); - } - } - - final public GeometryValue GeometryExpression() throws ParseException{ - trace_call("GeometryExpression"); - try{ - ADQLColumn col = null; - GeometryFunction gf = null; - switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case DELIMITED_IDENTIFIER: - case REGULAR_IDENTIFIER: - col = Column(); + jj_consume_token(LEFT_PAR); + gvf1 = GeometryExpression(); + jj_consume_token(COMMA); + gvf2 = GeometryExpression(); + end = jj_consume_token(RIGHT_PAR); + if (fct.image.equalsIgnoreCase("contains")) + gf = queryFactory.createContains(gvf1, gvf2); + else + gf = queryFactory.createIntersects(gvf1, gvf2); break; - case BOX: - case CENTROID: - case CIRCLE: - case POINT: - case POLYGON: - case REGION: - gf = GeometryValueFunction(); + case AREA: + fct = jj_consume_token(AREA); + jj_consume_token(LEFT_PAR); + gvf1 = GeometryExpression(); + end = jj_consume_token(RIGHT_PAR); + gf = queryFactory.createArea(gvf1); break; - default: - jj_la1[59] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - if (col != null){ - if (true) - return new GeometryValue(col); - }else{ - if (true) - return new GeometryValue(gf); - } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("GeometryExpression"); - } - } - - /* ********************************** */ - /* BOOLEAN EXPRESSIONS (WHERE clause) */ - /* ********************************** */ - final public ClauseConstraints ConditionsList(ClauseConstraints clause) throws ParseException{ - trace_call("ConditionsList"); - try{ - ADQLConstraint constraint = null; - Token op = null; - boolean notOp = false; - try{ - switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case NOT: - op = jj_consume_token(NOT); - notOp = true; - break; - default: - jj_la1[60] = jj_gen; - ; - } - constraint = Constraint(); - if (notOp){ - TextPosition oldPos = constraint.getPosition(); - constraint = queryFactory.createNot(constraint); - ((NotConstraint)constraint).setPosition(new TextPosition(op.beginLine, op.beginColumn, oldPos.endLine, oldPos.endColumn)); - } - notOp = false; - - if (clause instanceof ADQLConstraint) - clause.add(constraint); - else - clause.add(constraint); - label_10: while(true){ + case COORD1: + fct = jj_consume_token(COORD1); + jj_consume_token(LEFT_PAR); switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case AND: - case OR: - ; + case POINT: + p1 = Point(); + gf = queryFactory.createCoord1(p1); + break; + case DELIMITED_IDENTIFIER: + case REGULAR_IDENTIFIER: + col1 = Column(); + col1.setExpectedType('G'); + gf = queryFactory.createCoord1(col1); break; default: - jj_la1[61] = jj_gen; - break label_10; + jj_la1[83] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); } + end = jj_consume_token(RIGHT_PAR); + break; + case COORD2: + fct = jj_consume_token(COORD2); + jj_consume_token(LEFT_PAR); switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case AND: - op = jj_consume_token(AND); + case POINT: + p1 = Point(); + gf = queryFactory.createCoord2(p1); break; - case OR: - op = jj_consume_token(OR); + case DELIMITED_IDENTIFIER: + case REGULAR_IDENTIFIER: + col1 = Column(); + col1.setExpectedType('G'); + gf = queryFactory.createCoord2(col1); break; default: - jj_la1[62] = jj_gen; + jj_la1[84] = jj_gen; jj_consume_token(-1); throw new ParseException(); } + end = jj_consume_token(RIGHT_PAR); + break; + case DISTANCE: + fct = jj_consume_token(DISTANCE); + jj_consume_token(LEFT_PAR); switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case NOT: - jj_consume_token(NOT); - notOp = true; + case POINT: + p1 = Point(); + break; + case DELIMITED_IDENTIFIER: + case REGULAR_IDENTIFIER: + col1 = Column(); break; default: - jj_la1[63] = jj_gen; - ; + jj_la1[85] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); } - constraint = Constraint(); - if (notOp){ - TextPosition oldPos = constraint.getPosition(); - constraint = queryFactory.createNot(constraint); - ((NotConstraint)constraint).setPosition(new TextPosition(op.beginLine, op.beginColumn, oldPos.endLine, oldPos.endColumn)); + if (p1 != null) + gvp1 = new GeometryValue(p1); + else{ + col1.setExpectedType('G'); + gvp1 = new GeometryValue(col1); } - notOp = false; - - if (clause instanceof ADQLConstraint) - clause.add(op.image, constraint); - else - clause.add(op.image, constraint); - } - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); - } - } - if (!clause.isEmpty()){ - TextPosition start = clause.get(0).getPosition(); - TextPosition end = clause.get(clause.size() - 1).getPosition(); - clause.setPosition(new TextPosition(start, end)); + jj_consume_token(COMMA); + switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ + case POINT: + p2 = Point(); + break; + case DELIMITED_IDENTIFIER: + case REGULAR_IDENTIFIER: + col2 = Column(); + break; + default: + jj_la1[86] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + if (p2 != null) + gvp2 = new GeometryValue(p2); + else{ + col2.setExpectedType('G'); + gvp2 = new GeometryValue(col2); + } + end = jj_consume_token(RIGHT_PAR); + gf = queryFactory.createDistance(gvp1, gvp2); + break; + default: + jj_la1[87] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); } + }catch(Exception ex){ { if (true) - return clause; + throw generateParseException(ex); } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("ConditionsList"); } + gf.setPosition(new TextPosition(fct, end)); + { + if (true) + return gf; + } + throw new Error("Missing return statement in function"); } - final public ADQLConstraint Constraint() throws ParseException{ - trace_call("Constraint"); - try{ - ADQLConstraint constraint = null; - Token start, end; - if (jj_2_9(2147483647)){ - constraint = Predicate(); - }else{ - switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case LEFT_PAR: - start = jj_consume_token(LEFT_PAR); - try{ - constraint = queryFactory.createGroupOfConstraints(); - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); - } - } - ConditionsList((ConstraintsGroup)constraint); - end = jj_consume_token(RIGHT_PAR); - ((ConstraintsGroup)constraint).setPosition(new TextPosition(start, end)); - break; - default: - jj_la1[64] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } - { - if (true) - return constraint; - } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("Constraint"); + final public ADQLOperand CoordinateSystem() throws ParseException{ + ADQLOperand coordSys = null; + coordSys = StringExpression(); + { + if (true) + return coordSys; } + throw new Error("Missing return statement in function"); } - final public ADQLConstraint Predicate() throws ParseException{ - trace_call("Predicate"); + final public GeometryFunction GeometryValueFunction() throws ParseException{ + Token fct = null, end = null; + ADQLOperand coordSys; + ADQLOperand width, height; + ADQLOperand[] coords, tmp; + Vector vCoords; + ADQLOperand op = null; + GeometryValue gvf = null; + GeometryFunction gf = null; try{ - ADQLQuery q = null; - ADQLColumn column = null; - ADQLOperand strExpr1 = null, strExpr2 = null; - ADQLOperand op; - Token start, notToken = null, end; - ADQLConstraint constraint = null; - try{ - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case EXISTS: - start = jj_consume_token(EXISTS); - q = SubQueryExpression(); - Exists e = queryFactory.createExists(q); - e.setPosition(new TextPosition(start.beginLine, start.beginColumn, q.getPosition().endLine, q.getPosition().endColumn)); - { - if (true) - return e; + switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ + case BOX: + fct = jj_consume_token(BOX); + jj_consume_token(LEFT_PAR); + coordSys = CoordinateSystem(); + jj_consume_token(COMMA); + coords = Coordinates(); + jj_consume_token(COMMA); + width = NumericExpression(); + jj_consume_token(COMMA); + height = NumericExpression(); + end = jj_consume_token(RIGHT_PAR); + gf = queryFactory.createBox(coordSys, coords[0], coords[1], width, height); + break; + case CENTROID: + fct = jj_consume_token(CENTROID); + jj_consume_token(LEFT_PAR); + gvf = GeometryExpression(); + end = jj_consume_token(RIGHT_PAR); + gf = queryFactory.createCentroid(gvf); + break; + case CIRCLE: + fct = jj_consume_token(CIRCLE); + jj_consume_token(LEFT_PAR); + coordSys = CoordinateSystem(); + jj_consume_token(COMMA); + coords = Coordinates(); + jj_consume_token(COMMA); + width = NumericExpression(); + end = jj_consume_token(RIGHT_PAR); + gf = queryFactory.createCircle(coordSys, coords[0], coords[1], width); + break; + case POINT: + gf = Point(); + break; + case POLYGON: + fct = jj_consume_token(POLYGON); + jj_consume_token(LEFT_PAR); + coordSys = CoordinateSystem(); + vCoords = new Vector(); + jj_consume_token(COMMA); + tmp = Coordinates(); + vCoords.add(tmp[0]); + vCoords.add(tmp[1]); + jj_consume_token(COMMA); + tmp = Coordinates(); + vCoords.add(tmp[0]); + vCoords.add(tmp[1]); + jj_consume_token(COMMA); + tmp = Coordinates(); + vCoords.add(tmp[0]); + vCoords.add(tmp[1]); + label_12: while(true){ + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case COMMA: + ; + break; + default: + jj_la1[88] = jj_gen; + break label_12; } - break; - default: - jj_la1[69] = jj_gen; - if (jj_2_11(2147483647)){ - column = Column(); - jj_consume_token(IS); - switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case NOT: - notToken = jj_consume_token(NOT); - break; - default: - jj_la1[65] = jj_gen; - ; - } - end = jj_consume_token(NULL); - IsNull in = queryFactory.createIsNull((notToken != null), column); - in.setPosition(new TextPosition(column.getPosition().beginLine, column.getPosition().beginColumn, end.endLine, (end.endColumn < 0) ? -1 : (end.endColumn + 1))); - { - if (true) - return in; - } - }else if (jj_2_12(2147483647)){ - strExpr1 = StringExpression(); - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case NOT: - notToken = jj_consume_token(NOT); - break; - default: - jj_la1[66] = jj_gen; - ; - } - jj_consume_token(LIKE); - strExpr2 = StringExpression(); - Comparison comp = queryFactory.createComparison(strExpr1, (notToken == null) ? ComparisonOperator.LIKE : ComparisonOperator.NOTLIKE, strExpr2); - comp.setPosition(new TextPosition(strExpr1.getPosition(), strExpr2.getPosition())); - { - if (true) - return comp; - } - }else{ - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case LEFT_PAR: - case PLUS: - case MINUS: - case AVG: - case MAX: - case MIN: - case SUM: - case COUNT: - case BOX: - case CENTROID: - case CIRCLE: - case POINT: - case POLYGON: - case REGION: - case CONTAINS: - case INTERSECTS: - case AREA: - case COORD1: - case COORD2: - case COORDSYS: - case DISTANCE: - case ABS: - case CEILING: - case DEGREES: - case EXP: - case FLOOR: - case LOG: - case LOG10: - case MOD: - case PI: - case POWER: - case RADIANS: - case RAND: - case ROUND: - case SQRT: - case TRUNCATE: - case ACOS: - case ASIN: - case ATAN: - case ATAN2: - case COS: - case COT: - case SIN: - case TAN: - case STRING_LITERAL: - case DELIMITED_IDENTIFIER: - case REGULAR_IDENTIFIER: - case SCIENTIFIC_NUMBER: - case UNSIGNED_FLOAT: - case UNSIGNED_INTEGER: - op = ValueExpression(); - switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case EQUAL: - case NOT_EQUAL: - case LESS_THAN: - case LESS_EQUAL_THAN: - case GREATER_THAN: - case GREATER_EQUAL_THAN: - constraint = ComparisonEnd(op); - break; - default: - jj_la1[67] = jj_gen; - if (jj_2_10(2)){ - constraint = BetweenEnd(op); - }else{ - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case NOT: - case IN: - constraint = InEnd(op); - break; - default: - jj_la1[68] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } - } - break; - default: - jj_la1[70] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } - } - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); - } - } - { - if (true) - return constraint; - } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("Predicate"); - } - } - - final public Comparison ComparisonEnd(ADQLOperand leftOp) throws ParseException{ - trace_call("ComparisonEnd"); - try{ - Token comp; - ADQLOperand rightOp; - switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case EQUAL: - comp = jj_consume_token(EQUAL); - break; - case NOT_EQUAL: - comp = jj_consume_token(NOT_EQUAL); - break; - case LESS_THAN: - comp = jj_consume_token(LESS_THAN); - break; - case LESS_EQUAL_THAN: - comp = jj_consume_token(LESS_EQUAL_THAN); - break; - case GREATER_THAN: - comp = jj_consume_token(GREATER_THAN); - break; - case GREATER_EQUAL_THAN: - comp = jj_consume_token(GREATER_EQUAL_THAN); - break; - default: - jj_la1[71] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - rightOp = ValueExpression(); - try{ - Comparison comparison = queryFactory.createComparison(leftOp, ComparisonOperator.getOperator(comp.image), rightOp); - comparison.setPosition(new TextPosition(leftOp.getPosition(), rightOp.getPosition())); - { - if (true) - return comparison; - } - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); - } - } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("ComparisonEnd"); - } - } - - final public Between BetweenEnd(ADQLOperand leftOp) throws ParseException{ - trace_call("BetweenEnd"); - try{ - Token start, notToken = null; - ADQLOperand min, max; - switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case NOT: - notToken = jj_consume_token(NOT); - break; - default: - jj_la1[72] = jj_gen; - ; - } - start = jj_consume_token(BETWEEN); - min = ValueExpression(); - jj_consume_token(AND); - max = ValueExpression(); - try{ - Between bet = queryFactory.createBetween((notToken != null), leftOp, min, max); - if (notToken != null) - start = notToken; - bet.setPosition(new TextPosition(start.beginLine, start.beginColumn, max.getPosition().endLine, max.getPosition().endColumn)); - { - if (true) - return bet; - } - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); - } - } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("BetweenEnd"); - } - } - - final public In InEnd(ADQLOperand leftOp) throws ParseException{ - trace_call("InEnd"); - try{ - Token not = null, start; - ADQLQuery q = null; - ADQLOperand item; - Vector items = new Vector(); - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case NOT: - not = jj_consume_token(NOT); - break; - default: - jj_la1[73] = jj_gen; - ; - } - start = jj_consume_token(IN); - if (jj_2_13(2)){ - q = SubQueryExpression(); - }else{ - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case LEFT_PAR: - jj_consume_token(LEFT_PAR); - item = ValueExpression(); - items.add(item); - label_11: while(true){ - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case COMMA: - ; - break; - default: - jj_la1[74] = jj_gen; - break label_11; - } - jj_consume_token(COMMA); - item = ValueExpression(); - items.add(item); - } - jj_consume_token(RIGHT_PAR); - break; - default: - jj_la1[75] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - } - try{ - In in; - start = (not != null) ? 
not : start; - if (q != null){ - in = queryFactory.createIn(leftOp, q, not != null); - in.setPosition(new TextPosition(start.beginLine, start.beginColumn, q.getPosition().endLine, q.getPosition().endColumn)); - }else{ - ADQLOperand[] list = new ADQLOperand[items.size()]; - int i = 0; - for(ADQLOperand op : items) - list[i++] = op; - in = queryFactory.createIn(leftOp, list, not != null); - in.setPosition(new TextPosition(start.beginLine, start.beginColumn, list[list.length - 1].getPosition().endLine, list[list.length - 1].getPosition().endColumn)); - } - { - if (true) - return in; - } - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); - } - } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("InEnd"); - } - } - - /* ************* */ - /* SQL FUNCTIONS */ - /* ************* */ - final public SQLFunction SqlFunction() throws ParseException{ - trace_call("SqlFunction"); - try{ - Token fct, all = null, distinct = null, end; - ADQLOperand op = null; - SQLFunction funct = null; - try{ - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case COUNT: - fct = jj_consume_token(COUNT); - jj_consume_token(LEFT_PAR); - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case QUANTIFIER: - distinct = jj_consume_token(QUANTIFIER); - break; - default: - jj_la1[76] = jj_gen; - ; - } - switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case ASTERISK: - all = jj_consume_token(ASTERISK); - break; - case LEFT_PAR: - case PLUS: - case MINUS: - case AVG: - case MAX: - case MIN: - case SUM: - case COUNT: - case BOX: - case CENTROID: - case CIRCLE: - case POINT: - case POLYGON: - case REGION: - case CONTAINS: - case INTERSECTS: - case AREA: - case COORD1: - case COORD2: - case COORDSYS: - case DISTANCE: - case ABS: - case CEILING: - case DEGREES: - case EXP: - case FLOOR: - case LOG: - case LOG10: - case MOD: - case PI: - case POWER: - case RADIANS: - case RAND: - case ROUND: - case SQRT: - case TRUNCATE: - case ACOS: - case ASIN: - case ATAN: - case ATAN2: - case COS: - case COT: - case SIN: - case TAN: - case STRING_LITERAL: - case DELIMITED_IDENTIFIER: - case REGULAR_IDENTIFIER: - case SCIENTIFIC_NUMBER: - case UNSIGNED_FLOAT: - case UNSIGNED_INTEGER: - op = ValueExpression(); - break; - default: - jj_la1[77] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - end = jj_consume_token(RIGHT_PAR); - funct = queryFactory.createSQLFunction((all != null) ? SQLFunctionType.COUNT_ALL : SQLFunctionType.COUNT, op, distinct != null && distinct.image.equalsIgnoreCase("distinct")); - funct.setPosition(new TextPosition(fct, end)); - break; - case AVG: - case MAX: - case MIN: - case SUM: - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case AVG: - fct = jj_consume_token(AVG); - break; - case MAX: - fct = jj_consume_token(MAX); - break; - case MIN: - fct = jj_consume_token(MIN); - break; - case SUM: - fct = jj_consume_token(SUM); - break; - default: - jj_la1[78] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - jj_consume_token(LEFT_PAR); - switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case QUANTIFIER: - distinct = jj_consume_token(QUANTIFIER); - break; - default: - jj_la1[79] = jj_gen; - ; - } - op = ValueExpression(); - end = jj_consume_token(RIGHT_PAR); - funct = queryFactory.createSQLFunction(SQLFunctionType.valueOf(fct.image.toUpperCase()), op, distinct != null && distinct.image.equalsIgnoreCase("distinct")); - funct.setPosition(new TextPosition(fct, end)); - break; - default: - jj_la1[80] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); - } - } - { - if (true) - return funct; - } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("SqlFunction"); - } - } - - /* ************** */ - /* ADQL FUNCTIONS */ - /* ************** */ - final public ADQLOperand[] Coordinates() throws ParseException{ - trace_call("Coordinates"); - try{ - ADQLOperand[] ops = new ADQLOperand[2]; - ops[0] = NumericExpression(); - jj_consume_token(COMMA); - ops[1] = NumericExpression(); - { - if (true) - return ops; - } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("Coordinates"); - } - } - - final public GeometryFunction GeometryFunction() throws ParseException{ - trace_call("GeometryFunction"); - try{ - Token fct = null, end; - GeometryValue gvf1, gvf2; - GeometryValue gvp1, gvp2; - GeometryFunction gf = null; - PointFunction p1 = null, p2 = null; - ADQLColumn col1 = null, col2 = null; - try{ - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case CONTAINS: - case INTERSECTS: - switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case CONTAINS: - fct = jj_consume_token(CONTAINS); - break; - case INTERSECTS: - fct = jj_consume_token(INTERSECTS); - break; - default: - jj_la1[81] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - jj_consume_token(LEFT_PAR); - gvf1 = GeometryExpression(); - jj_consume_token(COMMA); - gvf2 = GeometryExpression(); - end = jj_consume_token(RIGHT_PAR); - if (fct.image.equalsIgnoreCase("contains")) - gf = queryFactory.createContains(gvf1, gvf2); - else - gf = queryFactory.createIntersects(gvf1, gvf2); - break; - case AREA: - fct = jj_consume_token(AREA); - jj_consume_token(LEFT_PAR); - gvf1 = GeometryExpression(); - end = jj_consume_token(RIGHT_PAR); - gf = queryFactory.createArea(gvf1); - break; - case COORD1: - fct = jj_consume_token(COORD1); - jj_consume_token(LEFT_PAR); - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case POINT: - p1 = Point(); - gf = queryFactory.createCoord1(p1); - break; - case DELIMITED_IDENTIFIER: - case REGULAR_IDENTIFIER: - col1 = Column(); - gf = queryFactory.createCoord1(col1); - break; - default: - jj_la1[82] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - end = jj_consume_token(RIGHT_PAR); - break; - case COORD2: - fct = jj_consume_token(COORD2); - jj_consume_token(LEFT_PAR); - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case POINT: - p1 = Point(); - gf = queryFactory.createCoord2(p1); - break; - case DELIMITED_IDENTIFIER: - case REGULAR_IDENTIFIER: - col1 = Column(); - gf = queryFactory.createCoord2(col1); - break; - default: - jj_la1[83] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - end = jj_consume_token(RIGHT_PAR); - break; - case DISTANCE: - fct = jj_consume_token(DISTANCE); - jj_consume_token(LEFT_PAR); - switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case POINT: - p1 = Point(); - break; - case DELIMITED_IDENTIFIER: - case REGULAR_IDENTIFIER: - col1 = Column(); - break; - default: - jj_la1[84] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - if (p1 != null) - gvp1 = new GeometryValue(p1); - else - gvp1 = new GeometryValue(col1); - jj_consume_token(COMMA); - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case POINT: - p2 = Point(); - break; - case DELIMITED_IDENTIFIER: - case REGULAR_IDENTIFIER: - col2 = Column(); - break; - default: - jj_la1[85] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - if (p2 != null) - gvp2 = new GeometryValue(p2); - else - gvp2 = new GeometryValue(col2); - end = jj_consume_token(RIGHT_PAR); - gf = queryFactory.createDistance(gvp1, gvp2); - break; - default: - jj_la1[86] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); - } - } - gf.setPosition(new TextPosition(fct, end)); - { - if (true) - return gf; - } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("GeometryFunction"); - } - } - - final public ADQLOperand CoordinateSystem() throws ParseException{ - trace_call("CoordinateSystem"); - try{ - Token oldToken = token; - ADQLOperand coordSys = null; - coordSys = StringExpression(); - if (allowedCoordSys.size() > 0){ - TextPosition position = new TextPosition(oldToken.next, token); - if (coordSys == null){ - if (true) - throw new ParseException("A coordinate system must always be provided !", position); - } - if (coordSys instanceof StringConstant && !isAllowedCoordSys(((StringConstant)coordSys).getValue())){ - if (true) - throw new ParseException("\u005c"" + coordSys.toADQL() + "\u005c" is not an allowed coordinate systems !", position); - } - } - - { - if (true) - return coordSys; - } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("CoordinateSystem"); - } - } 
- - final public GeometryFunction GeometryValueFunction() throws ParseException{ - trace_call("GeometryValueFunction"); - try{ - Token fct = null, end = null; - ADQLOperand coordSys; - ADQLOperand width, height; - ADQLOperand[] coords, tmp; - Vector vCoords; - ADQLOperand op = null; - GeometryValue gvf = null; - GeometryFunction gf = null; - try{ - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case BOX: - fct = jj_consume_token(BOX); - jj_consume_token(LEFT_PAR); - coordSys = CoordinateSystem(); - jj_consume_token(COMMA); - coords = Coordinates(); - jj_consume_token(COMMA); - width = NumericExpression(); - jj_consume_token(COMMA); - height = NumericExpression(); - end = jj_consume_token(RIGHT_PAR); - gf = queryFactory.createBox(coordSys, coords[0], coords[1], width, height); - break; - case CENTROID: - fct = jj_consume_token(CENTROID); - jj_consume_token(LEFT_PAR); - gvf = GeometryExpression(); - end = jj_consume_token(RIGHT_PAR); - gf = queryFactory.createCentroid(gvf); - break; - case CIRCLE: - fct = jj_consume_token(CIRCLE); - jj_consume_token(LEFT_PAR); - coordSys = CoordinateSystem(); - jj_consume_token(COMMA); - coords = Coordinates(); - jj_consume_token(COMMA); - width = NumericExpression(); - end = jj_consume_token(RIGHT_PAR); - gf = queryFactory.createCircle(coordSys, coords[0], coords[1], width); - break; - case POINT: - gf = Point(); - break; - case POLYGON: - fct = jj_consume_token(POLYGON); - jj_consume_token(LEFT_PAR); - coordSys = CoordinateSystem(); - vCoords = new Vector(); - jj_consume_token(COMMA); - tmp = Coordinates(); - vCoords.add(tmp[0]); - vCoords.add(tmp[1]); - jj_consume_token(COMMA); - tmp = Coordinates(); - vCoords.add(tmp[0]); - vCoords.add(tmp[1]); jj_consume_token(COMMA); tmp = Coordinates(); vCoords.add(tmp[0]); vCoords.add(tmp[1]); - label_12: while(true){ - switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case COMMA: - ; - break; - default: - jj_la1[87] = jj_gen; - break label_12; - } - jj_consume_token(COMMA); - tmp = Coordinates(); - vCoords.add(tmp[0]); - vCoords.add(tmp[1]); - } - end = jj_consume_token(RIGHT_PAR); - gf = queryFactory.createPolygon(coordSys, vCoords); - break; - case REGION: - fct = jj_consume_token(REGION); - jj_consume_token(LEFT_PAR); - op = StringExpression(); - end = jj_consume_token(RIGHT_PAR); - gf = queryFactory.createRegion(op); - break; - default: - jj_la1[88] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); - } - } - if (fct != null && end != null) // = !(gf instanceof Point) - gf.setPosition(new TextPosition(fct, end)); - { - if (true) - return gf; - } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("GeometryValueFunction"); - } - } - - final public PointFunction Point() throws ParseException{ - trace_call("Point"); - try{ - Token start, end; - ADQLOperand coordSys; - ADQLOperand[] coords; - start = jj_consume_token(POINT); - jj_consume_token(LEFT_PAR); - coordSys = CoordinateSystem(); - jj_consume_token(COMMA); - coords = Coordinates(); - end = jj_consume_token(RIGHT_PAR); - try{ - PointFunction pf = queryFactory.createPoint(coordSys, coords[0], coords[1]); - pf.setPosition(new TextPosition(start, end)); - { - if (true) - return pf; - } - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); - } - } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("Point"); - } - } - - final public GeometryFunction ExtractCoordSys() throws ParseException{ - trace_call("ExtractCoordSys"); - try{ - Token start, end; - GeometryValue gvf; - start = jj_consume_token(COORDSYS); - jj_consume_token(LEFT_PAR); - gvf = GeometryExpression(); - end = jj_consume_token(RIGHT_PAR); - try{ - GeometryFunction gf = queryFactory.createExtractCoordSys(gvf); - 
gf.setPosition(new TextPosition(start, end)); - { - if (true) - return gf; - } - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); - } - } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("ExtractCoordSys"); - } - } - - /* ***************** */ - /* NUMERIC FUNCTIONS */ - /* ***************** */ - final public ADQLFunction NumericFunction() throws ParseException{ - trace_call("NumericFunction"); - try{ - ADQLFunction fct; - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case ABS: - case CEILING: - case DEGREES: - case EXP: - case FLOOR: - case LOG: - case LOG10: - case MOD: - case PI: - case POWER: - case RADIANS: - case RAND: - case ROUND: - case SQRT: - case TRUNCATE: - fct = MathFunction(); - break; - case ACOS: - case ASIN: - case ATAN: - case ATAN2: - case COS: - case COT: - case SIN: - case TAN: - fct = TrigFunction(); - break; - case CONTAINS: - case INTERSECTS: - case AREA: - case COORD1: - case COORD2: - case DISTANCE: - fct = GeometryFunction(); + } + end = jj_consume_token(RIGHT_PAR); + gf = queryFactory.createPolygon(coordSys, vCoords); break; - case REGULAR_IDENTIFIER: - fct = UserDefinedFunction(); + case REGION: + fct = jj_consume_token(REGION); + jj_consume_token(LEFT_PAR); + op = StringExpression(); + end = jj_consume_token(RIGHT_PAR); + gf = queryFactory.createRegion(op); break; default: jj_la1[89] = jj_gen; jj_consume_token(-1); throw new ParseException(); } + }catch(Exception ex){ { if (true) - return fct; + throw generateParseException(ex); } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("NumericFunction"); } + if (fct != null && end != null) // = !(gf instanceof Point) + gf.setPosition(new TextPosition(fct, end)); + { + if (true) + return gf; + } + throw new Error("Missing return statement in function"); } - final public MathFunction MathFunction() throws ParseException{ - trace_call("MathFunction"); + final public PointFunction Point() throws 
ParseException{ + Token start, end; + ADQLOperand coordSys; + ADQLOperand[] coords; + start = jj_consume_token(POINT); + jj_consume_token(LEFT_PAR); + coordSys = CoordinateSystem(); + jj_consume_token(COMMA); + coords = Coordinates(); + end = jj_consume_token(RIGHT_PAR); try{ - Token fct = null, end; - ADQLOperand param1 = null, param2 = null; - NumericConstant integerValue = null; - try{ - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case ABS: - fct = jj_consume_token(ABS); - jj_consume_token(LEFT_PAR); - param1 = NumericExpression(); - end = jj_consume_token(RIGHT_PAR); - break; - case CEILING: - fct = jj_consume_token(CEILING); - jj_consume_token(LEFT_PAR); - param1 = NumericExpression(); - end = jj_consume_token(RIGHT_PAR); - break; - case DEGREES: - fct = jj_consume_token(DEGREES); - jj_consume_token(LEFT_PAR); - param1 = NumericExpression(); - end = jj_consume_token(RIGHT_PAR); - break; - case EXP: - fct = jj_consume_token(EXP); - jj_consume_token(LEFT_PAR); - param1 = NumericExpression(); - end = jj_consume_token(RIGHT_PAR); - break; - case FLOOR: - fct = jj_consume_token(FLOOR); - jj_consume_token(LEFT_PAR); - param1 = NumericExpression(); - end = jj_consume_token(RIGHT_PAR); - break; - case LOG: - fct = jj_consume_token(LOG); - jj_consume_token(LEFT_PAR); - param1 = NumericExpression(); - end = jj_consume_token(RIGHT_PAR); - break; - case LOG10: - fct = jj_consume_token(LOG10); - jj_consume_token(LEFT_PAR); - param1 = NumericExpression(); - end = jj_consume_token(RIGHT_PAR); - break; - case MOD: - fct = jj_consume_token(MOD); - jj_consume_token(LEFT_PAR); - param1 = NumericExpression(); - jj_consume_token(COMMA); - param2 = NumericExpression(); - end = jj_consume_token(RIGHT_PAR); - break; - case PI: - fct = jj_consume_token(PI); - jj_consume_token(LEFT_PAR); - end = jj_consume_token(RIGHT_PAR); - break; - case POWER: - fct = jj_consume_token(POWER); - jj_consume_token(LEFT_PAR); - param1 = NumericExpression(); - jj_consume_token(COMMA); - param2 = 
NumericExpression(); - end = jj_consume_token(RIGHT_PAR); - break; - case RADIANS: - fct = jj_consume_token(RADIANS); - jj_consume_token(LEFT_PAR); - param1 = NumericExpression(); - end = jj_consume_token(RIGHT_PAR); - break; - case RAND: - fct = jj_consume_token(RAND); - jj_consume_token(LEFT_PAR); - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case LEFT_PAR: - case PLUS: - case MINUS: - case AVG: - case MAX: - case MIN: - case SUM: - case COUNT: - case CONTAINS: - case INTERSECTS: - case AREA: - case COORD1: - case COORD2: - case DISTANCE: - case ABS: - case CEILING: - case DEGREES: - case EXP: - case FLOOR: - case LOG: - case LOG10: - case MOD: - case PI: - case POWER: - case RADIANS: - case RAND: - case ROUND: - case SQRT: - case TRUNCATE: - case ACOS: - case ASIN: - case ATAN: - case ATAN2: - case COS: - case COT: - case SIN: - case TAN: - case STRING_LITERAL: - case DELIMITED_IDENTIFIER: - case REGULAR_IDENTIFIER: - case SCIENTIFIC_NUMBER: - case UNSIGNED_FLOAT: - case UNSIGNED_INTEGER: - param1 = NumericExpression(); - break; - default: - jj_la1[90] = jj_gen; - ; - } - end = jj_consume_token(RIGHT_PAR); - break; - case ROUND: - fct = jj_consume_token(ROUND); - jj_consume_token(LEFT_PAR); - param1 = NumericExpression(); - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case COMMA: - jj_consume_token(COMMA); - param2 = SignedInteger(); - break; - default: - jj_la1[91] = jj_gen; - ; - } - end = jj_consume_token(RIGHT_PAR); - break; - case SQRT: - fct = jj_consume_token(SQRT); - jj_consume_token(LEFT_PAR); - param1 = NumericExpression(); - end = jj_consume_token(RIGHT_PAR); - break; - case TRUNCATE: - fct = jj_consume_token(TRUNCATE); - jj_consume_token(LEFT_PAR); - param1 = NumericExpression(); - switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case COMMA: - jj_consume_token(COMMA); - param2 = SignedInteger(); - break; - default: - jj_la1[92] = jj_gen; - ; - } - end = jj_consume_token(RIGHT_PAR); - break; - default: - jj_la1[93] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - if (param1 != null){ - MathFunction mf = queryFactory.createMathFunction(MathFunctionType.valueOf(fct.image.toUpperCase()), param1, param2); - mf.setPosition(new TextPosition(fct, end)); - { - if (true) - return mf; - } - }else{ - if (true) - return null; - } - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); - } + PointFunction pf = queryFactory.createPoint(coordSys, coords[0], coords[1]); + pf.setPosition(new TextPosition(start, end)); + { + if (true) + return pf; + } + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("MathFunction"); } + throw new Error("Missing return statement in function"); } - final public MathFunction TrigFunction() throws ParseException{ - trace_call("TrigFunction"); + final public GeometryFunction ExtractCoordSys() throws ParseException{ + Token start, end; + GeometryValue gvf; + start = jj_consume_token(COORDSYS); + jj_consume_token(LEFT_PAR); + gvf = GeometryExpression(); + end = jj_consume_token(RIGHT_PAR); + try{ + GeometryFunction gf = queryFactory.createExtractCoordSys(gvf); + gf.setPosition(new TextPosition(start, end)); + { + if (true) + return gf; + } + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); + } + } + throw new Error("Missing return statement in function"); + } + + /* ***************** */ + /* NUMERIC FUNCTIONS */ + /* ***************** */ + final public ADQLFunction NumericFunction() throws ParseException{ + ADQLFunction fct; + switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ + case ABS: + case CEILING: + case DEGREES: + case EXP: + case FLOOR: + case LOG: + case LOG10: + case MOD: + case PI: + case POWER: + case RADIANS: + case RAND: + case ROUND: + case SQRT: + case TRUNCATE: + fct = MathFunction(); + break; + case ACOS: + case ASIN: + case ATAN: + case ATAN2: + case COS: + case COT: + case SIN: + case TAN: + fct = TrigFunction(); + break; + case CONTAINS: + case INTERSECTS: + case AREA: + case COORD1: + case COORD2: + case DISTANCE: + fct = GeometryFunction(); + break; + case REGULAR_IDENTIFIER: + fct = UserDefinedFunction(); + ((UserDefinedFunction)fct).setExpectedType('N'); + break; + default: + jj_la1[90] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + { + if (true) + return fct; + } + throw new Error("Missing return statement in function"); + } + + final public MathFunction MathFunction() throws ParseException{ + Token fct = null, end; + ADQLOperand param1 = null, param2 = null; + NumericConstant integerValue = null; try{ - Token fct = null, end; - ADQLOperand param1 = null, param2 = null; switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ - case ACOS: - fct = jj_consume_token(ACOS); + case ABS: + fct = jj_consume_token(ABS); jj_consume_token(LEFT_PAR); param1 = NumericExpression(); end = jj_consume_token(RIGHT_PAR); break; - case ASIN: - fct = jj_consume_token(ASIN); + case CEILING: + fct = jj_consume_token(CEILING); jj_consume_token(LEFT_PAR); param1 = NumericExpression(); end = jj_consume_token(RIGHT_PAR); break; - case ATAN: - fct = jj_consume_token(ATAN); + case DEGREES: + fct = jj_consume_token(DEGREES); jj_consume_token(LEFT_PAR); param1 = NumericExpression(); end = jj_consume_token(RIGHT_PAR); break; - case ATAN2: - fct = jj_consume_token(ATAN2); + case EXP: + fct = jj_consume_token(EXP); jj_consume_token(LEFT_PAR); param1 = NumericExpression(); - jj_consume_token(COMMA); - param2 = NumericExpression(); end = jj_consume_token(RIGHT_PAR); break; - case COS: - fct = jj_consume_token(COS); + case FLOOR: + fct = jj_consume_token(FLOOR); jj_consume_token(LEFT_PAR); param1 = NumericExpression(); end = jj_consume_token(RIGHT_PAR); break; - case COT: - fct = jj_consume_token(COT); + case LOG: + fct = jj_consume_token(LOG); jj_consume_token(LEFT_PAR); param1 = NumericExpression(); end = jj_consume_token(RIGHT_PAR); break; - case SIN: - fct = jj_consume_token(SIN); + case LOG10: + fct = jj_consume_token(LOG10); jj_consume_token(LEFT_PAR); param1 = NumericExpression(); end = jj_consume_token(RIGHT_PAR); break; - case TAN: - fct = jj_consume_token(TAN); + case MOD: + fct = jj_consume_token(MOD); jj_consume_token(LEFT_PAR); param1 = NumericExpression(); + jj_consume_token(COMMA); + param2 = NumericExpression(); end = jj_consume_token(RIGHT_PAR); break; - default: - jj_la1[94] = jj_gen; - jj_consume_token(-1); - throw new ParseException(); - } - try{ - if (param1 != null){ - MathFunction mf = queryFactory.createMathFunction(MathFunctionType.valueOf(fct.image.toUpperCase()), param1, param2); - mf.setPosition(new TextPosition(fct, end)); - { - if (true) - return mf; - } - }else{ - if 
(true) - return null; - } - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); - } - } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("TrigFunction"); - } - } - - /* /!\ WARNING: The function name may be prefixed by "udf_" but there is no way to check it here ! */ - final public UserDefinedFunction UserDefinedFunction() throws ParseException{ - trace_call("UserDefinedFunction"); - try{ - Token fct, end; - Vector params = new Vector(); - ADQLOperand op; - fct = jj_consume_token(REGULAR_IDENTIFIER); - jj_consume_token(LEFT_PAR); - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case LEFT_PAR: - case PLUS: - case MINUS: - case AVG: - case MAX: - case MIN: - case SUM: - case COUNT: - case BOX: - case CENTROID: - case CIRCLE: - case POINT: - case POLYGON: - case REGION: - case CONTAINS: - case INTERSECTS: - case AREA: - case COORD1: - case COORD2: - case COORDSYS: - case DISTANCE: - case ABS: - case CEILING: - case DEGREES: - case EXP: - case FLOOR: - case LOG: - case LOG10: - case MOD: case PI: + fct = jj_consume_token(PI); + jj_consume_token(LEFT_PAR); + end = jj_consume_token(RIGHT_PAR); + break; case POWER: + fct = jj_consume_token(POWER); + jj_consume_token(LEFT_PAR); + param1 = NumericExpression(); + jj_consume_token(COMMA); + param2 = NumericExpression(); + end = jj_consume_token(RIGHT_PAR); + break; case RADIANS: + fct = jj_consume_token(RADIANS); + jj_consume_token(LEFT_PAR); + param1 = NumericExpression(); + end = jj_consume_token(RIGHT_PAR); + break; case RAND: + fct = jj_consume_token(RAND); + jj_consume_token(LEFT_PAR); + switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ + case LEFT_PAR: + case PLUS: + case MINUS: + case AVG: + case MAX: + case MIN: + case SUM: + case COUNT: + case CONTAINS: + case INTERSECTS: + case AREA: + case COORD1: + case COORD2: + case DISTANCE: + case ABS: + case CEILING: + case DEGREES: + case EXP: + case FLOOR: + case LOG: + case LOG10: + case MOD: + case PI: + case POWER: + case RADIANS: + case RAND: + case ROUND: + case SQRT: + case TRUNCATE: + case ACOS: + case ASIN: + case ATAN: + case ATAN2: + case COS: + case COT: + case SIN: + case TAN: + case DELIMITED_IDENTIFIER: + case REGULAR_IDENTIFIER: + case SCIENTIFIC_NUMBER: + case UNSIGNED_FLOAT: + case UNSIGNED_INTEGER: + param1 = NumericExpression(); + break; + default: + jj_la1[91] = jj_gen; + ; + } + end = jj_consume_token(RIGHT_PAR); + break; case ROUND: + fct = jj_consume_token(ROUND); + jj_consume_token(LEFT_PAR); + param1 = NumericExpression(); + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case COMMA: + jj_consume_token(COMMA); + param2 = SignedInteger(); + break; + default: + jj_la1[92] = jj_gen; + ; + } + end = jj_consume_token(RIGHT_PAR); + break; case SQRT: + fct = jj_consume_token(SQRT); + jj_consume_token(LEFT_PAR); + param1 = NumericExpression(); + end = jj_consume_token(RIGHT_PAR); + break; case TRUNCATE: - case ACOS: - case ASIN: - case ATAN: - case ATAN2: - case COS: - case COT: - case SIN: - case TAN: - case STRING_LITERAL: - case DELIMITED_IDENTIFIER: - case REGULAR_IDENTIFIER: - case SCIENTIFIC_NUMBER: - case UNSIGNED_FLOAT: - case UNSIGNED_INTEGER: - op = ValueExpression(); - params.add(op); - label_13: while(true){ - switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ - case COMMA: - ; - break; - default: - jj_la1[95] = jj_gen; - break label_13; - } - jj_consume_token(COMMA); - op = ValueExpression(); - params.add(op); + fct = jj_consume_token(TRUNCATE); + jj_consume_token(LEFT_PAR); + param1 = NumericExpression(); + switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ + case COMMA: + jj_consume_token(COMMA); + param2 = SignedInteger(); + break; + default: + jj_la1[93] = jj_gen; + ; } + end = jj_consume_token(RIGHT_PAR); break; default: - jj_la1[96] = jj_gen; - ; + jj_la1[94] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); } - end = jj_consume_token(RIGHT_PAR); - //System.out.println("INFO [ADQLParser]: \""+fct.image+"\" (from line "+fct.beginLine+" and column "+fct.beginColumn+" to line "+token.endLine+" and column "+(token.endColumn+1)+") is considered as an user defined function !"); - try{ - ADQLOperand[] parameters = new ADQLOperand[params.size()]; - for(int i = 0; i < params.size(); i++) - parameters[i] = params.get(i); - UserDefinedFunction udf = queryFactory.createUserDefinedFunction(fct.image, parameters); - udf.setPosition(new TextPosition(fct, end)); + if (param1 != null){ + MathFunction mf = queryFactory.createMathFunction(MathFunctionType.valueOf(fct.image.toUpperCase()), param1, param2); + mf.setPosition(new TextPosition(fct, end)); { if (true) - return udf; + return mf; } - }catch(UnsupportedOperationException uoe){ + }else{ + if (true) + return null; + } + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); + } + } + throw new Error("Missing return statement in function"); + } + + final public MathFunction TrigFunction() throws ParseException{ + Token fct = null, end; + ADQLOperand param1 = null, param2 = null; + switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ + case ACOS: + fct = jj_consume_token(ACOS); + jj_consume_token(LEFT_PAR); + param1 = NumericExpression(); + end = jj_consume_token(RIGHT_PAR); + break; + case ASIN: + fct = jj_consume_token(ASIN); + jj_consume_token(LEFT_PAR); + param1 = NumericExpression(); + end = jj_consume_token(RIGHT_PAR); + break; + case ATAN: + fct = jj_consume_token(ATAN); + jj_consume_token(LEFT_PAR); + param1 = NumericExpression(); + end = jj_consume_token(RIGHT_PAR); + break; + case ATAN2: + fct = jj_consume_token(ATAN2); + jj_consume_token(LEFT_PAR); + param1 = NumericExpression(); + jj_consume_token(COMMA); + param2 = NumericExpression(); + end = jj_consume_token(RIGHT_PAR); + break; + case COS: + fct = jj_consume_token(COS); + jj_consume_token(LEFT_PAR); + param1 = NumericExpression(); + end = jj_consume_token(RIGHT_PAR); + break; + case COT: + fct = jj_consume_token(COT); + jj_consume_token(LEFT_PAR); + param1 = NumericExpression(); + end = jj_consume_token(RIGHT_PAR); + break; + case SIN: + fct = jj_consume_token(SIN); + jj_consume_token(LEFT_PAR); + param1 = NumericExpression(); + end = jj_consume_token(RIGHT_PAR); + break; + case TAN: + fct = jj_consume_token(TAN); + jj_consume_token(LEFT_PAR); + param1 = NumericExpression(); + end = jj_consume_token(RIGHT_PAR); + break; + default: + jj_la1[95] = jj_gen; + jj_consume_token(-1); + throw new ParseException(); + } + try{ + if (param1 != null){ + MathFunction mf = queryFactory.createMathFunction(MathFunctionType.valueOf(fct.image.toUpperCase()), param1, param2); + mf.setPosition(new TextPosition(fct, end)); { if (true) - throw new ParseException(uoe.getMessage(), new TextPosition(fct, token)); + return mf; } - }catch(Exception ex){ - { - if (true) - throw generateParseException(ex); + }else{ + if (true) + return null; + } + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); + } + } + throw new Error("Missing return statement in function"); + } + + final public UserDefinedFunction 
UserDefinedFunction() throws ParseException{ + Token fct, end; + Vector params = new Vector(); + ADQLOperand op; + fct = jj_consume_token(REGULAR_IDENTIFIER); + jj_consume_token(LEFT_PAR); + switch((jj_ntk == -1) ? jj_ntk() : jj_ntk){ + case LEFT_PAR: + case PLUS: + case MINUS: + case AVG: + case MAX: + case MIN: + case SUM: + case COUNT: + case BOX: + case CENTROID: + case CIRCLE: + case POINT: + case POLYGON: + case REGION: + case CONTAINS: + case INTERSECTS: + case AREA: + case COORD1: + case COORD2: + case COORDSYS: + case DISTANCE: + case ABS: + case CEILING: + case DEGREES: + case EXP: + case FLOOR: + case LOG: + case LOG10: + case MOD: + case PI: + case POWER: + case RADIANS: + case RAND: + case ROUND: + case SQRT: + case TRUNCATE: + case ACOS: + case ASIN: + case ATAN: + case ATAN2: + case COS: + case COT: + case SIN: + case TAN: + case STRING_LITERAL: + case DELIMITED_IDENTIFIER: + case REGULAR_IDENTIFIER: + case SCIENTIFIC_NUMBER: + case UNSIGNED_FLOAT: + case UNSIGNED_INTEGER: + op = ValueExpression(); + params.add(op); + label_13: while(true){ + switch((jj_ntk == -1) ? 
jj_ntk() : jj_ntk){ + case COMMA: + ; + break; + default: + jj_la1[96] = jj_gen; + break label_13; + } + jj_consume_token(COMMA); + op = ValueExpression(); + params.add(op); } + break; + default: + jj_la1[97] = jj_gen; + ; + } + end = jj_consume_token(RIGHT_PAR); + //System.out.println("INFO [ADQLParser]: \""+fct.image+"\" (from line "+fct.beginLine+" and column "+fct.beginColumn+" to line "+token.endLine+" and column "+(token.endColumn+1)+") is considered as an user defined function !"); + try{ + // Build the parameters list: + ADQLOperand[] parameters = new ADQLOperand[params.size()]; + for(int i = 0; i < params.size(); i++) + parameters[i] = params.get(i); + // Create the UDF function: + UserDefinedFunction udf = queryFactory.createUserDefinedFunction(fct.image, parameters); + udf.setPosition(new TextPosition(fct, end)); + { + if (true) + return udf; + } + }catch(UnsupportedOperationException uoe){ + /* This catch clause is just for backward compatibility: + * if the createUserDefinedFunction(...) is overridden and + * the function can not be identified a such exception may be thrown). 
*/ + { + if (true) + throw new ParseException(uoe.getMessage(), new TextPosition(fct, token)); + } + }catch(Exception ex){ + { + if (true) + throw generateParseException(ex); } - throw new Error("Missing return statement in function"); - }finally{ - trace_return("UserDefinedFunction"); } + throw new Error("Missing return statement in function"); } private boolean jj_2_1(int xla){ @@ -3727,251 +3510,385 @@ public class ADQLParser implements ADQLParserConstants { } } - private boolean jj_3R_16(){ + private boolean jj_2_14(int xla){ + jj_la = xla; + jj_lastpos = jj_scanpos = token; + try{ + return !jj_3_14(); + }catch(LookaheadSuccess ls){ + return true; + }finally{ + jj_save(13, xla); + } + } + + private boolean jj_2_15(int xla){ + jj_la = xla; + jj_lastpos = jj_scanpos = token; + try{ + return !jj_3_15(); + }catch(LookaheadSuccess ls){ + return true; + }finally{ + jj_save(14, xla); + } + } + + private boolean jj_2_16(int xla){ + jj_la = xla; + jj_lastpos = jj_scanpos = token; + try{ + return !jj_3_16(); + }catch(LookaheadSuccess ls){ + return true; + }finally{ + jj_save(15, xla); + } + } + + private boolean jj_3R_138(){ + if (jj_scan_token(CIRCLE)) + return true; if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_28()) + if (jj_3R_169()) + return true; + if (jj_scan_token(COMMA)) + return true; + if (jj_3R_170()) + return true; + if (jj_scan_token(COMMA)) + return true; + if (jj_3R_108()) return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_68(){ - if (jj_3R_23()) + private boolean jj_3R_126(){ + if (jj_scan_token(LEFT_PAR)) + return true; + if (jj_3R_27()) + return true; + if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_67(){ - if (jj_3R_116()) + private boolean jj_3R_137(){ + if (jj_scan_token(CENTROID)) + return true; + if (jj_scan_token(LEFT_PAR)) + return true; + if (jj_3R_122()) + return true; + if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean 
jj_3R_66(){ - if (jj_3R_115()) + private boolean jj_3R_125(){ + if (jj_3R_21()) + return true; + return false; + } + + private boolean jj_3R_46(){ + Token xsp; + xsp = jj_scanpos; + if (jj_3R_62()){ + jj_scanpos = xsp; + if (jj_3R_63()){ + jj_scanpos = xsp; + if (jj_3R_64()){ + jj_scanpos = xsp; + if (jj_3R_65()){ + jj_scanpos = xsp; + if (jj_3R_66()){ + jj_scanpos = xsp; + if (jj_3R_67()){ + jj_scanpos = xsp; + if (jj_3R_68()){ + jj_scanpos = xsp; + if (jj_3R_69()) + return true; + } + } + } + } + } + } + } + return false; + } + + private boolean jj_3R_148(){ + if (jj_scan_token(TOP)) + return true; + if (jj_scan_token(UNSIGNED_INTEGER)) return true; return false; } private boolean jj_3R_124(){ + if (jj_3R_22()) + return true; + return false; + } + + private boolean jj_3R_147(){ + if (jj_scan_token(QUANTIFIER)) + return true; + return false; + } + + private boolean jj_3R_112(){ + if (jj_scan_token(FULL)) + return true; + return false; + } + + private boolean jj_3R_48(){ + if (jj_scan_token(SELECT)) + return true; + Token xsp; + xsp = jj_scanpos; + if (jj_3R_147()) + jj_scanpos = xsp; + xsp = jj_scanpos; + if (jj_3R_148()) + jj_scanpos = xsp; + if (jj_3R_149()) + return true; + while(true){ + xsp = jj_scanpos; + if (jj_3R_150()){ + jj_scanpos = xsp; + break; + } + } + return false; + } + + private boolean jj_3R_136(){ if (jj_scan_token(BOX)) return true; if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_157()) + if (jj_3R_169()) return true; if (jj_scan_token(COMMA)) return true; - if (jj_3R_158()) + if (jj_3R_170()) return true; if (jj_scan_token(COMMA)) return true; - if (jj_3R_102()) + if (jj_3R_108()) return true; if (jj_scan_token(COMMA)) return true; - if (jj_3R_102()) + if (jj_3R_108()) return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_109(){ - if (jj_scan_token(FULL)) + private boolean jj_3R_182(){ + if (jj_3R_21()) return true; return false; } - private boolean jj_3R_122(){ - if (jj_3R_144()) + private 
boolean jj_3R_121(){ + if (jj_scan_token(LEFT_PAR)) + return true; + if (jj_3R_108()) + return true; + if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_49(){ - Token xsp; - xsp = jj_scanpos; - if (jj_3R_66()){ - jj_scanpos = xsp; - if (jj_3R_67()){ - jj_scanpos = xsp; - if (jj_3R_68()){ - jj_scanpos = xsp; - if (jj_3R_69()){ - jj_scanpos = xsp; - if (jj_3R_70()) - return true; - } - } - } - } + private boolean jj_3R_16(){ + if (jj_scan_token(LEFT_PAR)) + return true; + if (jj_3R_31()) + return true; + if (jj_scan_token(RIGHT_PAR)) + return true; return false; } - private boolean jj_3R_121(){ + private boolean jj_3R_120(){ if (jj_3R_143()) return true; return false; } - private boolean jj_3R_101(){ + private boolean jj_3R_76(){ Token xsp; xsp = jj_scanpos; if (jj_3R_124()){ jj_scanpos = xsp; if (jj_3R_125()){ jj_scanpos = xsp; - if (jj_3R_126()){ - jj_scanpos = xsp; - if (jj_3R_127()){ - jj_scanpos = xsp; - if (jj_3R_128()){ - jj_scanpos = xsp; - if (jj_3R_129()) - return true; - } - } - } + if (jj_3R_126()) + return true; } } return false; } - private boolean jj_3R_190(){ - if (jj_3R_23()) + private boolean jj_3R_119(){ + if (jj_3R_21()) return true; return false; } - private boolean jj_3R_120(){ + private boolean jj_3R_177(){ + if (jj_3R_158()) + return true; + return false; + } + + private boolean jj_3R_118(){ if (jj_3R_142()) return true; return false; } - private boolean jj_3R_119(){ - if (jj_3R_141()) + private boolean jj_3R_109(){ + Token xsp; + xsp = jj_scanpos; + if (jj_3R_136()){ + jj_scanpos = xsp; + if (jj_3R_137()){ + jj_scanpos = xsp; + if (jj_3R_138()){ + jj_scanpos = xsp; + if (jj_3R_139()){ + jj_scanpos = xsp; + if (jj_3R_140()){ + jj_scanpos = xsp; + if (jj_3R_141()) + return true; + } + } + } + } + } + return false; + } + + private boolean jj_3R_183(){ + if (jj_3R_46()) return true; return false; } - private boolean jj_3R_185(){ - if (jj_3R_147()) + private boolean jj_3R_175(){ + if (jj_3R_158()) return true; 
return false; } - private boolean jj_3R_183(){ - if (jj_3R_147()) + private boolean jj_3R_180(){ + if (jj_3R_21()) return true; return false; } - private boolean jj_3R_149(){ - if (jj_3R_41()) + private boolean jj_3R_181(){ + if (jj_3R_158()) return true; return false; } - private boolean jj_3R_188(){ - if (jj_3R_23()) + private boolean jj_3R_169(){ + if (jj_3R_27()) return true; return false; } - private boolean jj_3R_189(){ - if (jj_3R_147()) + private boolean jj_3R_115(){ + if (jj_scan_token(FULL)) return true; return false; } - private boolean jj_3R_105(){ - if (jj_scan_token(RIGHT)) + private boolean jj_3R_133(){ + if (jj_3R_155()) return true; return false; } - private boolean jj_3R_28(){ - if (jj_3R_44()) - return true; - if (jj_3R_118()) - return true; + private boolean jj_3R_74(){ Token xsp; xsp = jj_scanpos; - if (jj_3R_119()) - jj_scanpos = xsp; - xsp = jj_scanpos; - if (jj_3R_120()) - jj_scanpos = xsp; - xsp = jj_scanpos; - if (jj_3R_121()) - jj_scanpos = xsp; - xsp = jj_scanpos; - if (jj_3R_122()) + if (jj_3R_118()){ jj_scanpos = xsp; + if (jj_3R_119()){ + jj_scanpos = xsp; + if (jj_3R_120()){ + jj_scanpos = xsp; + if (jj_3R_121()) + return true; + } + } + } return false; } - private boolean jj_3R_157(){ - if (jj_3R_24()) + private boolean jj_3R_132(){ + if (jj_3R_154()) return true; return false; } - private boolean jj_3R_192(){ - Token xsp; - xsp = jj_scanpos; - if (jj_scan_token(8)){ - jj_scanpos = xsp; - if (jj_scan_token(9)) - return true; - } + private boolean jj_3R_179(){ + if (jj_3R_158()) + return true; return false; } - private boolean jj_3R_191(){ - Token xsp; - xsp = jj_scanpos; - if (jj_3R_192()) - jj_scanpos = xsp; - if (jj_scan_token(UNSIGNED_INTEGER)) + private boolean jj_3R_131(){ + if (jj_3R_153()) return true; return false; } - private boolean jj_3R_187(){ - if (jj_3R_147()) + private boolean jj_3R_130(){ + if (jj_3R_152()) return true; return false; } - private boolean jj_3R_108(){ + private boolean jj_3R_111(){ if 
(jj_scan_token(RIGHT)) return true; return false; } - private boolean jj_3R_98(){ + private boolean jj_3R_105(){ if (jj_scan_token(DISTANCE)) return true; if (jj_scan_token(LEFT_PAR)) return true; Token xsp; xsp = jj_scanpos; - if (jj_3R_187()){ + if (jj_3R_179()){ jj_scanpos = xsp; - if (jj_3R_188()) + if (jj_3R_180()) return true; } if (jj_scan_token(COMMA)) return true; xsp = jj_scanpos; - if (jj_3R_189()){ + if (jj_3R_181()){ jj_scanpos = xsp; - if (jj_3R_190()) + if (jj_3R_182()) return true; } if (jj_scan_token(RIGHT_PAR)) @@ -3979,16 +3896,16 @@ public class ADQLParser implements ADQLParserConstants { return false; } - private boolean jj_3R_97(){ + private boolean jj_3R_104(){ if (jj_scan_token(COORD2)) return true; if (jj_scan_token(LEFT_PAR)) return true; Token xsp; xsp = jj_scanpos; - if (jj_3R_185()){ + if (jj_3R_177()){ jj_scanpos = xsp; - if (jj_3R_186()) + if (jj_3R_178()) return true; } if (jj_scan_token(RIGHT_PAR)) @@ -3996,16 +3913,37 @@ public class ADQLParser implements ADQLParserConstants { return false; } - private boolean jj_3R_96(){ + private boolean jj_3R_31(){ + if (jj_3R_48()) + return true; + if (jj_3R_129()) + return true; + Token xsp; + xsp = jj_scanpos; + if (jj_3R_130()) + jj_scanpos = xsp; + xsp = jj_scanpos; + if (jj_3R_131()) + jj_scanpos = xsp; + xsp = jj_scanpos; + if (jj_3R_132()) + jj_scanpos = xsp; + xsp = jj_scanpos; + if (jj_3R_133()) + jj_scanpos = xsp; + return false; + } + + private boolean jj_3R_103(){ if (jj_scan_token(COORD1)) return true; if (jj_scan_token(LEFT_PAR)) return true; Token xsp; xsp = jj_scanpos; - if (jj_3R_183()){ + if (jj_3R_175()){ jj_scanpos = xsp; - if (jj_3R_184()) + if (jj_3R_176()) return true; } if (jj_scan_token(RIGHT_PAR)) @@ -4013,27 +3951,40 @@ public class ADQLParser implements ADQLParserConstants { return false; } - private boolean jj_3R_95(){ + private boolean jj_3R_102(){ if (jj_scan_token(AREA)) return true; if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_64()) + if (jj_3R_122()) 
return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_179(){ - if (jj_scan_token(COMMA)) - return true; - if (jj_3R_14()) + private boolean jj_3R_197(){ + Token xsp; + xsp = jj_scanpos; + if (jj_scan_token(8)){ + jj_scanpos = xsp; + if (jj_scan_token(9)) + return true; + } + return false; + } + + private boolean jj_3R_191(){ + Token xsp; + xsp = jj_scanpos; + if (jj_3R_197()) + jj_scanpos = xsp; + if (jj_scan_token(UNSIGNED_INTEGER)) return true; return false; } - private boolean jj_3R_94(){ + private boolean jj_3R_101(){ Token xsp; xsp = jj_scanpos; if (jj_scan_token(58)){ @@ -4043,94 +3994,164 @@ public class ADQLParser implements ADQLParserConstants { } if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_64()) + if (jj_3R_122()) return true; if (jj_scan_token(COMMA)) return true; - if (jj_3R_64()) + if (jj_3R_122()) return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_104(){ - if (jj_scan_token(LEFT)) + private boolean jj_3R_114(){ + if (jj_scan_token(RIGHT)) return true; return false; } - private boolean jj_3R_115(){ + private boolean jj_3R_201(){ + if (jj_scan_token(COMMA)) + return true; + if (jj_3R_14()) + return true; + return false; + } + + private boolean jj_3R_59(){ Token xsp; xsp = jj_scanpos; - if (jj_scan_token(99)){ + if (jj_3R_101()){ jj_scanpos = xsp; - if (jj_scan_token(100)){ + if (jj_3R_102()){ jj_scanpos = xsp; - if (jj_scan_token(101)) - return true; + if (jj_3R_103()){ + jj_scanpos = xsp; + if (jj_3R_104()){ + jj_scanpos = xsp; + if (jj_3R_105()) + return true; + } + } } } return false; } - private boolean jj_3R_61(){ + private boolean jj_3R_157(){ + if (jj_scan_token(COMMA)) + return true; + if (jj_3R_46()) + return true; + return false; + } + + private boolean jj_3R_170(){ + if (jj_3R_108()) + return true; + if (jj_scan_token(COMMA)) + return true; + if (jj_3R_108()) + return true; + return false; + } + + private boolean jj_3R_160(){ Token xsp; xsp = 
jj_scanpos; - if (jj_3R_104()){ + if (jj_scan_token(47)){ jj_scanpos = xsp; - if (jj_3R_105()){ + if (jj_scan_token(48)){ jj_scanpos = xsp; - if (jj_3R_106()) - return true; + if (jj_scan_token(49)){ + jj_scanpos = xsp; + if (jj_scan_token(50)) + return true; + } } } + if (jj_scan_token(LEFT_PAR)) + return true; xsp = jj_scanpos; - if (jj_scan_token(25)) + if (jj_scan_token(19)) jj_scanpos = xsp; + if (jj_3R_46()) + return true; + if (jj_scan_token(RIGHT_PAR)) + return true; return false; } - private boolean jj_3R_45(){ + private boolean jj_3R_142(){ Token xsp; xsp = jj_scanpos; - if (jj_scan_token(24)){ + if (jj_scan_token(99)){ jj_scanpos = xsp; - if (jj_3R_61()) - return true; + if (jj_scan_token(100)){ + jj_scanpos = xsp; + if (jj_scan_token(101)) + return true; + } } return false; } - private boolean jj_3R_52(){ + private boolean jj_3R_110(){ + if (jj_scan_token(LEFT)) + return true; + return false; + } + + private boolean jj_3R_70(){ Token xsp; xsp = jj_scanpos; - if (jj_3R_94()){ + if (jj_3R_110()){ jj_scanpos = xsp; - if (jj_3R_95()){ + if (jj_3R_111()){ jj_scanpos = xsp; - if (jj_3R_96()){ - jj_scanpos = xsp; - if (jj_3R_97()){ - jj_scanpos = xsp; - if (jj_3R_98()) - return true; - } - } + if (jj_3R_112()) + return true; } } + xsp = jj_scanpos; + if (jj_scan_token(25)) + jj_scanpos = xsp; return false; } - private boolean jj_3R_146(){ - if (jj_scan_token(COMMA)) + private boolean jj_3R_159(){ + if (jj_scan_token(COUNT)) + return true; + if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_41()) + Token xsp; + xsp = jj_scanpos; + if (jj_scan_token(19)) + jj_scanpos = xsp; + xsp = jj_scanpos; + if (jj_scan_token(10)){ + jj_scanpos = xsp; + if (jj_3R_183()) + return true; + } + if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_178(){ + private boolean jj_3R_49(){ + Token xsp; + xsp = jj_scanpos; + if (jj_scan_token(24)){ + jj_scanpos = xsp; + if (jj_3R_70()) + return true; + } + return false; + } + + private boolean 
jj_3R_200(){ if (jj_scan_token(USING)) return true; if (jj_scan_token(LEFT_PAR)) @@ -4140,7 +4161,7 @@ public class ADQLParser implements ADQLParserConstants { Token xsp; while(true){ xsp = jj_scanpos; - if (jj_3R_179()){ + if (jj_3R_201()){ jj_scanpos = xsp; break; } @@ -4150,29 +4171,19 @@ public class ADQLParser implements ADQLParserConstants { return false; } - private boolean jj_3R_133(){ + private boolean jj_3R_37(){ if (jj_scan_token(STRING_LITERAL)) return true; return false; } - private boolean jj_3R_158(){ - if (jj_3R_102()) - return true; - if (jj_scan_token(COMMA)) - return true; - if (jj_3R_102()) - return true; - return false; - } - - private boolean jj_3R_116(){ + private boolean jj_3R_22(){ Token xsp; - if (jj_3R_133()) + if (jj_3R_37()) return true; while(true){ xsp = jj_scanpos; - if (jj_3R_133()){ + if (jj_3R_37()){ jj_scanpos = xsp; break; } @@ -4180,46 +4191,20 @@ public class ADQLParser implements ADQLParserConstants { return false; } - private boolean jj_3R_135(){ - Token xsp; - xsp = jj_scanpos; - if (jj_scan_token(47)){ - jj_scanpos = xsp; - if (jj_scan_token(48)){ - jj_scanpos = xsp; - if (jj_scan_token(49)){ - jj_scanpos = xsp; - if (jj_scan_token(50)) - return true; - } - } - } - if (jj_scan_token(LEFT_PAR)) - return true; - xsp = jj_scanpos; - if (jj_scan_token(19)) - jj_scanpos = xsp; - if (jj_3R_41()) - return true; - if (jj_scan_token(RIGHT_PAR)) - return true; - return false; - } - - private boolean jj_3R_107(){ + private boolean jj_3R_113(){ if (jj_scan_token(LEFT)) return true; return false; } - private boolean jj_3R_62(){ + private boolean jj_3R_71(){ Token xsp; xsp = jj_scanpos; - if (jj_3R_107()){ + if (jj_3R_113()){ jj_scanpos = xsp; - if (jj_3R_108()){ + if (jj_3R_114()){ jj_scanpos = xsp; - if (jj_3R_109()) + if (jj_3R_115()) return true; } } @@ -4229,83 +4214,103 @@ public class ADQLParser implements ADQLParserConstants { return false; } - private boolean jj_3R_177(){ + private boolean jj_3R_199(){ if (jj_scan_token(ON)) 
return true; - if (jj_3R_152()) + if (jj_3R_163()) return true; return false; } - private boolean jj_3R_134(){ - if (jj_scan_token(COUNT)) - return true; - if (jj_scan_token(LEFT_PAR)) - return true; + private boolean jj_3R_143(){ Token xsp; xsp = jj_scanpos; - if (jj_scan_token(19)) - jj_scanpos = xsp; - xsp = jj_scanpos; - if (jj_scan_token(10)){ + if (jj_3R_159()){ jj_scanpos = xsp; - if (jj_3R_149()) + if (jj_3R_160()) return true; } - if (jj_scan_token(RIGHT_PAR)) - return true; return false; } - private boolean jj_3R_46(){ + private boolean jj_3R_50(){ Token xsp; xsp = jj_scanpos; if (jj_scan_token(24)){ jj_scanpos = xsp; - if (jj_3R_62()) + if (jj_3R_71()) return true; } return false; } - private boolean jj_3R_30(){ + private boolean jj_3R_33(){ Token xsp; xsp = jj_scanpos; - if (jj_3R_46()) + if (jj_3R_50()) jj_scanpos = xsp; if (jj_scan_token(JOIN)) return true; - if (jj_3R_47()) + if (jj_3R_51()) return true; xsp = jj_scanpos; - if (jj_3R_177()){ + if (jj_3R_199()){ jj_scanpos = xsp; - if (jj_3R_178()) + if (jj_3R_200()) return true; } return false; } - private boolean jj_3R_29(){ + private boolean jj_3R_32(){ if (jj_scan_token(NATURAL)) return true; Token xsp; xsp = jj_scanpos; - if (jj_3R_45()) + if (jj_3R_49()) jj_scanpos = xsp; if (jj_scan_token(JOIN)) return true; - if (jj_3R_47()) + if (jj_3R_51()) return true; return false; } - private boolean jj_3R_117(){ + private boolean jj_3R_134(){ + if (jj_scan_token(LEFT_PAR)) + return true; + if (jj_3R_46()) + return true; + Token xsp; + while(true){ + xsp = jj_scanpos; + if (jj_3R_157()){ + jj_scanpos = xsp; + break; + } + } + if (jj_scan_token(RIGHT_PAR)) + return true; + return false; + } + + private boolean jj_3_16(){ + if (jj_3R_16()) + return true; + return false; + } + + private boolean jj_3R_107(){ Token xsp; xsp = jj_scanpos; - if (jj_3R_134()){ + if (jj_scan_token(35)) + jj_scanpos = xsp; + if (jj_scan_token(IN)) + return true; + xsp = jj_scanpos; + if (jj_3_16()){ jj_scanpos = xsp; - if 
(jj_3R_135()) + if (jj_3R_134()) return true; } return false; @@ -4314,21 +4319,21 @@ public class ADQLParser implements ADQLParserConstants { private boolean jj_3R_17(){ Token xsp; xsp = jj_scanpos; - if (jj_3R_29()){ + if (jj_3R_32()){ jj_scanpos = xsp; - if (jj_3R_30()) + if (jj_3R_33()) return true; } return false; } - private boolean jj_3R_176(){ + private boolean jj_3R_198(){ if (jj_3R_17()) return true; return false; } - private boolean jj_3R_171(){ + private boolean jj_3R_192(){ Token xsp; xsp = jj_scanpos; if (jj_scan_token(22)) @@ -4338,15 +4343,15 @@ public class ADQLParser implements ADQLParserConstants { return false; } - private boolean jj_3R_172(){ - if (jj_3R_63()) + private boolean jj_3R_193(){ + if (jj_3R_72()) return true; Token xsp; - if (jj_3R_176()) + if (jj_3R_198()) return true; while(true){ xsp = jj_scanpos; - if (jj_3R_176()){ + if (jj_3R_198()){ jj_scanpos = xsp; break; } @@ -4354,10 +4359,10 @@ public class ADQLParser implements ADQLParserConstants { return false; } - private boolean jj_3R_111(){ + private boolean jj_3R_117(){ if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_172()) + if (jj_3R_193()) return true; if (jj_scan_token(RIGHT_PAR)) return true; @@ -4370,25 +4375,23 @@ public class ADQLParser implements ADQLParserConstants { return false; } - private boolean jj_3R_123(){ - if (jj_scan_token(LEFT_PAR)) + private boolean jj_3R_26(){ + Token xsp; + xsp = jj_scanpos; + if (jj_scan_token(35)) + jj_scanpos = xsp; + if (jj_scan_token(BETWEEN)) return true; - if (jj_3R_41()) + if (jj_3R_46()) return true; - Token xsp; - while(true){ - xsp = jj_scanpos; - if (jj_3R_146()){ - jj_scanpos = xsp; - break; - } - } - if (jj_scan_token(RIGHT_PAR)) + if (jj_scan_token(AND)) + return true; + if (jj_3R_46()) return true; return false; } - private boolean jj_3R_169(){ + private boolean jj_3R_190(){ Token xsp; xsp = jj_scanpos; if (jj_scan_token(45)){ @@ -4399,8 +4402,8 @@ public class ADQLParser implements ADQLParserConstants { return false; 
} - private boolean jj_3R_47(){ - if (jj_3R_63()) + private boolean jj_3R_51(){ + if (jj_3R_72()) return true; Token xsp; while(true){ @@ -4413,85 +4416,25 @@ public class ADQLParser implements ADQLParserConstants { return false; } - private boolean jj_3_13(){ - if (jj_3R_16()) - return true; - return false; - } - - private boolean jj_3R_100(){ - Token xsp; - xsp = jj_scanpos; - if (jj_scan_token(35)) - jj_scanpos = xsp; - if (jj_scan_token(IN)) - return true; - xsp = jj_scanpos; - if (jj_3_13()){ - jj_scanpos = xsp; - if (jj_3R_123()) - return true; - } - return false; - } - - private boolean jj_3_2(){ - if (jj_3R_16()) - return true; - Token xsp; - xsp = jj_scanpos; - if (jj_scan_token(22)) - jj_scanpos = xsp; - if (jj_3R_14()) - return true; - return false; - } - - private boolean jj_3R_110(){ - if (jj_3R_103()) - return true; - Token xsp; - xsp = jj_scanpos; - if (jj_3R_171()) - jj_scanpos = xsp; - return false; - } - - private boolean jj_3R_22(){ - Token xsp; - xsp = jj_scanpos; - if (jj_scan_token(35)) - jj_scanpos = xsp; - if (jj_scan_token(BETWEEN)) - return true; - if (jj_3R_41()) - return true; - if (jj_scan_token(AND)) - return true; - if (jj_3R_41()) - return true; - return false; - } - - private boolean jj_3R_54(){ - if (jj_3R_100()) + private boolean jj_3R_61(){ + if (jj_3R_107()) return true; return false; } - private boolean jj_3_10(){ - if (jj_3R_22()) + private boolean jj_3_13(){ + if (jj_3R_26()) return true; return false; } - private boolean jj_3R_53(){ - if (jj_3R_99()) + private boolean jj_3R_60(){ + if (jj_3R_106()) return true; return false; } - private boolean jj_3R_99(){ + private boolean jj_3R_106(){ Token xsp; xsp = jj_scanpos; if (jj_scan_token(12)){ @@ -4511,13 +4454,13 @@ public class ADQLParser implements ADQLParserConstants { } } } - if (jj_3R_41()) + if (jj_3R_46()) return true; return false; } - private boolean jj_3_12(){ - if (jj_3R_24()) + private boolean jj_3_15(){ + if (jj_3R_27()) return true; Token xsp; xsp = jj_scanpos; @@ 
-4528,46 +4471,54 @@ public class ADQLParser implements ADQLParserConstants { return false; } - private boolean jj_3R_63(){ + private boolean jj_3_2(){ + if (jj_3R_16()) + return true; Token xsp; xsp = jj_scanpos; - if (jj_3R_110()){ + if (jj_scan_token(22)) jj_scanpos = xsp; - if (jj_3_2()){ - jj_scanpos = xsp; - if (jj_3R_111()) - return true; - } - } + if (jj_3R_14()) + return true; return false; } - private boolean jj_3R_40(){ - if (jj_3R_41()) + private boolean jj_3R_116(){ + if (jj_3R_77()) + return true; + Token xsp; + xsp = jj_scanpos; + if (jj_3R_192()) + jj_scanpos = xsp; + return false; + } + + private boolean jj_3R_45(){ + if (jj_3R_46()) return true; Token xsp; xsp = jj_scanpos; - if (jj_3R_53()){ + if (jj_3R_60()){ jj_scanpos = xsp; - if (jj_3_10()){ + if (jj_3_13()){ jj_scanpos = xsp; - if (jj_3R_54()) + if (jj_3R_61()) return true; } } return false; } - private boolean jj_3_11(){ - if (jj_3R_23()) + private boolean jj_3_14(){ + if (jj_3R_21()) return true; if (jj_scan_token(IS)) return true; return false; } - private boolean jj_3R_39(){ - if (jj_3R_24()) + private boolean jj_3R_44(){ + if (jj_3R_27()) return true; Token xsp; xsp = jj_scanpos; @@ -4575,13 +4526,13 @@ public class ADQLParser implements ADQLParserConstants { jj_scanpos = xsp; if (jj_scan_token(LIKE)) return true; - if (jj_3R_24()) + if (jj_3R_27()) return true; return false; } - private boolean jj_3R_38(){ - if (jj_3R_23()) + private boolean jj_3R_43(){ + if (jj_3R_21()) return true; if (jj_scan_token(IS)) return true; @@ -4594,35 +4545,21 @@ public class ADQLParser implements ADQLParserConstants { return false; } - private boolean jj_3R_168(){ - if (jj_3R_42()) - return true; - return false; - } - - private boolean jj_3R_114(){ - if (jj_scan_token(COMMA)) - return true; - if (jj_3R_41()) - return true; - return false; - } - - private boolean jj_3R_155(){ + private boolean jj_3R_72(){ Token xsp; xsp = jj_scanpos; - if (jj_3R_168()){ + if (jj_3R_116()){ jj_scanpos = xsp; - if 
(jj_scan_token(101)) - return true; + if (jj_3_2()){ + jj_scanpos = xsp; + if (jj_3R_117()) + return true; + } } - xsp = jj_scanpos; - if (jj_3R_169()) - jj_scanpos = xsp; return false; } - private boolean jj_3R_37(){ + private boolean jj_3R_42(){ if (jj_scan_token(EXISTS)) return true; if (jj_3R_16()) @@ -4630,33 +4567,24 @@ public class ADQLParser implements ADQLParserConstants { return false; } - private boolean jj_3R_167(){ - if (jj_3R_42()) + private boolean jj_3R_146(){ + if (jj_scan_token(COMMA)) + return true; + if (jj_3R_46()) return true; return false; } - private boolean jj_3R_153(){ - Token xsp; - xsp = jj_scanpos; - if (jj_3R_167()){ - jj_scanpos = xsp; - if (jj_scan_token(101)) - return true; - } - return false; - } - - private boolean jj_3R_21(){ + private boolean jj_3R_25(){ Token xsp; xsp = jj_scanpos; - if (jj_3R_37()){ + if (jj_3R_42()){ jj_scanpos = xsp; - if (jj_3R_38()){ + if (jj_3R_43()){ jj_scanpos = xsp; - if (jj_3R_39()){ + if (jj_3R_44()){ jj_scanpos = xsp; - if (jj_3R_40()) + if (jj_3R_45()) return true; } } @@ -4664,100 +4592,111 @@ public class ADQLParser implements ADQLParserConstants { return false; } - private boolean jj_3R_60(){ - if (jj_scan_token(DOT)) - return true; - if (jj_3R_103()) + private boolean jj_3R_189(){ + if (jj_3R_36()) return true; return false; } - private boolean jj_3_9(){ - if (jj_3R_21()) - return true; + private boolean jj_3R_166(){ + Token xsp; + xsp = jj_scanpos; + if (jj_3R_189()){ + jj_scanpos = xsp; + if (jj_scan_token(101)) + return true; + } + xsp = jj_scanpos; + if (jj_3R_190()) + jj_scanpos = xsp; return false; } - private boolean jj_3R_23(){ - if (jj_3R_42()) + private boolean jj_3_12(){ + if (jj_3R_25()) return true; return false; } - private boolean jj_3R_174(){ + private boolean jj_3R_195(){ if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_152()) + if (jj_3R_163()) return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_173(){ - if (jj_3R_21()) + 
private boolean jj_3R_194(){ + if (jj_3R_25()) return true; return false; } - private boolean jj_3R_165(){ + private boolean jj_3R_188(){ + if (jj_3R_36()) + return true; + return false; + } + + private boolean jj_3R_186(){ Token xsp; xsp = jj_scanpos; - if (jj_3R_173()){ + if (jj_3R_194()){ jj_scanpos = xsp; - if (jj_3R_174()) + if (jj_3R_195()) return true; } return false; } - private boolean jj_3R_42(){ - if (jj_3R_14()) - return true; + private boolean jj_3R_164(){ Token xsp; xsp = jj_scanpos; - if (jj_3R_60()) + if (jj_3R_188()){ jj_scanpos = xsp; - return false; - } - - private boolean jj_3R_65(){ - if (jj_3R_41()) - return true; - Token xsp; - while(true){ - xsp = jj_scanpos; - if (jj_3R_114()){ - jj_scanpos = xsp; - break; - } + if (jj_scan_token(101)) + return true; } return false; } - private boolean jj_3R_132(){ + private boolean jj_3R_56(){ if (jj_scan_token(DOT)) return true; - if (jj_3R_14()) + if (jj_3R_77()) return true; return false; } - private boolean jj_3R_131(){ - if (jj_scan_token(DOT)) + private boolean jj_3R_21(){ + if (jj_3R_36()) return true; - if (jj_3R_14()) + return false; + } + + private boolean jj_3R_123(){ + if (jj_3R_46()) return true; + Token xsp; + while(true){ + xsp = jj_scanpos; + if (jj_3R_146()){ + jj_scanpos = xsp; + break; + } + } return false; } - private boolean jj_3R_175(){ + private boolean jj_3R_196(){ if (jj_scan_token(NOT)) return true; return false; } - private boolean jj_3R_166(){ + private boolean jj_3R_187(){ Token xsp; xsp = jj_scanpos; if (jj_scan_token(33)){ @@ -4766,14 +4705,14 @@ public class ADQLParser implements ADQLParserConstants { return true; } xsp = jj_scanpos; - if (jj_3R_175()) + if (jj_3R_196()) jj_scanpos = xsp; - if (jj_3R_165()) + if (jj_3R_186()) return true; return false; } - private boolean jj_3R_182(){ + private boolean jj_3R_174(){ if (jj_scan_token(COMMA)) return true; if (jj_3R_191()) @@ -4781,20 +4720,25 @@ public class ADQLParser implements ADQLParserConstants { return false; } - private 
boolean jj_3R_103(){ + private boolean jj_3R_36(){ if (jj_3R_14()) return true; Token xsp; xsp = jj_scanpos; - if (jj_3R_131()) - jj_scanpos = xsp; - xsp = jj_scanpos; - if (jj_3R_132()) + if (jj_3R_56()) jj_scanpos = xsp; return false; } - private boolean jj_3R_181(){ + private boolean jj_3R_128(){ + if (jj_scan_token(DOT)) + return true; + if (jj_3R_14()) + return true; + return false; + } + + private boolean jj_3R_173(){ if (jj_scan_token(COMMA)) return true; if (jj_3R_191()) @@ -4802,66 +4746,61 @@ public class ADQLParser implements ADQLParserConstants { return false; } - private boolean jj_3R_26(){ - if (jj_scan_token(DELIMITED_IDENTIFIER)) + private boolean jj_3R_127(){ + if (jj_scan_token(DOT)) + return true; + if (jj_3R_14()) return true; return false; } - private boolean jj_3R_25(){ - if (jj_scan_token(REGULAR_IDENTIFIER)) + private boolean jj_3R_185(){ + if (jj_scan_token(NOT)) return true; return false; } - private boolean jj_3R_14(){ + private boolean jj_3R_77(){ + if (jj_3R_14()) + return true; Token xsp; xsp = jj_scanpos; - if (jj_3R_25()){ + if (jj_3R_127()) + jj_scanpos = xsp; + xsp = jj_scanpos; + if (jj_3R_128()) jj_scanpos = xsp; - if (jj_3R_26()) - return true; - } return false; } - private boolean jj_3R_164(){ - if (jj_scan_token(NOT)) + private boolean jj_3R_29(){ + if (jj_scan_token(DELIMITED_IDENTIFIER)) return true; return false; } - private boolean jj_3R_156(){ - if (jj_scan_token(COMMA)) - return true; - if (jj_3R_155()) + private boolean jj_3R_28(){ + if (jj_scan_token(REGULAR_IDENTIFIER)) return true; return false; } - private boolean jj_3R_20(){ - if (jj_scan_token(REGULAR_IDENTIFIER)) - return true; - if (jj_scan_token(LEFT_PAR)) - return true; - Token xsp; - xsp = jj_scanpos; - if (jj_3R_65()) - jj_scanpos = xsp; - if (jj_scan_token(RIGHT_PAR)) + private boolean jj_3R_145(){ + if (jj_3R_109()) return true; return false; } - private boolean jj_3R_144(){ - if (jj_scan_token(ORDER_BY)) - return true; - if (jj_3R_155()) - return true; + 
private boolean jj_3R_163(){ Token xsp; + xsp = jj_scanpos; + if (jj_3R_185()) + jj_scanpos = xsp; + if (jj_3R_186()) + return true; while(true){ xsp = jj_scanpos; - if (jj_3R_156()){ + if (jj_3R_187()){ jj_scanpos = xsp; break; } @@ -4869,60 +4808,60 @@ public class ADQLParser implements ADQLParserConstants { return false; } - private boolean jj_3R_113(){ - if (jj_3R_101()) + private boolean jj_3R_24(){ + if (jj_scan_token(REGULAR_IDENTIFIER)) + return true; + if (jj_scan_token(LEFT_PAR)) + return true; + Token xsp; + xsp = jj_scanpos; + if (jj_3R_123()) + jj_scanpos = xsp; + if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_152(){ + private boolean jj_3R_14(){ Token xsp; xsp = jj_scanpos; - if (jj_3R_164()) + if (jj_3R_28()){ jj_scanpos = xsp; - if (jj_3R_165()) - return true; - while(true){ - xsp = jj_scanpos; - if (jj_3R_166()){ - jj_scanpos = xsp; - break; - } + if (jj_3R_29()) + return true; } return false; } - private boolean jj_3R_180(){ - if (jj_3R_102()) + private boolean jj_3R_172(){ + if (jj_3R_108()) return true; return false; } - private boolean jj_3R_143(){ - if (jj_scan_token(HAVING)) + private boolean jj_3R_167(){ + if (jj_scan_token(COMMA)) return true; - if (jj_3R_152()) + if (jj_3R_166()) return true; return false; } - private boolean jj_3R_154(){ - if (jj_scan_token(COMMA)) - return true; - if (jj_3R_153()) + private boolean jj_3R_144(){ + if (jj_3R_21()) return true; return false; } - private boolean jj_3R_142(){ - if (jj_scan_token(GROUP_BY)) + private boolean jj_3R_155(){ + if (jj_scan_token(ORDER_BY)) return true; - if (jj_3R_153()) + if (jj_3R_166()) return true; Token xsp; while(true){ xsp = jj_scanpos; - if (jj_3R_154()){ + if (jj_3R_167()){ jj_scanpos = xsp; break; } @@ -4930,161 +4869,181 @@ public class ADQLParser implements ADQLParserConstants { return false; } - private boolean jj_3R_93(){ + private boolean jj_3R_122(){ + Token xsp; + xsp = jj_scanpos; + if (jj_3R_144()){ + jj_scanpos = xsp; + if 
(jj_3R_145()) + return true; + } + return false; + } + + private boolean jj_3R_100(){ if (jj_scan_token(TAN)) return true; if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_102()) + if (jj_3R_108()) return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_92(){ + private boolean jj_3R_99(){ if (jj_scan_token(SIN)) return true; if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_102()) + if (jj_3R_108()) return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_91(){ + private boolean jj_3R_55(){ + if (jj_3R_76()) + return true; + return false; + } + + private boolean jj_3R_98(){ if (jj_scan_token(COT)) return true; if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_102()) + if (jj_3R_108()) return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_112(){ - if (jj_3R_23()) + private boolean jj_3_11(){ + if (jj_3R_24()) return true; return false; } - private boolean jj_3R_90(){ + private boolean jj_3R_97(){ if (jj_scan_token(COS)) return true; if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_102()) + if (jj_3R_108()) return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_64(){ - Token xsp; - xsp = jj_scanpos; - if (jj_3R_112()){ - jj_scanpos = xsp; - if (jj_3R_113()) - return true; - } - return false; - } - - private boolean jj_3R_89(){ + private boolean jj_3R_96(){ if (jj_scan_token(ATAN2)) return true; if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_102()) + if (jj_3R_108()) return true; if (jj_scan_token(COMMA)) return true; - if (jj_3R_102()) + if (jj_3R_108()) return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_88(){ + private boolean jj_3R_54(){ + if (jj_3R_75()) + return true; + return false; + } + + private boolean jj_3R_95(){ if (jj_scan_token(ATAN)) return true; if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_102()) + if (jj_3R_108()) 
return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_87(){ + private boolean jj_3R_35(){ + Token xsp; + xsp = jj_scanpos; + if (jj_3R_54()){ + jj_scanpos = xsp; + if (jj_3_11()){ + jj_scanpos = xsp; + if (jj_3R_55()) + return true; + } + } + return false; + } + + private boolean jj_3R_94(){ if (jj_scan_token(ASIN)) return true; if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_102()) + if (jj_3R_108()) return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_32(){ - if (jj_3R_49()) - return true; - return false; - } - - private boolean jj_3R_141(){ - if (jj_scan_token(WHERE)) + private boolean jj_3R_154(){ + if (jj_scan_token(HAVING)) return true; - if (jj_3R_152()) + if (jj_3R_163()) return true; return false; } - private boolean jj_3_8(){ - if (jj_3R_20()) + private boolean jj_3R_53(){ + if (jj_3R_74()) return true; return false; } - private boolean jj_3R_86(){ + private boolean jj_3R_93(){ if (jj_scan_token(ACOS)) return true; if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_102()) + if (jj_3R_108()) return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_51(){ + private boolean jj_3R_58(){ Token xsp; xsp = jj_scanpos; - if (jj_3R_86()){ + if (jj_3R_93()){ jj_scanpos = xsp; - if (jj_3R_87()){ + if (jj_3R_94()){ jj_scanpos = xsp; - if (jj_3R_88()){ + if (jj_3R_95()){ jj_scanpos = xsp; - if (jj_3R_89()){ + if (jj_3R_96()){ jj_scanpos = xsp; - if (jj_3R_90()){ + if (jj_3R_97()){ jj_scanpos = xsp; - if (jj_3R_91()){ + if (jj_3R_98()){ jj_scanpos = xsp; - if (jj_3R_92()){ + if (jj_3R_99()){ jj_scanpos = xsp; - if (jj_3R_93()) + if (jj_3R_100()) return true; } } @@ -5096,135 +5055,117 @@ public class ADQLParser implements ADQLParserConstants { return false; } - private boolean jj_3R_31(){ - if (jj_3R_48()) + private boolean jj_3R_165(){ + if (jj_scan_token(COMMA)) + return true; + if (jj_3R_164()) return true; return false; } - private 
boolean jj_3R_18(){ + private boolean jj_3R_153(){ + if (jj_scan_token(GROUP_BY)) + return true; + if (jj_3R_164()) + return true; Token xsp; - xsp = jj_scanpos; - if (jj_3R_31()){ - jj_scanpos = xsp; - if (jj_3_8()){ + while(true){ + xsp = jj_scanpos; + if (jj_3R_165()){ jj_scanpos = xsp; - if (jj_3R_32()) - return true; + break; } } return false; } - private boolean jj_3R_162(){ - if (jj_3R_49()) - return true; - return false; - } - - private boolean jj_3R_163(){ - if (jj_scan_token(AS)) - return true; - if (jj_3R_14()) - return true; - return false; - } - - private boolean jj_3R_140(){ - if (jj_scan_token(COMMA)) - return true; - if (jj_3R_47()) - return true; - return false; - } - - private boolean jj_3R_85(){ + private boolean jj_3R_92(){ if (jj_scan_token(TRUNCATE)) return true; if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_102()) + if (jj_3R_108()) return true; Token xsp; xsp = jj_scanpos; - if (jj_3R_182()) + if (jj_3R_174()) jj_scanpos = xsp; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_84(){ + private boolean jj_3R_91(){ if (jj_scan_token(SQRT)) return true; if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_102()) + if (jj_3R_108()) return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_83(){ + private boolean jj_3R_90(){ if (jj_scan_token(ROUND)) return true; if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_102()) + if (jj_3R_108()) return true; Token xsp; xsp = jj_scanpos; - if (jj_3R_181()) + if (jj_3R_173()) jj_scanpos = xsp; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_82(){ + private boolean jj_3R_89(){ if (jj_scan_token(RAND)) return true; if (jj_scan_token(LEFT_PAR)) return true; Token xsp; xsp = jj_scanpos; - if (jj_3R_180()) + if (jj_3R_172()) jj_scanpos = xsp; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_81(){ + private boolean jj_3R_88(){ if (jj_scan_token(RADIANS)) return 
true; if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_102()) + if (jj_3R_108()) return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_80(){ + private boolean jj_3R_87(){ if (jj_scan_token(POWER)) return true; if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_102()) + if (jj_3R_108()) return true; if (jj_scan_token(COMMA)) return true; - if (jj_3R_102()) + if (jj_3R_108()) return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_79(){ + private boolean jj_3R_86(){ if (jj_scan_token(PI)) return true; if (jj_scan_token(LEFT_PAR)) @@ -5234,167 +5175,167 @@ public class ADQLParser implements ADQLParserConstants { return false; } - private boolean jj_3R_78(){ + private boolean jj_3R_152(){ + if (jj_scan_token(WHERE)) + return true; + if (jj_3R_163()) + return true; + return false; + } + + private boolean jj_3R_85(){ if (jj_scan_token(MOD)) return true; if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_102()) + if (jj_3R_108()) return true; if (jj_scan_token(COMMA)) return true; - if (jj_3R_102()) + if (jj_3R_108()) return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_77(){ + private boolean jj_3R_84(){ if (jj_scan_token(LOG10)) return true; if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_102()) + if (jj_3R_108()) return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_76(){ - if (jj_scan_token(LOG)) - return true; - if (jj_scan_token(LEFT_PAR)) - return true; - if (jj_3R_102()) + private boolean jj_3R_47(){ + if (jj_scan_token(CONCAT)) return true; - if (jj_scan_token(RIGHT_PAR)) + if (jj_3R_35()) return true; return false; } - private boolean jj_3R_118(){ - if (jj_scan_token(FROM)) + private boolean jj_3R_83(){ + if (jj_scan_token(LOG)) return true; - if (jj_3R_47()) + if (jj_scan_token(LEFT_PAR)) + return true; + if (jj_3R_108()) + return true; + if (jj_scan_token(RIGHT_PAR)) return true; 
- Token xsp; - while(true){ - xsp = jj_scanpos; - if (jj_3R_140()){ - jj_scanpos = xsp; - break; - } - } return false; } - private boolean jj_3R_75(){ + private boolean jj_3R_82(){ if (jj_scan_token(FLOOR)) return true; if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_102()) + if (jj_3R_108()) return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_74(){ + private boolean jj_3R_81(){ if (jj_scan_token(EXP)) return true; if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_102()) + if (jj_3R_108()) return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_73(){ - if (jj_scan_token(DEGREES)) + private boolean jj_3R_184(){ + if (jj_scan_token(AS)) return true; - if (jj_scan_token(LEFT_PAR)) + if (jj_3R_14()) return true; - if (jj_3R_102()) + return false; + } + + private boolean jj_3R_151(){ + if (jj_scan_token(COMMA)) return true; - if (jj_scan_token(RIGHT_PAR)) + if (jj_3R_51()) return true; return false; } - private boolean jj_3R_72(){ - if (jj_scan_token(CEILING)) + private boolean jj_3R_80(){ + if (jj_scan_token(DEGREES)) return true; if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_102()) + if (jj_3R_108()) return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_27(){ - if (jj_3R_14()) + private boolean jj_3R_79(){ + if (jj_scan_token(CEILING)) return true; - if (jj_scan_token(DOT)) + if (jj_scan_token(LEFT_PAR)) return true; - return false; - } - - private boolean jj_3R_43(){ - if (jj_scan_token(CONCAT)) + if (jj_3R_108()) return true; - if (jj_3R_18()) + if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_71(){ + private boolean jj_3R_78(){ if (jj_scan_token(ABS)) return true; if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_102()) + if (jj_3R_108()) return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_151(){ - if (jj_3R_41()) + private boolean jj_3R_27(){ 
+ if (jj_3R_35()) return true; Token xsp; - xsp = jj_scanpos; - if (jj_3R_163()) - jj_scanpos = xsp; + while(true){ + xsp = jj_scanpos; + if (jj_3R_47()){ + jj_scanpos = xsp; + break; + } + } return false; } - private boolean jj_3R_15(){ - if (jj_3R_14()) - return true; - if (jj_scan_token(DOT)) + private boolean jj_3R_73(){ + if (jj_scan_token(MINUS)) return true; - Token xsp; - xsp = jj_scanpos; - if (jj_3R_27()) - jj_scanpos = xsp; return false; } - private boolean jj_3R_24(){ - if (jj_3R_18()) + private boolean jj_3R_129(){ + if (jj_scan_token(FROM)) + return true; + if (jj_3R_51()) return true; Token xsp; while(true){ xsp = jj_scanpos; - if (jj_3R_43()){ + if (jj_3R_151()){ jj_scanpos = xsp; break; } @@ -5402,38 +5343,38 @@ public class ADQLParser implements ADQLParserConstants { return false; } - private boolean jj_3R_50(){ + private boolean jj_3R_57(){ Token xsp; xsp = jj_scanpos; - if (jj_3R_71()){ + if (jj_3R_78()){ jj_scanpos = xsp; - if (jj_3R_72()){ + if (jj_3R_79()){ jj_scanpos = xsp; - if (jj_3R_73()){ + if (jj_3R_80()){ jj_scanpos = xsp; - if (jj_3R_74()){ + if (jj_3R_81()){ jj_scanpos = xsp; - if (jj_3R_75()){ + if (jj_3R_82()){ jj_scanpos = xsp; - if (jj_3R_76()){ + if (jj_3R_83()){ jj_scanpos = xsp; - if (jj_3R_77()){ + if (jj_3R_84()){ jj_scanpos = xsp; - if (jj_3R_78()){ + if (jj_3R_85()){ jj_scanpos = xsp; - if (jj_3R_79()){ + if (jj_3R_86()){ jj_scanpos = xsp; - if (jj_3R_80()){ + if (jj_3R_87()){ jj_scanpos = xsp; - if (jj_3R_81()){ + if (jj_3R_88()){ jj_scanpos = xsp; - if (jj_3R_82()){ + if (jj_3R_89()){ jj_scanpos = xsp; - if (jj_3R_83()){ + if (jj_3R_90()){ jj_scanpos = xsp; - if (jj_3R_84()){ + if (jj_3R_91()){ jj_scanpos = xsp; - if (jj_3R_85()) + if (jj_3R_92()) return true; } } @@ -5452,46 +5393,65 @@ public class ADQLParser implements ADQLParserConstants { return false; } - private boolean jj_3R_170(){ - if (jj_scan_token(MINUS)) + private boolean jj_3R_30(){ + if (jj_3R_14()) + return true; + if (jj_scan_token(DOT)) return true; 
return false; } - private boolean jj_3R_36(){ - if (jj_3R_20()) + private boolean jj_3R_41(){ + if (jj_3R_24()) return true; return false; } - private boolean jj_3R_35(){ - if (jj_3R_52()) + private boolean jj_3_10(){ + if (jj_3R_23()) return true; return false; } - private boolean jj_3R_34(){ - if (jj_3R_51()) + private boolean jj_3R_40(){ + if (jj_3R_59()) return true; return false; } - private boolean jj_3R_33(){ - if (jj_3R_50()) + private boolean jj_3R_52(){ + Token xsp; + xsp = jj_scanpos; + if (jj_scan_token(8)){ + jj_scanpos = xsp; + if (jj_3R_73()) + return true; + } + return false; + } + + private boolean jj_3R_39(){ + if (jj_3R_58()) return true; return false; } - private boolean jj_3R_19(){ + private boolean jj_3R_38(){ + if (jj_3R_57()) + return true; + return false; + } + + private boolean jj_3R_23(){ Token xsp; xsp = jj_scanpos; - if (jj_3R_33()){ + if (jj_3R_38()){ jj_scanpos = xsp; - if (jj_3R_34()){ + if (jj_3R_39()){ jj_scanpos = xsp; - if (jj_3R_35()){ + if (jj_3R_40()){ jj_scanpos = xsp; - if (jj_3R_36()) + if (jj_3R_41()) return true; } } @@ -5499,24 +5459,29 @@ public class ADQLParser implements ADQLParserConstants { return false; } - private boolean jj_3_7(){ - if (jj_3R_19()) + private boolean jj_3R_162(){ + if (jj_3R_46()) return true; + Token xsp; + xsp = jj_scanpos; + if (jj_3R_184()) + jj_scanpos = xsp; return false; } - private boolean jj_3R_161(){ + private boolean jj_3R_15(){ + if (jj_3R_14()) + return true; + if (jj_scan_token(DOT)) + return true; Token xsp; xsp = jj_scanpos; - if (jj_scan_token(8)){ + if (jj_3R_30()) jj_scanpos = xsp; - if (jj_3R_170()) - return true; - } return false; } - private boolean jj_3R_160(){ + private boolean jj_3R_168(){ Token xsp; xsp = jj_scanpos; if (jj_scan_token(10)){ @@ -5524,165 +5489,204 @@ public class ADQLParser implements ADQLParserConstants { if (jj_scan_token(11)) return true; } - if (jj_3R_130()) + if (jj_3R_135()) return true; return false; } - private boolean jj_3R_148(){ + private 
boolean jj_3R_34(){ Token xsp; xsp = jj_scanpos; - if (jj_3R_161()) + if (jj_3R_52()) jj_scanpos = xsp; xsp = jj_scanpos; - if (jj_3_7()){ + if (jj_3_10()){ jj_scanpos = xsp; - if (jj_3R_162()) + if (jj_3R_53()) return true; } return false; } - private boolean jj_3R_48(){ + private boolean jj_3R_75(){ if (jj_scan_token(COORDSYS)) return true; if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_64()) + if (jj_3R_122()) return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_150(){ - if (jj_scan_token(ASTERISK)) + private boolean jj_3R_156(){ + Token xsp; + xsp = jj_scanpos; + if (jj_scan_token(8)){ + jj_scanpos = xsp; + if (jj_scan_token(9)) + return true; + } + if (jj_3R_108()) return true; return false; } - private boolean jj_3_1(){ - if (jj_3R_14()) - return true; - if (jj_scan_token(DOT)) + private boolean jj_3R_19(){ + if (jj_3R_34()) return true; Token xsp; xsp = jj_scanpos; - if (jj_3R_15()) + if (jj_scan_token(8)){ jj_scanpos = xsp; - if (jj_scan_token(ASTERISK)) + if (jj_scan_token(9)){ + jj_scanpos = xsp; + if (jj_scan_token(10)){ + jj_scanpos = xsp; + if (jj_scan_token(11)) + return true; + } + } + } + return false; + } + + private boolean jj_3R_178(){ + if (jj_3R_21()) + return true; + return false; + } + + private boolean jj_3R_20(){ + if (jj_3R_35()) + return true; + if (jj_scan_token(CONCAT)) return true; return false; } - private boolean jj_3R_159(){ + private boolean jj_3R_176(){ + if (jj_3R_21()) + return true; + return false; + } + + private boolean jj_3R_171(){ if (jj_scan_token(COMMA)) return true; - if (jj_3R_158()) + if (jj_3R_170()) return true; return false; } - private boolean jj_3R_145(){ + private boolean jj_3R_135(){ + if (jj_3R_34()) + return true; Token xsp; xsp = jj_scanpos; - if (jj_scan_token(8)){ + if (jj_3R_168()) jj_scanpos = xsp; - if (jj_scan_token(9)) - return true; - } - if (jj_3R_102()) - return true; return false; } - private boolean jj_3R_138(){ - Token xsp; - xsp = jj_scanpos; - 
if (jj_3R_150()){ - jj_scanpos = xsp; - if (jj_3_1()){ - jj_scanpos = xsp; - if (jj_3R_151()) - return true; - } - } + private boolean jj_3R_158(){ + if (jj_scan_token(POINT)) + return true; + if (jj_scan_token(LEFT_PAR)) + return true; + if (jj_3R_169()) + return true; + if (jj_scan_token(COMMA)) + return true; + if (jj_3R_170()) + return true; + if (jj_scan_token(RIGHT_PAR)) + return true; return false; } - private boolean jj_3R_186(){ - if (jj_3R_23()) + private boolean jj_3_9(){ + if (jj_3R_22()) return true; return false; } - private boolean jj_3R_184(){ - if (jj_3R_23()) + private boolean jj_3R_161(){ + if (jj_scan_token(ASTERISK)) return true; return false; } - private boolean jj_3R_130(){ - if (jj_3R_148()) + private boolean jj_3_1(){ + if (jj_3R_14()) + return true; + if (jj_scan_token(DOT)) return true; Token xsp; xsp = jj_scanpos; - if (jj_3R_160()) + if (jj_3R_15()) jj_scanpos = xsp; + if (jj_scan_token(ASTERISK)) + return true; return false; } - private boolean jj_3R_147(){ - if (jj_scan_token(POINT)) - return true; - if (jj_scan_token(LEFT_PAR)) - return true; - if (jj_3R_157()) - return true; - if (jj_scan_token(COMMA)) - return true; - if (jj_3R_158()) - return true; - if (jj_scan_token(RIGHT_PAR)) + private boolean jj_3_8(){ + if (jj_3R_21()) return true; return false; } - private boolean jj_3R_129(){ + private boolean jj_3R_141(){ if (jj_scan_token(REGION)) return true; if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_24()) + if (jj_3R_27()) return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_139(){ - if (jj_scan_token(COMMA)) + private boolean jj_3_7(){ + if (jj_scan_token(REGULAR_IDENTIFIER)) return true; - if (jj_3R_138()) + if (jj_scan_token(LEFT_PAR)) return true; return false; } private boolean jj_3_6(){ - if (jj_3R_18()) - return true; - if (jj_scan_token(CONCAT)) + if (jj_scan_token(LEFT_PAR)) return true; return false; } private boolean jj_3_5(){ - if (jj_scan_token(COORDSYS)) - return 
true; + Token xsp; + xsp = jj_scanpos; + if (jj_scan_token(63)){ + jj_scanpos = xsp; + if (jj_3R_20()) + return true; + } return false; } private boolean jj_3_4(){ + Token xsp; + xsp = jj_scanpos; + if (jj_3R_18()){ + jj_scanpos = xsp; + if (jj_3R_19()) + return true; + } + return false; + } + + private boolean jj_3R_18(){ Token xsp; xsp = jj_scanpos; if (jj_scan_token(8)){ @@ -5693,190 +5697,124 @@ public class ADQLParser implements ADQLParserConstants { return false; } - private boolean jj_3R_128(){ - if (jj_scan_token(POLYGON)) - return true; - if (jj_scan_token(LEFT_PAR)) - return true; - if (jj_3R_157()) - return true; - if (jj_scan_token(COMMA)) - return true; - if (jj_3R_158()) - return true; - if (jj_scan_token(COMMA)) - return true; - if (jj_3R_158()) - return true; - if (jj_scan_token(COMMA)) - return true; - if (jj_3R_158()) + private boolean jj_3R_69(){ + if (jj_3R_34()) return true; + return false; + } + + private boolean jj_3R_149(){ Token xsp; - while(true){ - xsp = jj_scanpos; - if (jj_3R_159()){ + xsp = jj_scanpos; + if (jj_3R_161()){ + jj_scanpos = xsp; + if (jj_3_1()){ jj_scanpos = xsp; - break; + if (jj_3R_162()) + return true; } } - if (jj_scan_token(RIGHT_PAR)) - return true; return false; } - private boolean jj_3R_137(){ - if (jj_scan_token(TOP)) - return true; - if (jj_scan_token(UNSIGNED_INTEGER)) + private boolean jj_3R_68(){ + if (jj_3R_35()) return true; return false; } - private boolean jj_3R_136(){ - if (jj_scan_token(QUANTIFIER)) + private boolean jj_3R_67(){ + if (jj_3R_21()) return true; return false; } - private boolean jj_3R_102(){ - if (jj_3R_130()) + private boolean jj_3R_108(){ + if (jj_3R_135()) return true; Token xsp; xsp = jj_scanpos; - if (jj_3R_145()) + if (jj_3R_156()) jj_scanpos = xsp; return false; } - private boolean jj_3R_106(){ - if (jj_scan_token(FULL)) + private boolean jj_3R_66(){ + if (jj_3R_109()) return true; return false; } - private boolean jj_3R_127(){ - if (jj_3R_147()) + private boolean jj_3R_140(){ + if 
(jj_scan_token(POLYGON)) return true; - return false; - } - - private boolean jj_3R_44(){ - if (jj_scan_token(SELECT)) + if (jj_scan_token(LEFT_PAR)) return true; - Token xsp; - xsp = jj_scanpos; - if (jj_3R_136()) - jj_scanpos = xsp; - xsp = jj_scanpos; - if (jj_3R_137()) - jj_scanpos = xsp; - if (jj_3R_138()) + if (jj_3R_169()) + return true; + if (jj_scan_token(COMMA)) + return true; + if (jj_3R_170()) + return true; + if (jj_scan_token(COMMA)) + return true; + if (jj_3R_170()) + return true; + if (jj_scan_token(COMMA)) + return true; + if (jj_3R_170()) return true; + Token xsp; while(true){ xsp = jj_scanpos; - if (jj_3R_139()){ + if (jj_3R_171()){ jj_scanpos = xsp; break; } } - return false; - } - - private boolean jj_3R_59(){ - if (jj_3R_102()) - return true; - return false; - } - - private boolean jj_3R_58(){ - if (jj_3R_24()) + if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_57(){ + private boolean jj_3R_65(){ if (jj_3R_24()) return true; return false; } - private boolean jj_3R_70(){ + private boolean jj_3R_64(){ if (jj_scan_token(LEFT_PAR)) return true; - if (jj_3R_41()) + if (jj_3R_46()) return true; if (jj_scan_token(RIGHT_PAR)) return true; return false; } - private boolean jj_3R_56(){ - if (jj_3R_102()) + private boolean jj_3R_63(){ + if (jj_3R_27()) return true; return false; } - private boolean jj_3R_126(){ - if (jj_scan_token(CIRCLE)) - return true; - if (jj_scan_token(LEFT_PAR)) - return true; - if (jj_3R_157()) - return true; - if (jj_scan_token(COMMA)) - return true; + private boolean jj_3R_139(){ if (jj_3R_158()) return true; - if (jj_scan_token(COMMA)) - return true; - if (jj_3R_102()) - return true; - if (jj_scan_token(RIGHT_PAR)) - return true; return false; } - private boolean jj_3R_69(){ - if (jj_3R_117()) + private boolean jj_3R_150(){ + if (jj_scan_token(COMMA)) return true; - return false; - } - - private boolean jj_3R_55(){ - if (jj_3R_101()) + if (jj_3R_149()) return true; return false; } - private 
boolean jj_3R_41(){ - Token xsp; - xsp = jj_scanpos; - if (jj_3R_55()){ - jj_scanpos = xsp; - if (jj_3R_56()){ - jj_scanpos = xsp; - if (jj_3R_57()){ - jj_scanpos = xsp; - if (jj_3R_58()){ - jj_scanpos = xsp; - if (jj_3R_59()) - return true; - } - } - } - } - return false; - } - - private boolean jj_3R_125(){ - if (jj_scan_token(CENTROID)) - return true; - if (jj_scan_token(LEFT_PAR)) - return true; - if (jj_3R_64()) - return true; - if (jj_scan_token(RIGHT_PAR)) + private boolean jj_3R_62(){ + if (jj_3R_108()) return true; return false; } @@ -5892,7 +5830,7 @@ public class ADQLParser implements ADQLParserConstants { private Token jj_scanpos, jj_lastpos; private int jj_la; private int jj_gen; - final private int[] jj_la1 = new int[97]; + final private int[] jj_la1 = new int[98]; static private int[] jj_la1_0; static private int[] jj_la1_1; static private int[] jj_la1_2; @@ -5905,22 +5843,22 @@ public class ADQLParser implements ADQLParserConstants { } private static void jj_la1_init_0(){ - jj_la1_0 = new int[]{0x41,0x0,0x0,0x0,0x0,0x80000,0x100000,0x20,0x0,0x0,0x400000,0x400,0x304,0x20,0x20,0x20,0x0,0x10,0x10,0x10,0x0,0x0,0x0,0x0,0x400000,0x400000,0x400000,0x0,0x4,0x3d800000,0x1c000000,0x2000000,0x1d000000,0x1d000000,0x1c000000,0x2000000,0x1d000000,0x1d000000,0x20,0xc0000000,0x3d800000,0x0,0x0,0x0,0x300,0x300,0x4,0x0,0x304,0x300,0x300,0xc00,0xc00,0x300,0x300,0x4,0x80,0x0,0x4,0x0,0x0,0x0,0x0,0x0,0x4,0x0,0x0,0x3f000,0x0,0x0,0x304,0x3f000,0x0,0x0,0x20,0x4,0x80000,0x704,0x0,0x80000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x20,0x0,0x0,0x304,0x20,0x20,0x0,0x0,0x20,0x304,}; + jj_la1_0 = new 
int[]{0x41,0x0,0x0,0x0,0x0,0x80000,0x100000,0x20,0x0,0x0,0x400000,0x400,0x304,0x20,0x20,0x20,0x0,0x10,0x10,0x10,0x0,0x0,0x0,0x0,0x400000,0x400000,0x400000,0x0,0x4,0x3d800000,0x1c000000,0x2000000,0x1d000000,0x1d000000,0x1c000000,0x2000000,0x1d000000,0x1d000000,0x20,0xc0000000,0x3d800000,0x0,0x0,0x0,0x300,0x300,0x4,0x4,0x0,0x304,0x300,0x300,0xc00,0xc00,0x300,0x300,0x4,0x80,0x0,0x4,0x0,0x0,0x0,0x0,0x0,0x4,0x0,0x0,0x3f000,0x0,0x0,0x304,0x3f000,0x0,0x0,0x20,0x4,0x80000,0x704,0x0,0x80000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x20,0x0,0x0,0x304,0x20,0x20,0x0,0x0,0x20,0x304,}; } private static void jj_la1_init_1(){ - jj_la1_1 = new int[]{0x0,0x1,0x400,0x800,0x1000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffff8000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x6000,0x6000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xf8000,0x3f00000,0x7c0f8000,0x0,0x0,0x0,0x0,0x0,0x0,0xf8000,0x0,0x80000000,0xf8000,0x3f00000,0x8,0x6,0x6,0x8,0x0,0x8,0x8,0x0,0x108,0x200,0xffff8000,0x0,0x8,0x8,0x0,0x0,0x0,0xffff8000,0x78000,0x0,0xf8000,0xc000000,0x800000,0x800000,0x800000,0x800000,0x7c000000,0x0,0x3f00000,0x7c000000,0x7c0f8000,0x0,0x0,0x0,0x0,0x0,0xffff8000,}; + jj_la1_1 = new int[]{0x0,0x1,0x400,0x800,0x1000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xffff8000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x6000,0x6000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0xf8000,0x0,0x3f00000,0x7c0f8000,0x0,0x0,0x0,0x0,0x0,0x0,0xf8000,0x0,0x80000000,0x0,0x3f00000,0x8,0x6,0x6,0x8,0x0,0x8,0x8,0x0,0x108,0x200,0xffff8000,0x0,0x8,0x8,0x0,0x0,0x0,0xffff8000,0x78000,0x0,0xf8000,0xc000000,0x800000,0x800000,0x800000,0x800000,0x7c000000,0x0,0x3f00000,0x7c000000,0x7c0f8000,0x0,0x0,0x0,0x0,0x0,0xffff8000,}; } private static void jj_la1_init_2(){ - jj_la1_2 = new 
int[]{0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x20ffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x20000000,0x0,0x0,0x0,0x0,0x20000000,0x0,0x20ffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x20000000,0x0,0x0,0x20000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x20ffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x20ffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x1,0x0,0x0,0xffffff,0x20ffffff,0x0,0x0,0xfffe,0xff0000,0x0,0x20ffffff,}; + jj_la1_2 = new int[]{0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x20ffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x20000000,0x0,0x0,0x0,0x0,0x0,0x20000000,0x0,0xffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x20000000,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x20ffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x20ffffff,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x1,0x0,0x0,0xffffff,0xffffff,0x0,0x0,0xfffe,0xff0000,0x0,0x20ffffff,}; } private static void jj_la1_init_3(){ - jj_la1_3 = new int[]{0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x3,0x3,0x0,0x0,0x3b,0x0,0x0,0x0,0x3,0x0,0x0,0x0,0x23,0x23,0x0,0x0,0x0,0x3,0x0,0x3,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x38,0x30,0x0,0x0,0x3b,0x0,0x3b,0x0,0x0,0x0,0x0,0x0,0x0,0x3b,0x0,0x0,0x3b,0x3,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x3b,0x0,0x0,0x0,0x0,0x0,0x0,0x3b,0x0,0x0,0x0,0x0,0x3,0x3,0x3,0x3,0x0,0x0,0x0,0x2,0x3b,0x0,0x0,0x0,0x0,0x0,0x3b,}; + jj_la1_3 = new int[]{0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x3,0x3,0x0,0x0,0x3b,0x0,0x0,0x0,0x3,0x0,0x0,0x0,0x23,0x23,0x0,0x0,0x0,0x3,0x0,0x3,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x38,0x30,0x0,0x0,0x3b,0x3,0x0,0x3b,0x0,0x0,0x0,0x0,0x0,0x0,0x3b,0x0,0x0,0x3,0x3,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x0,0x3b,0x0,0x0,0x0,0x0,0x0,0x0,0x3b,0x0,0x0,0x0,0x0,0x3,0x3,0x3,0x3,0x0,0x0,0x0,0x2,0x3b,0x0,0x0,0x0,0x0,0x0,0x3b,}; } - final private JJCalls[] jj_2_rtns = new JJCalls[13]; + final private JJCalls[] jj_2_rtns = new JJCalls[16]; private boolean 
jj_rescan = false; private int jj_gc = 0; @@ -5940,7 +5878,7 @@ public class ADQLParser implements ADQLParserConstants { token = new Token(); jj_ntk = -1; jj_gen = 0; - for(int i = 0; i < 97; i++) + for(int i = 0; i < 98; i++) jj_la1[i] = -1; for(int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); @@ -5962,7 +5900,7 @@ public class ADQLParser implements ADQLParserConstants { token = new Token(); jj_ntk = -1; jj_gen = 0; - for(int i = 0; i < 97; i++) + for(int i = 0; i < 98; i++) jj_la1[i] = -1; for(int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); @@ -5975,7 +5913,7 @@ public class ADQLParser implements ADQLParserConstants { token = new Token(); jj_ntk = -1; jj_gen = 0; - for(int i = 0; i < 97; i++) + for(int i = 0; i < 98; i++) jj_la1[i] = -1; for(int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); @@ -5988,7 +5926,7 @@ public class ADQLParser implements ADQLParserConstants { token = new Token(); jj_ntk = -1; jj_gen = 0; - for(int i = 0; i < 97; i++) + for(int i = 0; i < 98; i++) jj_la1[i] = -1; for(int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); @@ -6000,7 +5938,7 @@ public class ADQLParser implements ADQLParserConstants { token = new Token(); jj_ntk = -1; jj_gen = 0; - for(int i = 0; i < 97; i++) + for(int i = 0; i < 98; i++) jj_la1[i] = -1; for(int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); @@ -6012,7 +5950,7 @@ public class ADQLParser implements ADQLParserConstants { token = new Token(); jj_ntk = -1; jj_gen = 0; - for(int i = 0; i < 97; i++) + for(int i = 0; i < 98; i++) jj_la1[i] = -1; for(int i = 0; i < jj_2_rtns.length; i++) jj_2_rtns[i] = new JJCalls(); @@ -6038,7 +5976,6 @@ public class ADQLParser implements ADQLParserConstants { } } } - trace_token(token, ""); return token; } token = oldToken; @@ -6086,7 +6023,6 @@ public class ADQLParser implements ADQLParserConstants { token = token.next = token_source.getNextToken(); jj_ntk = -1; jj_gen++; - trace_token(token, " 
(in getNextToken)"); return token; } @@ -6150,7 +6086,7 @@ public class ADQLParser implements ADQLParserConstants { la1tokens[jj_kind] = true; jj_kind = -1; } - for(int i = 0; i < 97; i++){ + for(int i = 0; i < 98; i++){ if (jj_la1[i] == jj_gen){ for(int j = 0; j < 32; j++){ if ((jj_la1_0[i] & (1 << j)) != 0){ @@ -6185,68 +6121,15 @@ public class ADQLParser implements ADQLParserConstants { return new ParseException(token, exptokseq, tokenImage); } - private int trace_indent = 0; - private boolean trace_enabled = true; - /** Enable tracing. */ - final public void enable_tracing(){ - trace_enabled = true; - } + final public void enable_tracing(){} /** Disable tracing. */ - final public void disable_tracing(){ - trace_enabled = false; - } - - private void trace_call(String s){ - if (trace_enabled){ - for(int i = 0; i < trace_indent; i++){ - System.out.print(" "); - } - System.out.println("Call: " + s); - } - trace_indent = trace_indent + 2; - } - - private void trace_return(String s){ - trace_indent = trace_indent - 2; - if (trace_enabled){ - for(int i = 0; i < trace_indent; i++){ - System.out.print(" "); - } - System.out.println("Return: " + s); - } - } - - private void trace_token(Token t, String where){ - if (trace_enabled){ - for(int i = 0; i < trace_indent; i++){ - System.out.print(" "); - } - System.out.print("Consumed token: <" + tokenImage[t.kind]); - if (t.kind != 0 && !tokenImage[t.kind].equals("\"" + t.image + "\"")){ - System.out.print(": \"" + t.image + "\""); - } - System.out.println(" at line " + t.beginLine + " column " + t.beginColumn + ">" + where); - } - } - - private void trace_scan(Token t1, int t2){ - if (trace_enabled){ - for(int i = 0; i < trace_indent; i++){ - System.out.print(" "); - } - System.out.print("Visited token: <" + tokenImage[t1.kind]); - if (t1.kind != 0 && !tokenImage[t1.kind].equals("\"" + t1.image + "\"")){ - System.out.print(": \"" + t1.image + "\""); - } - System.out.println(" at line " + t1.beginLine + " column " + 
t1.beginColumn + ">; Expected token: <" + tokenImage[t2] + ">"); - } - } + final public void disable_tracing(){} private void jj_rescan_token(){ jj_rescan = true; - for(int i = 0; i < 13; i++){ + for(int i = 0; i < 16; i++){ try{ JJCalls p = jj_2_rtns[i]; do{ @@ -6293,6 +6176,15 @@ public class ADQLParser implements ADQLParserConstants { case 12: jj_3_13(); break; + case 13: + jj_3_14(); + break; + case 14: + jj_3_15(); + break; + case 15: + jj_3_16(); + break; } } p = p.next; diff --git a/src/adql/parser/ADQLQueryFactory.java b/src/adql/parser/ADQLQueryFactory.java index 12887f4d69e8e34afc5b2f98767cb04834636282..4bcfdf2fbfc8828d34bc123e7001e111eb994c93 100644 --- a/src/adql/parser/ADQLQueryFactory.java +++ b/src/adql/parser/ADQLQueryFactory.java @@ -16,13 +16,13 @@ package adql.parser; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012-2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.util.Collection; -import java.util.Vector; +import adql.db.FunctionDef; import adql.parser.IdentifierItems.IdentifierItem; import adql.query.ADQLOrder; import adql.query.ADQLQuery; @@ -83,26 +83,29 @@ import adql.query.operand.function.geometry.RegionFunction; *

To customize the object representation you merely have to extends the appropriate functions of this class.

* * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) * * @see ADQLParser */ public class ADQLQueryFactory { - protected boolean allowUnknownFunctions = false; - + /** + * Type of table JOIN. + * + * @author Grégory Mantelet (CDS) + * @version 1.0 (08/2011) + */ public static enum JoinType{ CROSS, INNER, OUTER_LEFT, OUTER_RIGHT, OUTER_FULL; } + /** + * Create a query factory. + */ public ADQLQueryFactory(){ ; } - public ADQLQueryFactory(boolean allowUnknownFunctions){ - this.allowUnknownFunctions = allowUnknownFunctions; - } - public ADQLQuery createQuery() throws Exception{ return new ADQLQuery(); } @@ -268,21 +271,29 @@ public class ADQLQueryFactory { /** *

Creates the user defined functions called as the given name and with the given parameters.

- *

IMPORTANT: This function must be overridden if some user defined functions are available.

+ * + *

+ * By default, this function returns a {@link DefaultUDF} instance. It is generic enough to cover every kind of functions. + * But you can of course override this function in order to return your own instance of {@link UserDefinedFunction}. + * In this case, you may not forget to call the super function (super.createUserDefinedFunction(name, params)) so that + * all other unknown functions are still returned as {@link DefaultUDF} instances. + *

+ * + *

IMPORTANT: + * The tests done to check whether a user defined function is allowed/managed in this implementation, is done later by the parser. + * Only declared UDF will pass the test of the parser. For that, you should give it a list of allowed UDFs (each UDF will be then + * represented by a {@link FunctionDef} object). + *

* * @param name Name of the user defined function to create. * @param params Parameters of the user defined function to create. * - * @return The corresponding user defined function. + * @return The corresponding user defined function (by default an instance of {@link DefaultUDF}). * - * @throws Exception An {@link UnsupportedOperationException} by default, otherwise any other type of error may be - * thrown if there is a problem while creating the function. + * @throws Exception If there is a problem while creating the function. */ public UserDefinedFunction createUserDefinedFunction(String name, ADQLOperand[] params) throws Exception{ - if (allowUnknownFunctions) - return new DefaultUDF(name, params); - else - throw new UnsupportedOperationException("No ADQL function called \"" + name + "\" !"); + return new DefaultUDF(name, params); } public DistanceFunction createDistance(PointFunction point1, PointFunction point2) throws Exception{ @@ -317,7 +328,7 @@ public class ADQLQueryFactory { return new RegionFunction(param); } - public PolygonFunction createPolygon(ADQLOperand coordSys, Vector coords) throws Exception{ + public PolygonFunction createPolygon(ADQLOperand coordSys, Collection coords) throws Exception{ return new PolygonFunction(coordSys, coords); } @@ -375,14 +386,14 @@ public class ADQLQueryFactory { /** * Replace {@link #createOrder(int, boolean, TextPosition)}. 
- * @since 1.3 + * @since 1.4 */ public ADQLOrder createOrder(final int ind, final boolean desc) throws Exception{ return new ADQLOrder(ind, desc); } /** - * @deprecated since 1.3 ; Replaced by {@link #createOrder(int, boolean)} + * @deprecated since 1.4 ; Replaced by {@link #createOrder(int, boolean)} */ @Deprecated public ADQLOrder createOrder(final int ind, final boolean desc, final TextPosition position) throws Exception{ diff --git a/src/adql/parser/QueryChecker.java b/src/adql/parser/QueryChecker.java index 0c15a16d9b86e8d11fa80c5aae25428a2b42e705..dec0c618926f409650ecc98124a786813b052f9a 100644 --- a/src/adql/parser/QueryChecker.java +++ b/src/adql/parser/QueryChecker.java @@ -17,7 +17,7 @@ package adql.parser; * along with ADQLLibrary. If not, see . * * Copyright 2012-2013 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomisches Rechen Institute (ARI) + * Astronomisches Rechen Institut (ARI) */ import adql.db.DBChecker; diff --git a/src/adql/parser/TokenMgrError.java b/src/adql/parser/TokenMgrError.java index 6e875080474475d7cb8e71395dc12a0a47740423..b41b384078d9f119be346b62aa752b8411b10ce8 100644 --- a/src/adql/parser/TokenMgrError.java +++ b/src/adql/parser/TokenMgrError.java @@ -3,13 +3,7 @@ package adql.parser; /** Token Manager Error. */ -/** - * TODO Javadoc of TokenMgrError ! - * - * @author Grégory Mantelet (CDS) - * @version 08/2011 - * - */ + @SuppressWarnings("all") public class TokenMgrError extends Error { @@ -129,6 +123,7 @@ public class TokenMgrError extends Error { * * from this method for such cases in the release version of your parser. 
*/ + @Override public String getMessage(){ return super.getMessage(); } diff --git a/src/adql/parser/adqlGrammar.jj b/src/adql/parser/adqlGrammar.jj index cc2d75e2291a7d9086615c150f19960e947bf4ea..442b9f7d96a6da1507f25342cabd822226e160b8 100644 --- a/src/adql/parser/adqlGrammar.jj +++ b/src/adql/parser/adqlGrammar.jj @@ -14,7 +14,7 @@ * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012-2014 - UDS/Centre de DonnM-CM-)es astronomiques de Strasbourg (CDS), + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), * Astronomisches Rechen Institute (ARI) */ @@ -26,7 +26,7 @@ * If the syntax is not conform to the ADQL definition an error message is printed else it will be the message "Correct syntax". * * Author: Grégory Mantelet (CDS;ARI) - gmantele@ari.uni-heidelberg.de -* Version: 1.2 (03/2014) +* Version: 1.4 (06/2015) */ /* ########### */ @@ -35,7 +35,7 @@ options { STATIC = false; IGNORE_CASE = true; - DEBUG_PARSER = true; + DEBUG_PARSER = false; } /* ########## */ @@ -89,7 +89,7 @@ import adql.translator.TranslationException; * @see ADQLQueryFactory * * @author Grégory Mantelet (CDS;ARI) - gmantele@ari.uni-heidelberg.de -* @version 1.2 (03/2014) +* @version 1.4 (06/2015) */ public class ADQLParser { @@ -108,9 +108,6 @@ public class ADQLParser { /** The first token of a table/column name. This token is extracted by {@link #Identifier()}. */ private Token currentIdentifierToken = null; - /** List of all allowed coordinate systems. */ - private ArrayList allowedCoordSys = new ArrayList(); - /** * Builds an ADQL parser without a query to parse. 
*/ @@ -348,24 +345,6 @@ public class ADQLParser { return Query(); } - public final void addCoordinateSystem(final String coordSys){ - allowedCoordSys.add(coordSys); - } - - public final void setCoordinateSystems(final Collection coordSys){ - allowedCoordSys.clear(); - if (coordSys != null) - allowedCoordSys.addAll(coordSys); - } - - public final boolean isAllowedCoordSys(final String coordSys) { - for(String cs : allowedCoordSys){ - if (cs.equalsIgnoreCase(coordSys)) - return true; - } - return false; - } - public final void setDebug(boolean debug){ if (debug) enable_tracing(); else disable_tracing(); @@ -1114,29 +1093,48 @@ NumericConstant SignedInteger(): {Token sign=null, number; NumericConstant cst;} /* *********** */ /* EXPRESSIONS */ /* *********** */ -ADQLOperand ValueExpressionPrimary(): {ADQLColumn column; ADQLOperand op; Token left,right;} { - try{ +ADQLOperand NumericValueExpressionPrimary(): {String expr; ADQLColumn column; ADQLOperand op; Token left, right;} { + try{ (// unsigned_value_specification op=UnsignedNumeric() {return op;} - // string - | op=String() {return op;} // column_reference - | column=Column() {return column;} + | column=Column() {column.setExpectedType('N'); return column;} // set_function_specification | op=SqlFunction() {return op;} // LEFT_PAR value_expression RIGHT_PAR - | (left= op=ValueExpression() right=) { WrappedOperand wop = queryFactory.createWrappedOperand(op); wop.setPosition(new TextPosition(left, right)); return wop;}) - }catch(Exception ex){ throw generateParseException(ex); + | (left= op=NumericExpression() right=) { WrappedOperand wop = queryFactory.createWrappedOperand(op); wop.setPosition(new TextPosition(left, right)); return wop;}) + }catch(Exception ex){ + throw generateParseException(ex); + } +} + +ADQLOperand StringValueExpressionPrimary(): {StringConstant expr; ADQLColumn column; ADQLOperand op;} { + try{ + (// string + expr=String() {return expr;} + // column_reference + | column=Column() 
{column.setExpectedType('S'); return column;} + // LEFT_PAR value_expression RIGHT_PAR + | ( (op=StringExpression()) ) {return queryFactory.createWrappedOperand(op);}) + }catch(Exception ex){ + throw generateParseException(ex); } } ADQLOperand ValueExpression(): {ADQLOperand valueExpr = null; } { - (valueExpr=GeometryValueFunction() - | LOOKAHEAD( | ) valueExpr=NumericExpression() - | LOOKAHEAD() valueExpr=StringExpression() - | LOOKAHEAD(StringFactor() ) valueExpr=StringExpression() - | valueExpr=NumericExpression()) - {return valueExpr;} + try{ + (LOOKAHEAD((|) | (Factor() (|||))) valueExpr=NumericExpression() + | LOOKAHEAD( | (StringFactor() )) valueExpr=StringExpression() + | LOOKAHEAD() valueExpr=ValueExpression() { valueExpr = queryFactory.createWrappedOperand(valueExpr); } + | LOOKAHEAD( ) valueExpr=UserDefinedFunction() + | valueExpr=GeometryValueFunction() + | LOOKAHEAD(Column()) valueExpr=Column() + | LOOKAHEAD(String()) valueExpr=StringFactor() + | valueExpr=Factor()) + {return valueExpr;} + }catch(Exception ex){ + throw generateParseException(ex); + } } ADQLOperand NumericExpression(): {Token sign=null; ADQLOperand leftOp, rightOp=null;} { @@ -1175,8 +1173,8 @@ ADQLOperand NumericTerm(): {Token sign=null; ADQLOperand leftOp, rightOp=null;} ADQLOperand Factor(): {boolean negative = false; Token minusSign = null; ADQLOperand op;} { ( - ( | (minusSign= {negative = true;}))? - (LOOKAHEAD(2) op=NumericFunction() | op=ValueExpressionPrimary()) + ( | ( {negative = true;}))? 
+ (LOOKAHEAD(2) op=NumericFunction() | op=NumericValueExpressionPrimary()) ) { @@ -1222,17 +1220,17 @@ ADQLOperand StringExpression(): {ADQLOperand leftOp; ADQLOperand rightOp = null; ADQLOperand StringFactor(): {ADQLOperand op;} { (op=ExtractCoordSys() - | LOOKAHEAD(2) op=UserDefinedFunction() - | op=ValueExpressionPrimary()) + | LOOKAHEAD(2) op=UserDefinedFunction() { ((UserDefinedFunction)op).setExpectedType('S'); } + | op=StringValueExpressionPrimary()) {return op;} } GeometryValue GeometryExpression(): {ADQLColumn col = null; GeometryFunction gf = null;} { (col=Column() | gf=GeometryValueFunction()) { - if (col != null) + if (col != null){ col.setExpectedType('G'); return new GeometryValue(col); - else + }else return new GeometryValue(gf); } } @@ -1444,24 +1442,26 @@ GeometryFunction GeometryFunction(): {Token fct=null, end; GeometryValue gvf1=GeometryExpression() end=) {gf = queryFactory.createArea(gvf1);} - | (fct= (p1=Point() {gf = queryFactory.createCoord1(p1);} | col1=Column() {gf = queryFactory.createCoord1(col1);}) end=) - | (fct= (p1=Point() {gf = queryFactory.createCoord2(p1);} | col1=Column() {gf = queryFactory.createCoord2(col1);}) end=) + | (fct= (p1=Point() {gf = queryFactory.createCoord1(p1);} | col1=Column() {col1.setExpectedType('G'); gf = queryFactory.createCoord1(col1);}) end=) + | (fct= (p1=Point() {gf = queryFactory.createCoord2(p1);} | col1=Column() {col1.setExpectedType('G'); gf = queryFactory.createCoord2(col1);}) end=) | (fct= (p1=Point()|col1=Column()) { if (p1 != null) gvp1 = new GeometryValue(p1); - else + else{ col1.setExpectedType('G'); gvp1 = new GeometryValue(col1); + } } (p2=Point()|col2=Column()) { if (p2 != null) gvp2 = new GeometryValue(p2); - else + else{ col2.setExpectedType('G'); gvp2 = new GeometryValue(col2); + } } end= {gf = queryFactory.createDistance(gvp1, gvp2);} @@ -1477,19 +1477,9 @@ GeometryFunction GeometryFunction(): {Token fct=null, end; GeometryValue 0){ - TextPosition position = new 
TextPosition(oldToken.next, token); - if (coordSys == null) - throw new ParseException("A coordinate system must always be provided !", position); - if (coordSys instanceof StringConstant && !isAllowedCoordSys(((StringConstant)coordSys).getValue())) - throw new ParseException("\""+coordSys.toADQL()+"\" is not an allowed coordinate systems !", position); - } - - return coordSys; - } + { return coordSys; } } GeometryFunction GeometryValueFunction(): {Token fct=null, end=null; ADQLOperand coordSys; ADQLOperand width, height; ADQLOperand[] coords, tmp; Vector vCoords; ADQLOperand op=null; GeometryValue gvf = null; GeometryFunction gf = null;} { @@ -1569,7 +1559,7 @@ ADQLFunction NumericFunction(): {ADQLFunction fct;} { (fct=MathFunction() | fct=TrigFunction() | fct=GeometryFunction() - | fct=UserDefinedFunction()) + | fct=UserDefinedFunction() { ((UserDefinedFunction)fct).setExpectedType('N'); }) {return fct;} } @@ -1624,19 +1614,23 @@ MathFunction TrigFunction(): {Token fct=null, end; ADQLOperand param1=null, para } } -/* /!\ WARNING: The function name may be prefixed by "udf_" but there is no way to check it here ! */ UserDefinedFunction UserDefinedFunction(): {Token fct, end; Vector params = new Vector(); ADQLOperand op;} { fct= (op=ValueExpression() {params.add(op);} ( op=ValueExpression() {params.add(op);})*)? end= { //System.out.println("INFO [ADQLParser]: \""+fct.image+"\" (from line "+fct.beginLine+" and column "+fct.beginColumn+" to line "+token.endLine+" and column "+(token.endColumn+1)+") is considered as an user defined function !"); try{ + // Build the parameters list: ADQLOperand[] parameters = new ADQLOperand[params.size()]; for(int i=0; i. * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.util.Iterator; @@ -27,8 +28,8 @@ import java.util.Vector; * *

Since it is a list, it is possible to add, remove, modify and iterate on a such object.

* - * @author Grégory Mantelet (CDS) - * @version 06/2011 + * @author Grégory Mantelet (CDS;ARI) + * @version 1.4 (06/2015) * * @see ClauseADQL * @see ClauseConstraints @@ -44,7 +45,7 @@ public abstract class ADQLList< T extends ADQLObject > implements ADQLObject, It private final Vector list = new Vector(); /** Position inside an ADQL query string. - * @since 1.3 */ + * @since 1.4 */ private TextPosition position = null; /** @@ -195,7 +196,7 @@ public abstract class ADQLList< T extends ADQLObject > implements ADQLObject, It * Sets the position at which this {@link ADQLList} has been found in the original ADQL query string. * * @param pos Position of this {@link ADQLList}. - * @since 1.3 + * @since 1.4 */ public final void setPosition(final TextPosition position){ this.position = position; @@ -203,12 +204,15 @@ public abstract class ADQLList< T extends ADQLObject > implements ADQLObject, It @Override public String toADQL(){ - String adql = (getName() == null) ? "" : (getName() + " "); + StringBuffer adql = new StringBuffer((getName() == null) ? "" : (getName() + " ")); - for(int i = 0; i < size(); i++) - adql += ((i == 0) ? "" : (" " + getSeparator(i) + " ")) + get(i).toADQL(); + for(int i = 0; i < size(); i++){ + if (i > 0) + adql.append(" " + getSeparator(i) + " "); + adql.append(get(i).toADQL()); + } - return adql; + return adql.toString(); } @Override diff --git a/src/adql/query/ADQLObject.java b/src/adql/query/ADQLObject.java index 85ebaa043519a3a2158b033eb517f6f9fc34c0f5..fee5aef5963ff5f522f752312858dd4f9f593a37 100644 --- a/src/adql/query/ADQLObject.java +++ b/src/adql/query/ADQLObject.java @@ -18,8 +18,8 @@ import adql.search.ISearchHandler; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . 
* - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institute (ARI) */ /** @@ -33,7 +33,7 @@ import adql.search.ISearchHandler; *

* * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public interface ADQLObject { @@ -51,7 +51,7 @@ public interface ADQLObject { * @return Position of this ADQL item in the ADQL query, * or NULL if not written originally in the query (for example, if added afterwards. * - * @since 1.3 + * @since 1.4 */ public TextPosition getPosition(); diff --git a/src/adql/query/ADQLQuery.java b/src/adql/query/ADQLQuery.java index 3f4729476923b3f3652035ae0c6ac5b8f8b3d96d..1372e716fed3a0cdd5f46028dd7606b9ce84c6e1 100644 --- a/src/adql/query/ADQLQuery.java +++ b/src/adql/query/ADQLQuery.java @@ -16,8 +16,8 @@ package adql.query; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012-2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.util.ArrayList; @@ -38,7 +38,7 @@ import adql.search.ISearchHandler; *

The resulting object of the {@link ADQLParser} is an object of this class.

* * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public class ADQLQuery implements ADQLObject { @@ -61,7 +61,7 @@ public class ADQLQuery implements ADQLObject { private ClauseADQL orderBy; /** Position of this Query (or sub-query) inside the whole given ADQL query string. - * @since 1.3*/ + * @since 1.4 */ private TextPosition position = null; /** @@ -271,7 +271,7 @@ public class ADQLQuery implements ADQLObject { * Set the position of this {@link ADQLQuery} (or sub-query) inside the whole given ADQL query string. * * @param position New position of this {@link ADQLQuery}. - * @since 1.3 + * @since 1.4 */ public final void setPosition(final TextPosition position){ this.position = position; @@ -301,7 +301,12 @@ public class ADQLQuery implements ADQLObject { ADQLOperand operand = item.getOperand(); if (item instanceof SelectAllColumns){ try{ - columns.addAll(from.getDBColumns()); + // If "{table}.*", add all columns of the specified table: + if (((SelectAllColumns)item).getAdqlTable() != null) + columns.addAll(((SelectAllColumns)item).getAdqlTable().getDBColumns()); + // Otherwise ("*"), add all columns of all selected tables: + else + columns.addAll(from.getDBColumns()); }catch(ParseException pe){ // Here, this error should not occur any more, since it must have been caught by the DBChecker! } @@ -465,4 +470,4 @@ public class ADQLQuery implements ADQLObject { return adql.toString(); } -} \ No newline at end of file +} diff --git a/src/adql/query/ClauseADQL.java b/src/adql/query/ClauseADQL.java index 7116b41004d154947d4636ddf82cd1f763dc4004..30bf6abacbbd2f09f25eac70cc2b4c230c1efaa9 100644 --- a/src/adql/query/ClauseADQL.java +++ b/src/adql/query/ClauseADQL.java @@ -17,7 +17,7 @@ package adql.query; * along with ADQLLibrary. If not, see . 
* * Copyright 2012-2013 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomisches Rechen Institute (ARI) + * Astronomisches Rechen Institut (ARI) */ /** diff --git a/src/adql/query/SelectAllColumns.java b/src/adql/query/SelectAllColumns.java index 0096fb012c4532ca8d14dc0002b86c1592654415..33dc27ed0ea5979c11d61e1214b6262edae19ab8 100644 --- a/src/adql/query/SelectAllColumns.java +++ b/src/adql/query/SelectAllColumns.java @@ -20,8 +20,8 @@ import adql.query.from.ADQLTable; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012-2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ /** @@ -29,7 +29,7 @@ import adql.query.from.ADQLTable; * It means: 'select all columns'. * * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public final class SelectAllColumns extends SelectItem { @@ -108,7 +108,7 @@ public final class SelectAllColumns extends SelectItem { * @param table An {@link ADQLTable} (MUST NOT BE NULL). */ public final void setAdqlTable(final ADQLTable table){ - if (table == null){ + if (table != null){ adqlTable = table; query = null; setPosition(null); diff --git a/src/adql/query/SelectItem.java b/src/adql/query/SelectItem.java index 41795cc9de3730b6c8d8be2a5354e16bd04f02c8..803101abca0ce66a7b11dde618c107a1d127ab03 100644 --- a/src/adql/query/SelectItem.java +++ b/src/adql/query/SelectItem.java @@ -16,8 +16,8 @@ package adql.query; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . 
* - * Copyright 2012-2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institute (ARI) */ import java.util.NoSuchElementException; @@ -30,7 +30,7 @@ import adql.query.operand.ADQLOperand; *

It merely encapsulates an operand and allows to associate to it an alias (according to the following syntax: "SELECT operand AS alias").

* * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) * * @see ClauseSelect */ @@ -46,7 +46,7 @@ public class SelectItem implements ADQLObject { private boolean caseSensitive = false; /** Position of this Select item in the ADQL query string. - * @since 1.3 */ + * @since 1.4 */ private TextPosition position = null; /** @@ -172,7 +172,7 @@ public class SelectItem implements ADQLObject { * Set the position of this {@link SelectItem} in the given ADQL query string. * * @param position New position of this {@link SelectItem}. - * @since 1.3 + * @since 1.4 */ public final void setPosition(final TextPosition position){ this.position = position; diff --git a/src/adql/query/TextPosition.java b/src/adql/query/TextPosition.java index 5bda46a1d2326edf8bf0c3f5eff91de8c023f98c..5324ebab077160eed2e873d290f21779ad2161f8 100644 --- a/src/adql/query/TextPosition.java +++ b/src/adql/query/TextPosition.java @@ -16,8 +16,8 @@ package adql.query; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institute (ARI) */ import adql.parser.Token; @@ -27,7 +27,7 @@ import adql.parser.Token; * It is particularly used to localize columns and tables in the original ADQL query. * * @author Grégory Mantelet (CDS;ARI) - * @version 05/2014 + * @version 1.4 (06/2015) */ public class TextPosition { @@ -94,7 +94,7 @@ public class TextPosition { * Builds a copy of the given position. * * @param positionToCopy Position to copy. 
- * @since 1.3 + * @since 1.4 */ public TextPosition(final TextPosition positionToCopy){ this(positionToCopy.beginLine, positionToCopy.beginColumn, positionToCopy.endLine, positionToCopy.endColumn); @@ -105,7 +105,7 @@ public class TextPosition { * * @param startPos Start position (only beginLine and beginColumn will be used). * @param endPos End position (only endLine and endColumn will be used). - * @since 1.3 + * @since 1.4 */ public TextPosition(final TextPosition startPos, final TextPosition endPos){ this(startPos.beginLine, startPos.beginColumn, endPos.endLine, endPos.endColumn); diff --git a/src/adql/query/constraint/Between.java b/src/adql/query/constraint/Between.java index 430fec1a3de2e00443fec0f544d1e2f4142205dc..d321f1829430c05e831609469bedc78674ace2c7 100644 --- a/src/adql/query/constraint/Between.java +++ b/src/adql/query/constraint/Between.java @@ -16,8 +16,8 @@ package adql.query.constraint; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institute (ARI) */ import java.util.NoSuchElementException; @@ -34,7 +34,7 @@ import adql.query.operand.ADQLOperand; * between the value of the two other operands, else it returns false.

* * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public class Between implements ADQLConstraint { @@ -51,7 +51,7 @@ public class Between implements ADQLConstraint { private boolean notBetween = false; /** Position of this {@link Between} in the given ADQL query string. - * @since 1.3 */ + * @since 1.4 */ private TextPosition position = null; /** @@ -183,7 +183,7 @@ public class Between implements ADQLConstraint { * Set the position of this {@link Between} in the given ADQL query string. * * @param position New position of this {@link Between}. - * @since 1.3 + * @since 1.4 */ public final void setPosition(final TextPosition position){ this.position = position; diff --git a/src/adql/query/constraint/Comparison.java b/src/adql/query/constraint/Comparison.java index 14b2d1bd72f368dc53c0e36d72e1620b000ace8a..1c0d23612d974b0f68b23c412734c4f3dae94af8 100644 --- a/src/adql/query/constraint/Comparison.java +++ b/src/adql/query/constraint/Comparison.java @@ -16,8 +16,8 @@ package adql.query.constraint; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institute (ARI) */ import java.util.NoSuchElementException; @@ -31,7 +31,7 @@ import adql.query.operand.ADQLOperand; * Represents a comparison (numeric or not) between two operands. * * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) * * @see ComparisonOperator */ @@ -47,7 +47,7 @@ public class Comparison implements ADQLConstraint { private ADQLOperand rightOperand; /** Position of this {@link Comparison} in the given ADQL query string. 
- * @since 1.3 */ + * @since 1.4 */ private TextPosition position = null; /** @@ -173,7 +173,7 @@ public class Comparison implements ADQLConstraint { * Set the position of this {@link Comparison} in the given ADQL query string. * * @param position New position of this {@link Comparison}. - * @since 1.3 + * @since 1.4 */ public final void setPosition(final TextPosition position){ this.position = position; diff --git a/src/adql/query/constraint/Exists.java b/src/adql/query/constraint/Exists.java index 92c119789b5d31031fb4c02206b3dfe86244ce8e..52af88f7d327cab45a7cce7c8b2048d17c020815 100644 --- a/src/adql/query/constraint/Exists.java +++ b/src/adql/query/constraint/Exists.java @@ -16,8 +16,8 @@ package adql.query.constraint; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institute (ARI) */ import java.util.NoSuchElementException; @@ -33,7 +33,7 @@ import adql.query.TextPosition; *

This function returns true if the sub-query given in parameter returns at least one result, else it returns false.

* * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public class Exists implements ADQLConstraint { @@ -41,7 +41,7 @@ public class Exists implements ADQLConstraint { private ADQLQuery subQuery; /** Position of this {@link Exists} in the given ADQL query string. - * @since 1.3 */ + * @since 1.4 */ private TextPosition position = null; /** @@ -97,7 +97,7 @@ public class Exists implements ADQLConstraint { * Set the position of this {@link Exists} in the given ADQL query string. * * @param position New position of this {@link Exists}. - * @since 1.3 + * @since 1.4 */ public final void setPosition(final TextPosition position){ this.position = position; diff --git a/src/adql/query/constraint/In.java b/src/adql/query/constraint/In.java index a5079849a12b210a40004d0dddd848766259a538..67f37e4d6228f3a5ce28ee8c00d0ed65f5a60715 100644 --- a/src/adql/query/constraint/In.java +++ b/src/adql/query/constraint/In.java @@ -16,8 +16,8 @@ package adql.query.constraint; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institute (ARI) */ import java.util.NoSuchElementException; @@ -37,7 +37,7 @@ import adql.query.operand.ADQLOperand; * either in the given values list or in the results of the given sub-query, else it returns false.

* * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public class In implements ADQLConstraint { @@ -54,7 +54,7 @@ public class In implements ADQLConstraint { private boolean notIn = false; /** Position of this {@link In} in the given ADQL query string. - * @since 1.3 */ + * @since 1.4 */ private TextPosition position = null; /** @@ -278,7 +278,7 @@ public class In implements ADQLConstraint { * Set the position of this {@link In} in the given ADQL query string. * * @param position New position of this {@link In}. - * @since 1.3 + * @since 1.4 */ public final void setPosition(final TextPosition position){ this.position = position; diff --git a/src/adql/query/constraint/IsNull.java b/src/adql/query/constraint/IsNull.java index 529a140a3eca41c99e6fd314650109b179041d67..56c6cdfe3a6039e0c88f3a350a805fec565ff718 100644 --- a/src/adql/query/constraint/IsNull.java +++ b/src/adql/query/constraint/IsNull.java @@ -16,8 +16,8 @@ package adql.query.constraint; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institute (ARI) */ import java.util.NoSuchElementException; @@ -31,7 +31,7 @@ import adql.query.operand.ADQLColumn; * Represents a comparison between a column to the NULL value. * * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public class IsNull implements ADQLConstraint { @@ -42,7 +42,7 @@ public class IsNull implements ADQLConstraint { private boolean isNotNull = false; /** Position of this {@link IsNull} in the given ADQL query string. 
- * @since 1.3 */ + * @since 1.4 */ private TextPosition position = null; /** @@ -131,7 +131,7 @@ public class IsNull implements ADQLConstraint { * Set the position of this {@link IsNull} in the given ADQL query string. * * @param position New position of this {@link IsNull}. - * @since 1.3 + * @since 1.4 */ public final void setPosition(final TextPosition position){ this.position = position; diff --git a/src/adql/query/constraint/NotConstraint.java b/src/adql/query/constraint/NotConstraint.java index 8d6f400867be91dd0637508bd02b78768157c388..9f5a7571cf35a62951ffac5f6ba676ca0d904cd3 100644 --- a/src/adql/query/constraint/NotConstraint.java +++ b/src/adql/query/constraint/NotConstraint.java @@ -16,8 +16,8 @@ package adql.query.constraint; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institute (ARI) */ import java.util.NoSuchElementException; @@ -30,14 +30,14 @@ import adql.query.TextPosition; * Lets apply the logical operator NOT on any constraint. * * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public class NotConstraint implements ADQLConstraint { private ADQLConstraint constraint; /** Position of this {@link NotConstraint} in the ADQL query string. - * @since 1.3 */ + * @since 1.4 */ private TextPosition position = null; /** @@ -71,7 +71,7 @@ public class NotConstraint implements ADQLConstraint { * Set the position of this {@link NotConstraint} in the given ADQL query string. * * @param position New position of this {@link NotConstraint}. 
- * @since 1.3 + * @since 1.4 */ public final void setPosition(final TextPosition position){ this.position = position; diff --git a/src/adql/query/from/ADQLJoin.java b/src/adql/query/from/ADQLJoin.java index 6cbc8c57914d310213215fc38c04f72ac5e26779..31b4f74becdc946b9314923622c8c916c85144cb 100644 --- a/src/adql/query/from/ADQLJoin.java +++ b/src/adql/query/from/ADQLJoin.java @@ -16,8 +16,8 @@ package adql.query.from; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012-2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.util.ArrayList; @@ -30,7 +30,7 @@ import java.util.NoSuchElementException; import adql.db.DBColumn; import adql.db.DBCommonColumn; import adql.db.SearchColumnList; -import adql.db.exception.UnresolvedJoin; +import adql.db.exception.UnresolvedJoinException; import adql.query.ADQLIterator; import adql.query.ADQLObject; import adql.query.ClauseConstraints; @@ -42,7 +42,7 @@ import adql.query.operand.ADQLColumn; * Defines a join between two "tables". * * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public abstract class ADQLJoin implements ADQLObject, FromContent { @@ -62,7 +62,7 @@ public abstract class ADQLJoin implements ADQLObject, FromContent { protected ArrayList lstColumns = null; /** Position of this {@link ADQLJoin} in the given ADQL query string. 
- * @since 1.3 */ + * @since 1.4 */ private TextPosition position = null; /* ************ */ @@ -363,7 +363,7 @@ public abstract class ADQLJoin implements ADQLObject, FromContent { } @Override - public SearchColumnList getDBColumns() throws UnresolvedJoin{ + public SearchColumnList getDBColumns() throws UnresolvedJoinException{ SearchColumnList list = new SearchColumnList(); SearchColumnList leftList = leftTable.getDBColumns(); SearchColumnList rightList = rightTable.getDBColumns(); @@ -431,20 +431,20 @@ public abstract class ADQLJoin implements ADQLObject, FromContent { } } - public final static DBColumn findExactlyOneColumn(final String columnName, final byte caseSensitive, final SearchColumnList list, final boolean leftList) throws UnresolvedJoin{ + public final static DBColumn findExactlyOneColumn(final String columnName, final byte caseSensitive, final SearchColumnList list, final boolean leftList) throws UnresolvedJoinException{ DBColumn result = findAtMostOneColumn(columnName, caseSensitive, list, leftList); if (result == null) - throw new UnresolvedJoin("Column \"" + columnName + "\" specified in USING clause does not exist in " + (leftList ? "left" : "right") + " table!"); + throw new UnresolvedJoinException("Column \"" + columnName + "\" specified in USING clause does not exist in " + (leftList ? "left" : "right") + " table!"); else return result; } - public final static DBColumn findAtMostOneColumn(final String columnName, final byte caseSensitive, final SearchColumnList list, final boolean leftList) throws UnresolvedJoin{ + public final static DBColumn findAtMostOneColumn(final String columnName, final byte caseSensitive, final SearchColumnList list, final boolean leftList) throws UnresolvedJoinException{ ArrayList result = list.search(null, null, null, columnName, caseSensitive); if (result.isEmpty()) return null; else if (result.size() > 1) - throw new UnresolvedJoin("Common column name \"" + columnName + "\" appears more than once in " + (leftList ? 
"left" : "right") + " table!"); + throw new UnresolvedJoinException("Common column name \"" + columnName + "\" appears more than once in " + (leftList ? "left" : "right") + " table!"); else return result.get(0); } @@ -486,4 +486,4 @@ public abstract class ADQLJoin implements ADQLObject, FromContent { @Override public abstract ADQLObject getCopy() throws Exception; -} \ No newline at end of file +} diff --git a/src/adql/query/from/FromContent.java b/src/adql/query/from/FromContent.java index e7ad5c20f1b64e0b3dec064e7ed029dec1b76cdf..a3df9cce2a1d0f1590239d1af87a7f15264a45f8 100644 --- a/src/adql/query/from/FromContent.java +++ b/src/adql/query/from/FromContent.java @@ -16,15 +16,15 @@ package adql.query.from; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012-2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.util.ArrayList; import adql.db.DBColumn; import adql.db.SearchColumnList; -import adql.db.exception.UnresolvedJoin; +import adql.db.exception.UnresolvedJoinException; import adql.query.ADQLObject; import adql.query.TextPosition; @@ -33,7 +33,7 @@ import adql.query.TextPosition; * It could be either a table ({@link ADQLTable}) or a join ({@link ADQLJoin}). * * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public interface FromContent extends ADQLObject { @@ -43,9 +43,9 @@ public interface FromContent extends ADQLObject { *

Note: In the most cases, this list is generated on the fly !

* * @return All the available {@link DBColumn}s. - * @throws UnresolvedJoin If a join is not possible. + * @throws UnresolvedJoinException If a join is not possible. */ - public SearchColumnList getDBColumns() throws UnresolvedJoin; + public SearchColumnList getDBColumns() throws UnresolvedJoinException; /** * Gets all {@link ADQLTable} instances contained in this FROM part (itself included, if it is an {@link ADQLTable}). @@ -72,7 +72,7 @@ public interface FromContent extends ADQLObject { * Set the position of this {@link FromContent} in the given ADQL query string. * * @param position New position of this {@link FromContent}. - * @since 1.3 + * @since 1.4 */ public void setPosition(final TextPosition position); diff --git a/src/adql/query/operand/ADQLColumn.java b/src/adql/query/operand/ADQLColumn.java index 590f8b5048e1b2e4fc187093d7526767cb1eeb8a..eebce23b893d6c0f15336343678928ca3e5671a9 100644 --- a/src/adql/query/operand/ADQLColumn.java +++ b/src/adql/query/operand/ADQLColumn.java @@ -16,7 +16,8 @@ package adql.query.operand; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import adql.db.DBColumn; @@ -30,10 +31,10 @@ import adql.query.from.ADQLTable; /** * Represents the complete (literal) reference to a column ({schema(s)}.{table}.{column}). * - * @author Grégory Mantelet (CDS) - * @version 07/2011 + * @author Grégory Mantelet (CDS;ARI) + * @version 1.4 (06/2015) */ -public class ADQLColumn implements ADQLOperand { +public class ADQLColumn implements ADQLOperand, UnknownType { /** Position in the original ADQL query string. 
*/ private TextPosition position = null; @@ -59,6 +60,10 @@ public class ADQLColumn implements ADQLOperand { /** The {@link ADQLTable} which is supposed to contain this column. By default, this field is automatically filled by {@link adql.db.DBChecker}. */ private ADQLTable adqlTable = null; + /** Type expected by the parser. + * @since 1.3 */ + private char expectedType = '?'; + /* ************ */ /* CONSTRUCTORS */ /* ************ */ @@ -452,14 +457,29 @@ public class ADQLColumn implements ADQLOperand { /* ***************** */ /* INHERITED METHODS */ /* ***************** */ + @Override + public char getExpectedType(){ + return expectedType; + } + + @Override + public void setExpectedType(final char c){ + expectedType = c; + } + @Override public boolean isNumeric(){ - return true; + return (dbLink == null || dbLink.getDatatype() == null || dbLink.getDatatype().isNumeric()); } @Override public boolean isString(){ - return true; + return (dbLink == null || dbLink.getDatatype() == null || dbLink.getDatatype().isString()); + } + + @Override + public boolean isGeometry(){ + return (dbLink == null || dbLink.getDatatype() == null || dbLink.getDatatype().isGeometry()); } @Override diff --git a/src/adql/query/operand/ADQLOperand.java b/src/adql/query/operand/ADQLOperand.java index 61dc647cfc22f38eebf4f731deb0d0469b97a221..413546b4f505b01dea9666c93c346fdcf6118336 100644 --- a/src/adql/query/operand/ADQLOperand.java +++ b/src/adql/query/operand/ADQLOperand.java @@ -16,22 +16,42 @@ package adql.query.operand; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import adql.query.ADQLObject; /** *

Any ADQL operand (an operation, a constant, a column name, a function, ...) must implement this interface - * and indicates whether it corresponds to a numeric or a string value.

+ * and indicates whether it corresponds to a numeric, a string or a geometrical region value.

* - * @author Grégory Mantelet (CDS) - * @version 11/2010 + * @author Grégory Mantelet (CDS;ARI) + * @version 1.3 (10/2014) */ public interface ADQLOperand extends ADQLObject { + /** + * Tell whether this operand is numeric or not. + * + * @return true if this operand is numeric, false otherwise. + */ public boolean isNumeric(); + /** + * Tell whether this operand is a string or not. + * + * @return true if this operand is a string, false otherwise. + */ public boolean isString(); + /** + * Tell whether this operand is a geometrical region or not. + * + * @return true if this operand is a geometry, false otherwise. + * + * @since 1.3 + */ + public boolean isGeometry(); + } diff --git a/src/adql/query/operand/Concatenation.java b/src/adql/query/operand/Concatenation.java index a09b7905adc6e885cb0af492d19ef8a05e990782..24536c72cfafd754bc359a462ca7afe757b62dfc 100644 --- a/src/adql/query/operand/Concatenation.java +++ b/src/adql/query/operand/Concatenation.java @@ -16,7 +16,8 @@ package adql.query.operand; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import adql.query.ADQLList; @@ -25,8 +26,8 @@ import adql.query.ADQLObject; /** * Represents a concatenation in ADQL (ex: "_s_ra" || ':' || "_s_dec"). 
* - * @author Grégory Mantelet (CDS) - * @version 11/2010 + * @author Grégory Mantelet (CDS;ARI) + * @version 1.3 (10/2014) */ public final class Concatenation extends ADQLList implements ADQLOperand { @@ -65,12 +66,19 @@ public final class Concatenation extends ADQLList implements ADQLOp return "||"; } + @Override public final boolean isNumeric(){ return false; } + @Override public final boolean isString(){ return true; } + @Override + public final boolean isGeometry(){ + return false; + } + } \ No newline at end of file diff --git a/src/adql/query/operand/NegativeOperand.java b/src/adql/query/operand/NegativeOperand.java index 39bb608d70aafebdedc5fa6c6e64a57373ad244d..7b4bc5901867a65e6e4a0a92f355f15617dffeda 100644 --- a/src/adql/query/operand/NegativeOperand.java +++ b/src/adql/query/operand/NegativeOperand.java @@ -16,8 +16,8 @@ package adql.query.operand; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.util.NoSuchElementException; @@ -30,13 +30,14 @@ import adql.query.TextPosition; * Lets putting a minus sign in front of any numeric operand. * * @author Grégory Mantelet (CDS;ARI) - * @version 05/2014 + * @version 1.4 (06/2015) */ public final class NegativeOperand implements ADQLOperand { /** The negativated operand. */ private ADQLOperand operand; - /** Position of this operand. */ + /** Position of this operand. + * @since 1.4 */ private TextPosition position = null; /** @@ -93,12 +94,20 @@ public final class NegativeOperand implements ADQLOperand { * Sets the position at which this {@link NegativeOperand} has been found in the original ADQL query string. * * @param pos Position of this {@link NegativeOperand}. 
- * @since 1.3 + * @since 1.4 */ public final void setPosition(final TextPosition position){ this.position = position; } + /** Always returns false. + * @see adql.query.operand.ADQLOperand#isGeometry() + */ + @Override + public final boolean isGeometry(){ + return false; + } + @Override public ADQLObject getCopy() throws Exception{ NegativeOperand copy = new NegativeOperand((ADQLOperand)operand.getCopy()); diff --git a/src/adql/query/operand/NumericConstant.java b/src/adql/query/operand/NumericConstant.java index a0c93fd6a4d9a97f1280c2b7b982f9adede7f7b2..53e4937cf1f7bbff1a48c3939d6c4a7e3ad1c0a8 100644 --- a/src/adql/query/operand/NumericConstant.java +++ b/src/adql/query/operand/NumericConstant.java @@ -16,8 +16,8 @@ package adql.query.operand; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import adql.query.ADQLIterator; @@ -29,12 +29,14 @@ import adql.query.TextPosition; * A numeric (integer, double, ...) constant. * * @author Grégory Mantelet (CDS;ARI) - * @version 05/2014 + * @version 1.4 (06/2015) */ public final class NumericConstant implements ADQLOperand { private String value; - /** Position of this operand. */ + + /** Position of this operand. + * @since 1.4 */ private TextPosition position = null; /** @@ -175,12 +177,20 @@ public final class NumericConstant implements ADQLOperand { * Sets the position at which this {@link NumericConstant} has been found in the original ADQL query string. * * @param pos Position of this {@link NumericConstant}. - * @since 1.3 + * @since 1.4 */ public final void setPosition(final TextPosition position){ this.position = position; } + /** Always returns false. 
+ * @see adql.query.operand.ADQLOperand#isGeometry() + */ + @Override + public final boolean isGeometry(){ + return false; + } + @Override public ADQLObject getCopy(){ return new NumericConstant(this); diff --git a/src/adql/query/operand/Operation.java b/src/adql/query/operand/Operation.java index 09daee20eb2381e060c21e13ac0424497b6a8fa1..1418ae1cce1ea4784c4ba47c3d2e2255018d06fa 100644 --- a/src/adql/query/operand/Operation.java +++ b/src/adql/query/operand/Operation.java @@ -16,8 +16,8 @@ package adql.query.operand; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.util.NoSuchElementException; @@ -30,7 +30,7 @@ import adql.query.TextPosition; * It represents a simple numeric operation (sum, difference, multiplication and division). * * @author Grégory Mantelet (CDS;ARI) - * @version 05/2014 + * @version 1.4 (06/2015) * * @see OperationType */ @@ -48,7 +48,8 @@ public class Operation implements ADQLOperand { /** Part of the operation at the right of the operator. */ private ADQLOperand rightOperand; - /** Position of the operation in the ADQL query string. */ + /** Position of the operation in the ADQL query string. + * @since 1.4 */ private TextPosition position = null; /** @@ -194,12 +195,20 @@ public class Operation implements ADQLOperand { * Sets the position at which this {@link WrappedOperand} has been found in the original ADQL query string. * * @param pos Position of this {@link WrappedOperand}. - * @since 1.3 + * @since 1.4 */ public final void setPosition(final TextPosition position){ this.position = position; } + /** Always returns false. 
+ * @see adql.query.operand.ADQLOperand#isGeometry() + */ + @Override + public final boolean isGeometry(){ + return false; + } + @Override public ADQLObject getCopy() throws Exception{ return new Operation(this); @@ -269,4 +278,4 @@ public class Operation implements ADQLOperand { return leftOperand.toADQL() + operation.toADQL() + rightOperand.toADQL(); } -} \ No newline at end of file +} diff --git a/src/adql/query/operand/StringConstant.java b/src/adql/query/operand/StringConstant.java index 1551912c630a075667c760dea6c36ab3b948d143..998bda9057ee2e1d85d6202dfd9f8e14ff2fe458 100644 --- a/src/adql/query/operand/StringConstant.java +++ b/src/adql/query/operand/StringConstant.java @@ -16,8 +16,8 @@ package adql.query.operand; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import adql.query.ADQLIterator; @@ -29,12 +29,14 @@ import adql.query.TextPosition; * A string constant. * * @author Grégory Mantelet (CDS;ARI) - * @version 05/2014 + * @version 1.4 (06/2015) */ public final class StringConstant implements ADQLOperand { private String value; - /** Position of this operand. */ + + /** Position of this operand. + * @since 1.4 */ private TextPosition position = null; public StringConstant(String value){ @@ -72,12 +74,17 @@ public final class StringConstant implements ADQLOperand { * Sets the position at which this {@link StringConstant} has been found in the original ADQL query string. * * @param pos Position of this {@link StringConstant}. 
- * @since 1.3 + * @since 1.4 */ public final void setPosition(final TextPosition position){ this.position = position; } + @Override + public final boolean isGeometry(){ + return false; + } + @Override public ADQLObject getCopy(){ return new StringConstant(this); @@ -85,7 +92,7 @@ public final class StringConstant implements ADQLOperand { @Override public String getName(){ - return "'" + value + "'"; + return toADQL(); } @Override @@ -95,7 +102,7 @@ public final class StringConstant implements ADQLOperand { @Override public String toADQL(){ - return "'" + value + "'"; + return "'" + value.replaceAll("'", "''") + "'"; } } diff --git a/src/adql/query/operand/UnknownType.java b/src/adql/query/operand/UnknownType.java new file mode 100644 index 0000000000000000000000000000000000000000..bae9d9bb728c2aa5947dced16dd86d73fb446f86 --- /dev/null +++ b/src/adql/query/operand/UnknownType.java @@ -0,0 +1,52 @@ +package adql.query.operand; + +/* + * This file is part of ADQLLibrary. + * + * ADQLLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * ADQLLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with ADQLLibrary. If not, see . + * + * Copyright 2014 - Astronomisches Rechen Institut (ARI) + */ + +import adql.query.operand.function.UserDefinedFunction; + +/** + *

Operand whose the type can not be known at the parsing time. + * A post-parsing step with column metadata is needed to resolved their types.

+ * + *

Note: + * For the moment, only two operands are concerned: columns ({@link ADQLColumn}) and user defined functions ({@link UserDefinedFunction}). + *

+ * + * @author Grégory Mantelet (ARI) + * @version 1.3 (10/2014) + * @since 1.3 + */ +public interface UnknownType extends ADQLOperand { + + /** + * Get the type expected by the syntactic parser according to the context. + * + * @return Expected type: 'n' or 'N' for numeric, 's' or 'S' for string, 'g' or 'G' for geometry. + */ + public char getExpectedType(); + + /** + * Set the type expected for this operand. + * + * @param c Expected type: 'n' or 'N' for numeric, 's' or 'S' for string, 'g' or 'G' for geometry. + */ + public void setExpectedType(final char c); + +} diff --git a/src/adql/query/operand/WrappedOperand.java b/src/adql/query/operand/WrappedOperand.java index 92ab7bfaae0ca5b6c2d354c2f2a6e7cf74710657..ca36d7a03793796702ddd36c38aaa9e843751c47 100644 --- a/src/adql/query/operand/WrappedOperand.java +++ b/src/adql/query/operand/WrappedOperand.java @@ -16,8 +16,8 @@ package adql.query.operand; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.util.NoSuchElementException; @@ -30,13 +30,14 @@ import adql.query.TextPosition; * Lets wrapping an operand by parenthesis. * * @author Grégory Mantelet (CDS;ARI) - * @version 05/2014 + * @version 1.4 (06/2015) */ public class WrappedOperand implements ADQLOperand { /** The wrapped operand. */ private ADQLOperand operand; - /** Position of this operand. */ + /** Position of this operand. + * @since 1.4 */ private TextPosition position = null; /** @@ -80,12 +81,17 @@ public class WrappedOperand implements ADQLOperand { * Sets the position at which this {@link WrappedOperand} has been found in the original ADQL query string. * * @param pos Position of this {@link WrappedOperand}. 
- * @since 1.3 + * @since 1.4 */ public final void setPosition(final TextPosition position){ this.position = position; } + @Override + public final boolean isGeometry(){ + return operand.isGeometry(); + } + @Override public ADQLObject getCopy() throws Exception{ return new WrappedOperand((ADQLOperand)operand.getCopy()); diff --git a/src/adql/query/operand/function/ADQLFunction.java b/src/adql/query/operand/function/ADQLFunction.java index ed9dac72ba6c33844257dc0c834b5596388e4c80..b22ec5611baee8e66c3a175aa7269cf9296758ca 100644 --- a/src/adql/query/operand/function/ADQLFunction.java +++ b/src/adql/query/operand/function/ADQLFunction.java @@ -16,8 +16,8 @@ package adql.query.operand.function; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012-2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institute (ARI) */ import java.util.Iterator; @@ -32,12 +32,12 @@ import adql.query.operand.ADQLOperand; * Represents any kind of function. * * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public abstract class ADQLFunction implements ADQLOperand { /** Position of this {@link ADQLFunction} in the ADQL query string. - * @since 1.3 */ + * @since 1.4 */ private TextPosition position = null; @Override @@ -49,7 +49,7 @@ public abstract class ADQLFunction implements ADQLOperand { * Set the position of this {@link ADQLFunction} in the ADQL query string. 
* * @param position New position of this {@link ADQLFunction} - * @since 1.3 + * @since 1.4 */ public final void setPosition(final TextPosition position){ this.position = position; diff --git a/src/adql/query/operand/function/DefaultUDF.java b/src/adql/query/operand/function/DefaultUDF.java index 2ab08bdcdfbb9aece9ffbee4c3888914a3c97f3f..268de91d048012fd25f27f2b7e1d0e722404a155 100644 --- a/src/adql/query/operand/function/DefaultUDF.java +++ b/src/adql/query/operand/function/DefaultUDF.java @@ -16,33 +16,42 @@ package adql.query.operand.function; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012-2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), Astronomisches Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ +import adql.db.FunctionDef; import adql.query.ADQLList; import adql.query.ADQLObject; import adql.query.ClauseADQL; import adql.query.TextPosition; import adql.query.operand.ADQLOperand; +import adql.translator.ADQLTranslator; +import adql.translator.TranslationException; /** * It represents any function which is not managed by ADQL. * * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public final class DefaultUDF extends UserDefinedFunction { - /** Its parameters. */ + /** Define/Describe this user defined function. + * This object gives the return type and the number and type of all parameters. */ + protected FunctionDef definition = null; + + /** Its parsed parameters. */ protected final ADQLList parameters; + /** Parsed name of this UDF. */ protected final String functionName; /** * Creates a user function. * @param params Parameters of the function. 
*/ - public DefaultUDF(final String name, ADQLOperand[] params) throws NullPointerException{ + public DefaultUDF(final String name, final ADQLOperand[] params) throws NullPointerException{ functionName = name; parameters = new ClauseADQL(); if (params != null){ @@ -58,25 +67,64 @@ public final class DefaultUDF extends UserDefinedFunction { * @throws Exception If there is an error during the copy. */ @SuppressWarnings("unchecked") - public DefaultUDF(DefaultUDF toCopy) throws Exception{ + public DefaultUDF(final DefaultUDF toCopy) throws Exception{ functionName = toCopy.functionName; parameters = (ADQLList)(toCopy.parameters.getCopy()); setPosition((toCopy.getPosition() == null) ? null : new TextPosition(toCopy.getPosition()));; } + /** + * Get the signature/definition/description of this user defined function. + * The returned object provides information on the return type and the number and type of parameters. + * + * @return Definition of this function. (MAY be NULL) + */ + public final FunctionDef getDefinition(){ + return definition; + } + + /** + *

Let set the signature/definition/description of this user defined function.

+ * + *

IMPORTANT: + * No particular checks are done here except on the function name which MUST + * be the same (case insensitive) as the name of the given definition. + * Advanced checks must have been done before calling this setter. + *

+ * + * @param def The definition applying to this parsed UDF, or NULL if none has been found. + * + * @throws IllegalArgumentException If the name in the given definition does not match the name of this parsed function. + * + * @since 1.3 + */ + public final void setDefinition(final FunctionDef def) throws IllegalArgumentException{ + if (def != null && (def.name == null || !functionName.equalsIgnoreCase(def.name))) + throw new IllegalArgumentException("The parsed function name (" + functionName + ") does not match to the name of the given UDF definition (" + def.name + ")."); + + this.definition = def; + } + @Override public final boolean isNumeric(){ - return true; + return (definition == null || definition.isNumeric()); } @Override public final boolean isString(){ - return true; + return (definition == null || definition.isString()); + } + + @Override + public final boolean isGeometry(){ + return (definition == null || definition.isGeometry()); } @Override public ADQLObject getCopy() throws Exception{ - return new DefaultUDF(this); + DefaultUDF copy = new DefaultUDF(this); + copy.setDefinition(definition); + return copy; } @Override @@ -115,4 +163,17 @@ public final class DefaultUDF extends UserDefinedFunction { return oldParam; } + @Override + public String translate(final ADQLTranslator caller) throws TranslationException{ + StringBuffer sql = new StringBuffer(functionName); + sql.append('('); + for(int i = 0; i < parameters.size(); i++){ + if (i > 0) + sql.append(',').append(' '); + sql.append(caller.translate(parameters.get(i))); + } + sql.append(')'); + return sql.toString(); + } + } diff --git a/src/adql/query/operand/function/MathFunction.java b/src/adql/query/operand/function/MathFunction.java index 88e06e06d171d19ef2ff0bea28cc38f50ef6c484..49e1a4a675e436658378ac736e4cf919057784f1 100644 --- a/src/adql/query/operand/function/MathFunction.java +++ b/src/adql/query/operand/function/MathFunction.java @@ -16,8 +16,8 @@ package adql.query.operand.function; * 
You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012-2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomisches Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import adql.query.ADQLObject; @@ -28,7 +28,7 @@ import adql.query.operand.ADQLOperand; * It represents any basic mathematical function. * * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) * * @see MathFunctionType */ @@ -151,6 +151,11 @@ public class MathFunction extends ADQLFunction { return false; } + @Override + public final boolean isGeometry(){ + return false; + } + @Override public ADQLOperand[] getParameters(){ switch(getNbParameters()){ diff --git a/src/adql/query/operand/function/SQLFunction.java b/src/adql/query/operand/function/SQLFunction.java index 588eba8ceef3f3154697c76cb8ac754d0f2e53e6..5d0748e62f47182eb86a965e29c673c60dc43181 100644 --- a/src/adql/query/operand/function/SQLFunction.java +++ b/src/adql/query/operand/function/SQLFunction.java @@ -16,8 +16,8 @@ package adql.query.operand.function; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2011,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2011-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institute (ARI) */ import adql.query.ADQLObject; @@ -28,7 +28,7 @@ import adql.query.operand.ADQLOperand; * It represents any SQL function (COUNT, MAX, MIN, AVG, SUM, etc...). 
* * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) * * @see SQLFunctionType */ @@ -139,6 +139,11 @@ public class SQLFunction extends ADQLFunction { return false; } + @Override + public final boolean isGeometry(){ + return false; + } + @Override public ADQLOperand[] getParameters(){ if (param != null) @@ -183,4 +188,4 @@ public class SQLFunction extends ADQLFunction { return getName() + "(" + (distinct ? "DISTINCT " : "") + param.toADQL() + ")"; } -} \ No newline at end of file +} diff --git a/src/adql/query/operand/function/UserDefinedFunction.java b/src/adql/query/operand/function/UserDefinedFunction.java index 39d87784e8a0c71fa26e6774277054eee98bb4f5..53c2474b85319c647cc87ec09c11d9ad1d92925f 100644 --- a/src/adql/query/operand/function/UserDefinedFunction.java +++ b/src/adql/query/operand/function/UserDefinedFunction.java @@ -16,17 +16,68 @@ package adql.query.operand.function; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ +import adql.query.operand.UnknownType; +import adql.translator.ADQLTranslator; +import adql.translator.TranslationException; + /** * Function defined by the user (i.e. PSQL functions). * - * @author Grégory Mantelet (CDS) - * @version 01/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 1.3 (02/2015) * * @see DefaultUDF */ -public abstract class UserDefinedFunction extends ADQLFunction { +public abstract class UserDefinedFunction extends ADQLFunction implements UnknownType { + + /** Type expected by the parser. 
+ * @since 1.3 */ + private char expectedType = '?'; + + @Override + public char getExpectedType(){ + return expectedType; + } + + @Override + public void setExpectedType(final char c){ + expectedType = c; + } + + /** + *

Translate this User Defined Function into the language supported by the given translator.

+ * + *

VERY IMPORTANT: This function MUST NOT use {@link ADQLTranslator#translate(UserDefinedFunction)} to translate itself. + * The given {@link ADQLTranslator} must be used ONLY to translate UDF's operands.

+ * + *

Implementation example (extract of {@link DefaultUDF#translate(ADQLTranslator)}):

+ *
+	 * public String translate(final ADQLTranslator caller) throws TranslationException{
+	 * 	StringBuffer sql = new StringBuffer(functionName);
+	 * 	sql.append('(');
+	 * 	for(int i = 0; i < parameters.size(); i++){
+	 *		if (i > 0)
+	 *			sql.append(',').append(' ');
+	 * 		sql.append(caller.translate(parameters.get(i)));
+	 *	}
+	 *	sql.append(')');
+	 *	return sql.toString();
+	 * }
+	 * 
+ * + * + * @param caller Translator to use in order to translate ONLY function parameters. + * + * @return The translation of this UDF into the language supported by the given translator. + * + * @throws TranslationException If one of the parameters can not be translated. + * + * @since 1.3 + */ + public abstract String translate(final ADQLTranslator caller) throws TranslationException; } diff --git a/src/adql/query/operand/function/geometry/AreaFunction.java b/src/adql/query/operand/function/geometry/AreaFunction.java index aa59e83a0d08dcf4fe51f30bf7e1776726822c42..bb9003d5484091d54efde436410ce6c866f7c9b6 100644 --- a/src/adql/query/operand/function/geometry/AreaFunction.java +++ b/src/adql/query/operand/function/geometry/AreaFunction.java @@ -16,8 +16,8 @@ package adql.query.operand.function.geometry; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import adql.query.ADQLObject; @@ -34,7 +34,7 @@ import adql.query.operand.ADQLOperand; *

Inappropriate geometries for this construct (e.g. POINT) SHOULD either return zero or throw an error message. This choice must be done in an extended class of {@link AreaFunction}.

* * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public class AreaFunction extends GeometryFunction { @@ -108,6 +108,11 @@ public class AreaFunction extends GeometryFunction { return false; } + @Override + public boolean isGeometry(){ + return false; + } + @Override public ADQLOperand[] getParameters(){ return new ADQLOperand[]{parameter.getValue()}; @@ -147,4 +152,4 @@ public class AreaFunction extends GeometryFunction { throw new ArrayIndexOutOfBoundsException("No " + index + "-th parameter for the function \"" + getName() + "\" !"); } -} \ No newline at end of file +} diff --git a/src/adql/query/operand/function/geometry/BoxFunction.java b/src/adql/query/operand/function/geometry/BoxFunction.java index 485c0982b8f483ceaf31a56574eb7261eb1e591d..7585fd67b13412b1548c24e41e9e288e8d2054f5 100644 --- a/src/adql/query/operand/function/geometry/BoxFunction.java +++ b/src/adql/query/operand/function/geometry/BoxFunction.java @@ -16,8 +16,8 @@ package adql.query.operand.function.geometry; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import adql.query.ADQLObject; @@ -38,9 +38,8 @@ import adql.query.operand.ADQLOperand; * In this second example the coordinates of the center position are extracted from a coordinate's column reference. *

* - * * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public class BoxFunction extends GeometryFunction { @@ -110,6 +109,11 @@ public class BoxFunction extends GeometryFunction { @Override public boolean isString(){ + return false; + } + + @Override + public boolean isGeometry(){ return true; } diff --git a/src/adql/query/operand/function/geometry/CentroidFunction.java b/src/adql/query/operand/function/geometry/CentroidFunction.java index 8ef5af3e2021a1a4243b2b6d35b2488547d4d015..23a2b7852972ed5df64646e54739db87b29842b0 100644 --- a/src/adql/query/operand/function/geometry/CentroidFunction.java +++ b/src/adql/query/operand/function/geometry/CentroidFunction.java @@ -16,8 +16,8 @@ package adql.query.operand.function.geometry; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import adql.query.ADQLObject; @@ -36,7 +36,7 @@ import adql.query.operand.ADQLOperand; *

* * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public class CentroidFunction extends GeometryFunction { @@ -88,6 +88,11 @@ public class CentroidFunction extends GeometryFunction { return false; } + @Override + public boolean isGeometry(){ + return false; + } + @Override public ADQLOperand[] getParameters(){ return new ADQLOperand[]{parameter.getValue()}; @@ -127,4 +132,4 @@ public class CentroidFunction extends GeometryFunction { throw new ArrayIndexOutOfBoundsException("No " + index + "-th parameter for the function \"" + getName() + "\" !"); } -} \ No newline at end of file +} diff --git a/src/adql/query/operand/function/geometry/CircleFunction.java b/src/adql/query/operand/function/geometry/CircleFunction.java index aa0efdb8fa7d4d1cb34e11fda2ec9f4140218ac8..3d3dc6bf8e6a51ba24615538d770290661a1b7bf 100644 --- a/src/adql/query/operand/function/geometry/CircleFunction.java +++ b/src/adql/query/operand/function/geometry/CircleFunction.java @@ -16,8 +16,8 @@ package adql.query.operand.function.geometry; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import adql.query.ADQLObject; @@ -36,7 +36,7 @@ import adql.query.operand.ADQLOperand; *

* * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public class CircleFunction extends GeometryFunction { @@ -100,6 +100,11 @@ public class CircleFunction extends GeometryFunction { @Override public boolean isString(){ + return false; + } + + @Override + public boolean isGeometry(){ return true; } diff --git a/src/adql/query/operand/function/geometry/ContainsFunction.java b/src/adql/query/operand/function/geometry/ContainsFunction.java index 0a99325409a80733f87cfcccc34fa2ca76714305..e26fa97593863abc8242f5cea3f805e5e6d56db0 100644 --- a/src/adql/query/operand/function/geometry/ContainsFunction.java +++ b/src/adql/query/operand/function/geometry/ContainsFunction.java @@ -16,8 +16,8 @@ package adql.query.operand.function.geometry; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import adql.query.ADQLObject; @@ -42,7 +42,7 @@ import adql.query.operand.ADQLOperand; *

* * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public class ContainsFunction extends GeometryFunction { @@ -101,6 +101,11 @@ public class ContainsFunction extends GeometryFunction { return false; } + @Override + public boolean isGeometry(){ + return false; + } + /** * @return The leftParam. */ @@ -186,4 +191,4 @@ public class ContainsFunction extends GeometryFunction { return replaced; } -} \ No newline at end of file +} diff --git a/src/adql/query/operand/function/geometry/DistanceFunction.java b/src/adql/query/operand/function/geometry/DistanceFunction.java index f7aa8896733821b8037d15def7a5c9d1617304da..fb49a9353f98ffe6730e97bd451ef978458758d5 100644 --- a/src/adql/query/operand/function/geometry/DistanceFunction.java +++ b/src/adql/query/operand/function/geometry/DistanceFunction.java @@ -16,8 +16,8 @@ package adql.query.operand.function.geometry; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import adql.query.ADQLObject; @@ -35,7 +35,7 @@ import adql.query.operand.ADQLOperand; * coordinate system with GEOCENTER reference position.

* * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public class DistanceFunction extends GeometryFunction { @@ -99,6 +99,11 @@ public class DistanceFunction extends GeometryFunction { return false; } + @Override + public boolean isGeometry(){ + return false; + } + /** * Gets the first point. * @@ -200,4 +205,4 @@ public class DistanceFunction extends GeometryFunction { return replaced; } -} \ No newline at end of file +} diff --git a/src/adql/query/operand/function/geometry/ExtractCoord.java b/src/adql/query/operand/function/geometry/ExtractCoord.java index 570b79a96be8573cb02e2ced79dc536ae7ba1698..edff24aeb91c4bdce913d1d190856a8b7188355d 100644 --- a/src/adql/query/operand/function/geometry/ExtractCoord.java +++ b/src/adql/query/operand/function/geometry/ExtractCoord.java @@ -16,8 +16,8 @@ package adql.query.operand.function.geometry; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import adql.query.ADQLObject; @@ -36,7 +36,7 @@ import adql.query.operand.ADQLOperand; *

* * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public class ExtractCoord extends GeometryFunction { @@ -98,6 +98,11 @@ public class ExtractCoord extends GeometryFunction { return false; } + @Override + public boolean isGeometry(){ + return false; + } + @Override public ADQLOperand[] getParameters(){ return new ADQLOperand[]{point.getValue()}; @@ -137,4 +142,4 @@ public class ExtractCoord extends GeometryFunction { throw new ArrayIndexOutOfBoundsException("No " + index + "-th parameter for the function \"" + getName() + "\" !"); } -} \ No newline at end of file +} diff --git a/src/adql/query/operand/function/geometry/ExtractCoordSys.java b/src/adql/query/operand/function/geometry/ExtractCoordSys.java index 15cfa30ac9e49584e288ba61c522d868422823dc..3865c2df8799ed3d15fd6de37e26206416ef09dd 100644 --- a/src/adql/query/operand/function/geometry/ExtractCoordSys.java +++ b/src/adql/query/operand/function/geometry/ExtractCoordSys.java @@ -16,8 +16,8 @@ package adql.query.operand.function.geometry; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import adql.query.ADQLObject; @@ -36,7 +36,7 @@ import adql.query.operand.ADQLOperand; *

* * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public class ExtractCoordSys extends GeometryFunction { @@ -85,6 +85,11 @@ public class ExtractCoordSys extends GeometryFunction { return true; } + @Override + public boolean isGeometry(){ + return false; + } + @Override public ADQLOperand[] getParameters(){ return new ADQLOperand[]{geomExpr.getValue()}; @@ -124,4 +129,4 @@ public class ExtractCoordSys extends GeometryFunction { throw new ArrayIndexOutOfBoundsException("No " + index + "-th parameter for the function " + getName() + " !"); } -} \ No newline at end of file +} diff --git a/src/adql/query/operand/function/geometry/GeometryFunction.java b/src/adql/query/operand/function/geometry/GeometryFunction.java index 7eb29e0a19711fe00856b9cda1013605753538bd..18920016ddc3ed18cfac41030f30ee0387fbbe22 100644 --- a/src/adql/query/operand/function/geometry/GeometryFunction.java +++ b/src/adql/query/operand/function/geometry/GeometryFunction.java @@ -16,22 +16,24 @@ package adql.query.operand.function.geometry; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ +import adql.parser.ParseException; import adql.query.ADQLIterator; import adql.query.ADQLObject; import adql.query.TextPosition; import adql.query.operand.ADQLColumn; import adql.query.operand.ADQLOperand; +import adql.query.operand.StringConstant; import adql.query.operand.function.ADQLFunction; /** *

It represents any geometric function of ADQL.

* * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public abstract class GeometryFunction extends ADQLFunction { @@ -83,13 +85,13 @@ public abstract class GeometryFunction extends ADQLFunction { * @param coordSys Its new coordinate system. * @throws UnsupportedOperationException If this function is not associated with a coordinate system. * @throws NullPointerException If the given operand is null. - * @throws Exception If the given operand is not a string. + * @throws ParseException If the given operand is not a string. */ - public void setCoordinateSystem(ADQLOperand coordSys) throws UnsupportedOperationException, NullPointerException, Exception{ + public void setCoordinateSystem(ADQLOperand coordSys) throws UnsupportedOperationException, NullPointerException, ParseException{ if (coordSys == null) - throw new NullPointerException(""); + this.coordSys = new StringConstant(""); else if (!coordSys.isString()) - throw new Exception("A coordinate system must be a string literal: \"" + coordSys.toADQL() + "\" is not a string operand !"); + throw new ParseException("A coordinate system must be a string literal: \"" + coordSys.toADQL() + "\" is not a string operand!"); else{ this.coordSys = coordSys; setPosition(null); @@ -101,13 +103,13 @@ public abstract class GeometryFunction extends ADQLFunction { * which, in general, is either a GeometryFunction or a Column. * * @author Grégory Mantelet (CDS;ARI) - * @version 05/2014 + * @version 1.4 (06/2015) */ public static final class GeometryValue< F extends GeometryFunction > implements ADQLOperand { private ADQLColumn column; private F geomFunct; /** Position of this {@link GeometryValue} in the ADQL query string. 
- * @since 1.3 */ + * @since 1.4 */ private TextPosition position = null; public GeometryValue(ADQLColumn col) throws NullPointerException{ @@ -172,6 +174,11 @@ public abstract class GeometryFunction extends ADQLFunction { return position; } + @Override + public boolean isGeometry(){ + return getValue().isGeometry(); + } + @Override public ADQLObject getCopy() throws Exception{ return new GeometryValue(this); @@ -192,4 +199,4 @@ public abstract class GeometryFunction extends ADQLFunction { return getValue().toADQL(); } } -} \ No newline at end of file +} diff --git a/src/adql/query/operand/function/geometry/IntersectsFunction.java b/src/adql/query/operand/function/geometry/IntersectsFunction.java index 046f5e4a01304570eeb7755cb665108cc8ab0392..8b5d8b92763ebad7723e7b855b2a72bab7a03fac 100644 --- a/src/adql/query/operand/function/geometry/IntersectsFunction.java +++ b/src/adql/query/operand/function/geometry/IntersectsFunction.java @@ -16,8 +16,8 @@ package adql.query.operand.function.geometry; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import adql.query.ADQLObject; @@ -43,7 +43,7 @@ import adql.query.operand.ADQLOperand; *

* * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public class IntersectsFunction extends GeometryFunction { @@ -102,6 +102,11 @@ public class IntersectsFunction extends GeometryFunction { return false; } + @Override + public boolean isGeometry(){ + return false; + } + /** * @return The leftParam. */ @@ -187,4 +192,4 @@ public class IntersectsFunction extends GeometryFunction { return replaced; } -} \ No newline at end of file +} diff --git a/src/adql/query/operand/function/geometry/PointFunction.java b/src/adql/query/operand/function/geometry/PointFunction.java index 38a6b458e0d19e333a806538b22345dc5638fb26..1d5fd26a8a1af88014a475c8ed6db4fa9dee2cea 100644 --- a/src/adql/query/operand/function/geometry/PointFunction.java +++ b/src/adql/query/operand/function/geometry/PointFunction.java @@ -16,8 +16,8 @@ package adql.query.operand.function.geometry; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import adql.query.ADQLObject; @@ -35,7 +35,7 @@ import adql.query.operand.ADQLOperand; * to the ICRS coordinate system with GEOCENTER reference position.

* * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public class PointFunction extends GeometryFunction { @@ -59,7 +59,7 @@ public class PointFunction extends GeometryFunction { super(coordinateSystem); if (firstCoord == null || secondCoord == null) - throw new NullPointerException("The POINT function must have non-null coordinates !"); + throw new NullPointerException("The POINT function must have non-null coordinates!"); coord1 = firstCoord; coord2 = secondCoord; @@ -148,6 +148,11 @@ public class PointFunction extends GeometryFunction { @Override public boolean isString(){ + return false; + } + + @Override + public boolean isGeometry(){ return true; } diff --git a/src/adql/query/operand/function/geometry/PolygonFunction.java b/src/adql/query/operand/function/geometry/PolygonFunction.java index 3dfede4d3bf90e7e977bc4ab74913b0ba03ff055..a4d07a2549aca257cbec2d3578fa4fe247e62dcb 100644 --- a/src/adql/query/operand/function/geometry/PolygonFunction.java +++ b/src/adql/query/operand/function/geometry/PolygonFunction.java @@ -16,10 +16,11 @@ package adql.query.operand.function.geometry; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ +import java.util.Collection; import java.util.Vector; import adql.query.ADQLObject; @@ -41,7 +42,7 @@ import adql.query.operand.ADQLOperand; * according to the STC coordinate system with GEOCENTER reference position.

* * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public class PolygonFunction extends GeometryFunction { @@ -79,7 +80,7 @@ public class PolygonFunction extends GeometryFunction { * @throws NullPointerException If one of the parameters is null. * @throws Exception If there is another error. */ - public PolygonFunction(ADQLOperand coordSystem, Vector coords) throws UnsupportedOperationException, NullPointerException, Exception{ + public PolygonFunction(ADQLOperand coordSystem, Collection coords) throws UnsupportedOperationException, NullPointerException, Exception{ super(coordSystem); if (coords == null || coords.size() < 6) throw new NullPointerException("A POLYGON function must have at least 3 2-D coordinates !"); @@ -119,6 +120,11 @@ public class PolygonFunction extends GeometryFunction { @Override public boolean isString(){ + return false; + } + + @Override + public boolean isGeometry(){ return true; } diff --git a/src/adql/query/operand/function/geometry/RegionFunction.java b/src/adql/query/operand/function/geometry/RegionFunction.java index 404423d3e82d32d85ca83bf7fde8ec8f2830580b..30e9e00cc0c01034b06b4a3513576aeb8ab68367 100644 --- a/src/adql/query/operand/function/geometry/RegionFunction.java +++ b/src/adql/query/operand/function/geometry/RegionFunction.java @@ -16,8 +16,8 @@ package adql.query.operand.function.geometry; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomisches Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import adql.query.ADQLObject; @@ -38,7 +38,7 @@ import adql.query.operand.ADQLOperand; * Inappropriate geometries for this construct SHOULD throw an error message, to be defined by the service making use of ADQL.

* * @author Grégory Mantelet (CDS;ARI) - * @version 1.3 (05/2014) + * @version 1.4 (06/2015) */ public class RegionFunction extends GeometryFunction { @@ -88,6 +88,11 @@ public class RegionFunction extends GeometryFunction { @Override public boolean isString(){ + return false; + } + + @Override + public boolean isGeometry(){ return true; } diff --git a/src/adql/translator/ADQLTranslator.java b/src/adql/translator/ADQLTranslator.java index 7ec6bf9ebaee0cef2fff8752813cd94847ac6753..1174f08378a043cc8306ad4938c4972c4734c03d 100644 --- a/src/adql/translator/ADQLTranslator.java +++ b/src/adql/translator/ADQLTranslator.java @@ -16,7 +16,8 @@ package adql.translator; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import adql.query.ADQLList; @@ -28,7 +29,6 @@ import adql.query.ClauseSelect; import adql.query.ColumnReference; import adql.query.SelectAllColumns; import adql.query.SelectItem; - import adql.query.constraint.ADQLConstraint; import adql.query.constraint.Between; import adql.query.constraint.Comparison; @@ -60,11 +60,11 @@ import adql.query.operand.function.geometry.DistanceFunction; import adql.query.operand.function.geometry.ExtractCoord; import adql.query.operand.function.geometry.ExtractCoordSys; import adql.query.operand.function.geometry.GeometryFunction; +import adql.query.operand.function.geometry.GeometryFunction.GeometryValue; import adql.query.operand.function.geometry.IntersectsFunction; import adql.query.operand.function.geometry.PointFunction; import adql.query.operand.function.geometry.PolygonFunction; import adql.query.operand.function.geometry.RegionFunction; -import adql.query.operand.function.geometry.GeometryFunction.GeometryValue; /** * Translates ADQL objects into any 
language (i.e. SQL). diff --git a/src/adql/translator/JDBCTranslator.java b/src/adql/translator/JDBCTranslator.java new file mode 100644 index 0000000000000000000000000000000000000000..0ba1b0df224e81ff6aefb7370f4a1f59026b00bb --- /dev/null +++ b/src/adql/translator/JDBCTranslator.java @@ -0,0 +1,908 @@ +package adql.translator; + +/* + * This file is part of ADQLLibrary. + * + * ADQLLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * ADQLLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with ADQLLibrary. If not, see . 
+ * + * Copyright 2015 - Astronomisches Rechen Institut (ARI) + */ + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; + +import tap.data.DataReadException; +import adql.db.DBColumn; +import adql.db.DBTable; +import adql.db.DBType; +import adql.db.STCS.Region; +import adql.db.exception.UnresolvedJoinException; +import adql.parser.ParseException; +import adql.query.ADQLList; +import adql.query.ADQLObject; +import adql.query.ADQLOrder; +import adql.query.ADQLQuery; +import adql.query.ClauseConstraints; +import adql.query.ClauseSelect; +import adql.query.ColumnReference; +import adql.query.IdentifierField; +import adql.query.SelectAllColumns; +import adql.query.SelectItem; +import adql.query.constraint.ADQLConstraint; +import adql.query.constraint.Between; +import adql.query.constraint.Comparison; +import adql.query.constraint.ConstraintsGroup; +import adql.query.constraint.Exists; +import adql.query.constraint.In; +import adql.query.constraint.IsNull; +import adql.query.constraint.NotConstraint; +import adql.query.from.ADQLJoin; +import adql.query.from.ADQLTable; +import adql.query.from.FromContent; +import adql.query.operand.ADQLColumn; +import adql.query.operand.ADQLOperand; +import adql.query.operand.Concatenation; +import adql.query.operand.NegativeOperand; +import adql.query.operand.NumericConstant; +import adql.query.operand.Operation; +import adql.query.operand.StringConstant; +import adql.query.operand.WrappedOperand; +import adql.query.operand.function.ADQLFunction; +import adql.query.operand.function.MathFunction; +import adql.query.operand.function.SQLFunction; +import adql.query.operand.function.SQLFunctionType; +import adql.query.operand.function.UserDefinedFunction; +import adql.query.operand.function.geometry.AreaFunction; +import adql.query.operand.function.geometry.BoxFunction; +import adql.query.operand.function.geometry.CentroidFunction; +import adql.query.operand.function.geometry.CircleFunction; +import 
adql.query.operand.function.geometry.ContainsFunction; +import adql.query.operand.function.geometry.DistanceFunction; +import adql.query.operand.function.geometry.ExtractCoord; +import adql.query.operand.function.geometry.ExtractCoordSys; +import adql.query.operand.function.geometry.GeometryFunction; +import adql.query.operand.function.geometry.GeometryFunction.GeometryValue; +import adql.query.operand.function.geometry.IntersectsFunction; +import adql.query.operand.function.geometry.PointFunction; +import adql.query.operand.function.geometry.PolygonFunction; +import adql.query.operand.function.geometry.RegionFunction; + +/** + *

Implementation of {@link ADQLTranslator} which translates ADQL queries in SQL queries.

+ * + *

+ * It is already able to translate all SQL standard features, but lets abstract the translation of all + * geometrical functions. So, this translator must be extended as {@link PostgreSQLTranslator} and + * {@link PgSphereTranslator} are doing. + *

+ * + *

Note: + * Its default implementation of the SQL syntax has been inspired by the PostgreSQL one. + * However, it should work also with SQLite and MySQL, but some translations might be needed + * (as it is has been done for PostgreSQL about the mathematical functions). + *

+ * + *

PostgreSQLTranslator and PgSphereTranslator

+ * + *

+ * {@link PgSphereTranslator} extends {@link PostgreSQLTranslator} and is able to translate geometrical + * functions according to the syntax given by PgSphere. But it can also convert geometrical types + * (from and toward the database), translate PgSphere regions into STC expression and vice-versa. + *

+ * + *

+ * {@link PostgreSQLTranslator} overwrites the translation of mathematical functions whose some have + * a different name or signature. Besides, it is also implementing the translation of the geometrical + * functions. However, it does not really translate them. It is just returning the ADQL expression + * (by calling {@link #getDefaultADQLFunction(ADQLFunction)}). + * And so, of course, the execution of a SQL query containing geometrical functions and translated + * using this translator will not work. It is just a default implementation in case there is no interest + * of these geometrical functions. + *

+ * + *

SQL with or without case sensitivity?

+ * + *

+ * In ADQL and in SQL, it is possible to tell the parser to respect the exact case or not of an identifier (schema, table or column name) + * by surrounding it with double quotes. However ADQL identifiers and SQL ones may be different. In that way, the case sensitivity specified + * in ADQL on the different identifiers can not be kept in SQL. That's why this translator lets specify a general rule on which types of + * SQL identifier must be double quoted. This can be done by implementing the abstract function {@link #isCaseSensitive(IdentifierField)}. + * The functions translating column and table names will call this function in order to surround the identifiers by double quotes or not. + * So, be careful if you want to override the functions translating columns and tables! + *

+ * + *

Translation of "SELECT TOP"

+ * + *

+ * The default behavior of this translator is to translate the ADQL "TOP" into the SQL "LIMIT" at the end of the query. + * This is ok for some DBMS, but not all. So, if your DBMS does not know the "LIMIT" keyword, you should override the function + * translating the whole query: {@link #translate(ADQLQuery)}. Here is its current implementation: + *

+ *
+ * 	StringBuffer sql = new StringBuffer(translate(query.getSelect()));
+ * 	sql.append("\nFROM ").append(translate(query.getFrom()));
+ *	if (!query.getWhere().isEmpty())
+ *		sql.append('\n').append(translate(query.getWhere()));
+ *	if (!query.getGroupBy().isEmpty())
+ *		sql.append('\n').append(translate(query.getGroupBy()));
+ *	if (!query.getHaving().isEmpty())
+ *		sql.append('\n').append(translate(query.getHaving()));
+ *	if (!query.getOrderBy().isEmpty())
+ *		sql.append('\n').append(translate(query.getOrderBy()));
+ *	if (query.getSelect().hasLimit())
+ *		sql.append("\nLimit ").append(query.getSelect().getLimit());
+ *	return sql.toString();
+ * 
+ * + *

Translation of ADQL functions

+ * + *

+ * All ADQL functions are by default not translated. Consequently, the SQL translation is + * actually the ADQL expression. Generally the ADQL expression is generic enough. However, some mathematical functions may need + * to be translated differently. For instance, {@link PostgreSQLTranslator} translates LOG, LOG10, RAND and TRUNC differently. + *

+ * + *

Note: + * Geometrical regions and types have not been managed here. They stay abstract because it is obviously impossible to have a generic + * translation and conversion; it totally depends on the database system. + *

+ * + *

Translation of "FROM" with JOINs

+ * + *

+ * The FROM clause is translated into SQL as written in ADQL. There are no differences except for the identifiers, which are replaced. + * The tables' aliases and their case sensitivity are kept as in ADQL. + *

+ * + * @author Grégory Mantelet (ARI) + * @version 1.3 (05/2015) + * @since 1.3 + * + * @see PostgreSQLTranslator + * @see PgSphereTranslator + */ +public abstract class JDBCTranslator implements ADQLTranslator { + + /** + *

Tell whether the specified identifier MUST be translated so that it is interpreted case sensitively or not. + * By default, an identifier that must be translated with case sensitivity will be surrounded by double quotes. + * But, if this function returns FALSE, the SQL name will be written just as given in the metadata, without double quotes.

+ * + *

WARNING: + * An {@link IdentifierField} object can be a SCHEMA, TABLE, COLUMN or ALIAS. However, in this translator, + * aliases are translated like in ADQL (so, with the same case sensitivity specification as in ADQL). + * So, this function will never be used to know the case sensitivity to apply to an alias. It is therefore + * useless to write a special behavior for the ALIAS value. + *

+ * + * @param field The identifier whose case sensitivity is asked. + * + * @return true if the specified identifier must be translated case sensitively, false otherwise (including if ALIAS or NULL). + */ + public abstract boolean isCaseSensitive(final IdentifierField field); + + /** + *

Get the qualified DB name of the schema containing the given table.

+ * + *

Note: + * This function will, by default, add double quotes if the schema name must be case sensitive in the SQL query. + * This information is provided by {@link #isCaseSensitive(IdentifierField)}. + *

+ * + * @param table A table of the schema whose the qualified DB name is asked. + * + * @return The qualified (with DB catalog name prefix if any, and with double quotes if needed) DB schema name, + * or an empty string if there is no schema or no DB name. + */ + public String getQualifiedSchemaName(final DBTable table){ + if (table == null || table.getDBSchemaName() == null) + return ""; + + StringBuffer buf = new StringBuffer(); + + if (table.getDBCatalogName() != null) + appendIdentifier(buf, table.getDBCatalogName(), IdentifierField.CATALOG).append('.'); + + appendIdentifier(buf, table.getDBSchemaName(), IdentifierField.SCHEMA); + + return buf.toString(); + } + + /** + *

Get the qualified DB name of the given table.

+ * + *

Note: + * This function will, by default, add double quotes if the table name must be case sensitive in the SQL query. + * This information is provided by {@link #isCaseSensitive(IdentifierField)}. + *

+ * + * @param table The table whose the qualified DB name is asked. + * + * @return The qualified (with DB catalog and schema prefix if any, and with double quotes if needed) DB table name, + * or an empty string if the given table is NULL or if there is no DB name. + * + * @see #getTableName(DBTable, boolean) + */ + public String getQualifiedTableName(final DBTable table){ + return getTableName(table, true); + } + + /** + *

Get the DB name of the given table. + * The second parameter lets specify whether the table name must be prefixed by the qualified schema name or not.

+ * + *

Note: + * This function will, by default, add double quotes if the table name must be case sensitive in the SQL query. + * This information is provided by {@link #isCaseSensitive(IdentifierField)}. + *

+ * + * @param table The table whose the DB name is asked. + * @param withSchema true if the qualified schema name must prefix the table name, false otherwise. + * + * @return The DB table name (prefixed by the qualified schema name if asked, and with double quotes if needed), + * or an empty string if the given table is NULL or if there is no DB name. + * + * @since 2.0 + */ + public String getTableName(final DBTable table, final boolean withSchema){ + if (table == null) + return ""; + + StringBuffer buf = new StringBuffer(); + if (withSchema){ + buf.append(getQualifiedSchemaName(table)); + if (buf.length() > 0) + buf.append('.'); + } + appendIdentifier(buf, table.getDBName(), IdentifierField.TABLE); + + return buf.toString(); + } + + /** + *

Get the DB name of the given column

+ * + *

Note: + * This function will, by default, add double quotes if the column name must be case sensitive in the SQL query. + * This information is provided by {@link #isCaseSensitive(IdentifierField)}. + *

+ * + *

Caution: + * The given column may be NULL and in this case an empty string will be returned. + * But if the given column is not NULL, its DB name MUST NOT BE NULL! + *

+ * + * @param column The column whose the DB name is asked. + * + * @return The DB column name (with double quotes if needed), + * or an empty string if the given column is NULL. + */ + public String getColumnName(final DBColumn column){ + return (column == null) ? "" : appendIdentifier(new StringBuffer(), column.getDBName(), IdentifierField.COLUMN).toString(); + } + + /** + * Appends the given identifier in the given StringBuffer. + * + * @param str The string buffer. + * @param id The identifier to append. + * @param field The type of identifier (column, table, schema, catalog or alias ?). + * + * @return The string buffer + identifier. + */ + public final StringBuffer appendIdentifier(final StringBuffer str, final String id, final IdentifierField field){ + return appendIdentifier(str, id, isCaseSensitive(field)); + } + + /** + * Appends the given identifier to the given StringBuffer. + * + * @param str The string buffer. + * @param id The identifier to append. + * @param caseSensitive true to format the identifier so that preserving the case sensitivity, false otherwise. + * + * @return The string buffer + identifier. 
+ */ + public static final StringBuffer appendIdentifier(final StringBuffer str, final String id, final boolean caseSensitive){ + if (caseSensitive) + return str.append('"').append(id).append('"'); + else + return str.append(id); + } + + @Override + @SuppressWarnings({"unchecked","rawtypes"}) + public String translate(ADQLObject obj) throws TranslationException{ + if (obj instanceof ADQLQuery) + return translate((ADQLQuery)obj); + else if (obj instanceof ADQLList) + return translate((ADQLList)obj); + else if (obj instanceof SelectItem) + return translate((SelectItem)obj); + else if (obj instanceof ColumnReference) + return translate((ColumnReference)obj); + else if (obj instanceof ADQLTable) + return translate((ADQLTable)obj); + else if (obj instanceof ADQLJoin) + return translate((ADQLJoin)obj); + else if (obj instanceof ADQLOperand) + return translate((ADQLOperand)obj); + else if (obj instanceof ADQLConstraint) + return translate((ADQLConstraint)obj); + else + return obj.toADQL(); + } + + @Override + public String translate(ADQLQuery query) throws TranslationException{ + StringBuffer sql = new StringBuffer(translate(query.getSelect())); + + sql.append("\nFROM ").append(translate(query.getFrom())); + + if (!query.getWhere().isEmpty()) + sql.append('\n').append(translate(query.getWhere())); + + if (!query.getGroupBy().isEmpty()) + sql.append('\n').append(translate(query.getGroupBy())); + + if (!query.getHaving().isEmpty()) + sql.append('\n').append(translate(query.getHaving())); + + if (!query.getOrderBy().isEmpty()) + sql.append('\n').append(translate(query.getOrderBy())); + + if (query.getSelect().hasLimit()) + sql.append("\nLimit ").append(query.getSelect().getLimit()); + + return sql.toString(); + } + + /* *************************** */ + /* ****** LIST & CLAUSE ****** */ + /* *************************** */ + @Override + public String translate(ADQLList list) throws TranslationException{ + if (list instanceof ClauseSelect) + return 
translate((ClauseSelect)list); + else if (list instanceof ClauseConstraints) + return translate((ClauseConstraints)list); + else + return getDefaultADQLList(list); + } + + /** + * Gets the default SQL output for a list of ADQL objects. + * + * @param list List to format into SQL. + * + * @return The corresponding SQL. + * + * @throws TranslationException If there is an error during the translation. + */ + protected String getDefaultADQLList(ADQLList list) throws TranslationException{ + String sql = (list.getName() == null) ? "" : (list.getName() + " "); + + for(int i = 0; i < list.size(); i++) + sql += ((i == 0) ? "" : (" " + list.getSeparator(i) + " ")) + translate(list.get(i)); + + return sql; + } + + @Override + public String translate(ClauseSelect clause) throws TranslationException{ + String sql = null; + + for(int i = 0; i < clause.size(); i++){ + if (i == 0){ + sql = clause.getName() + (clause.distinctColumns() ? " DISTINCT" : ""); + }else + sql += " " + clause.getSeparator(i); + + sql += " " + translate(clause.get(i)); + } + + return sql; + } + + @Override + public String translate(ClauseConstraints clause) throws TranslationException{ + if (clause instanceof ConstraintsGroup) + return "(" + getDefaultADQLList(clause) + ")"; + else + return getDefaultADQLList(clause); + } + + @Override + public String translate(SelectItem item) throws TranslationException{ + if (item instanceof SelectAllColumns) + return translate((SelectAllColumns)item); + + StringBuffer translation = new StringBuffer(translate(item.getOperand())); + if (item.hasAlias()){ + translation.append(" AS "); + appendIdentifier(translation, item.getAlias(), item.isCaseSensitive()); + }else{ + translation.append(" AS "); + appendIdentifier(translation, item.getName(), true); + } + + return translation.toString(); + } + + @Override + public String translate(SelectAllColumns item) throws TranslationException{ + HashMap mapAlias = new HashMap(); + + // Fetch the full list of columns to display: + 
Iterable dbCols = null; + if (item.getAdqlTable() != null && item.getAdqlTable().getDBLink() != null){ + ADQLTable table = item.getAdqlTable(); + dbCols = table.getDBLink(); + if (table.hasAlias()){ + String key = getQualifiedTableName(table.getDBLink()); + mapAlias.put(key, table.isCaseSensitive(IdentifierField.ALIAS) ? ("\"" + table.getAlias() + "\"") : table.getAlias()); + } + }else if (item.getQuery() != null){ + try{ + dbCols = item.getQuery().getFrom().getDBColumns(); + }catch(UnresolvedJoinException pe){ + throw new TranslationException("Due to a join problem, the ADQL to SQL translation can not be completed!", pe); + } + ArrayList tables = item.getQuery().getFrom().getTables(); + for(ADQLTable table : tables){ + if (table.hasAlias()){ + String key = getQualifiedTableName(table.getDBLink()); + mapAlias.put(key, table.isCaseSensitive(IdentifierField.ALIAS) ? ("\"" + table.getAlias() + "\"") : table.getAlias()); + } + } + } + + // Write the DB name of all these columns: + if (dbCols != null){ + StringBuffer cols = new StringBuffer(); + for(DBColumn col : dbCols){ + if (cols.length() > 0) + cols.append(','); + if (col.getTable() != null){ + String fullDbName = getQualifiedTableName(col.getTable()); + if (mapAlias.containsKey(fullDbName)) + appendIdentifier(cols, mapAlias.get(fullDbName), false).append('.'); + else + cols.append(fullDbName).append('.'); + } + appendIdentifier(cols, col.getDBName(), IdentifierField.COLUMN); + cols.append(" AS \"").append(col.getADQLName()).append('\"'); + } + return (cols.length() > 0) ? cols.toString() : item.toADQL(); + }else{ + return item.toADQL(); + } + } + + @Override + public String translate(ColumnReference ref) throws TranslationException{ + if (ref instanceof ADQLOrder) + return translate((ADQLOrder)ref); + else + return getDefaultColumnReference(ref); + } + + /** + * Gets the default SQL output for a column reference. + * + * @param ref The column reference to format into SQL. + * + * @return The corresponding SQL. 
+ * + * @throws TranslationException If there is an error during the translation. + */ + protected String getDefaultColumnReference(ColumnReference ref) throws TranslationException{ + if (ref.isIndex()){ + return "" + ref.getColumnIndex(); + }else{ + if (ref.getDBLink() == null){ + return (ref.isCaseSensitive() ? ("\"" + ref.getColumnName() + "\"") : ref.getColumnName()); + }else{ + DBColumn dbCol = ref.getDBLink(); + StringBuffer colName = new StringBuffer(); + // Use the table alias if any: + if (ref.getAdqlTable() != null && ref.getAdqlTable().hasAlias()) + appendIdentifier(colName, ref.getAdqlTable().getAlias(), ref.getAdqlTable().isCaseSensitive(IdentifierField.ALIAS)).append('.'); + + // Use the DBTable if any: + else if (dbCol.getTable() != null) + colName.append(getQualifiedTableName(dbCol.getTable())).append('.'); + + appendIdentifier(colName, dbCol.getDBName(), IdentifierField.COLUMN); + + return colName.toString(); + } + } + } + + @Override + public String translate(ADQLOrder order) throws TranslationException{ + return getDefaultColumnReference(order) + (order.isDescSorting() ? 
" DESC" : " ASC"); + } + + /* ************************** */ + /* ****** TABLE & JOIN ****** */ + /* ************************** */ + @Override + public String translate(FromContent content) throws TranslationException{ + if (content instanceof ADQLTable) + return translate((ADQLTable)content); + else if (content instanceof ADQLJoin) + return translate((ADQLJoin)content); + else + return content.toADQL(); + } + + @Override + public String translate(ADQLTable table) throws TranslationException{ + StringBuffer sql = new StringBuffer(); + + // CASE: SUB-QUERY: + if (table.isSubQuery()) + sql.append('(').append(translate(table.getSubQuery())).append(')'); + + // CASE: TABLE REFERENCE: + else{ + // Use the corresponding DB table, if known: + if (table.getDBLink() != null) + sql.append(getQualifiedTableName(table.getDBLink())); + // Otherwise, use the whole table name given in the ADQL query: + else + sql.append(table.getFullTableName()); + } + + // Add the table alias, if any: + if (table.hasAlias()){ + sql.append(" AS "); + appendIdentifier(sql, table.getAlias(), table.isCaseSensitive(IdentifierField.ALIAS)); + } + + return sql.toString(); + } + + @Override + public String translate(ADQLJoin join) throws TranslationException{ + StringBuffer sql = new StringBuffer(translate(join.getLeftTable())); + + if (join.isNatural()) + sql.append(" NATURAL"); + + sql.append(' ').append(join.getJoinType()).append(' ').append(translate(join.getRightTable())).append(' '); + + if (!join.isNatural()){ + if (join.getJoinCondition() != null) + sql.append(translate(join.getJoinCondition())); + else if (join.hasJoinedColumns()){ + StringBuffer cols = new StringBuffer(); + Iterator it = join.getJoinedColumns(); + while(it.hasNext()){ + ADQLColumn item = it.next(); + if (cols.length() > 0) + cols.append(", "); + if (item.getDBLink() == null) + appendIdentifier(cols, item.getColumnName(), item.isCaseSensitive(IdentifierField.COLUMN)); + else + appendIdentifier(cols, item.getDBLink().getDBName(), 
IdentifierField.COLUMN); + } + sql.append("USING (").append(cols).append(')'); + } + } + + return sql.toString(); + } + + /* ********************* */ + /* ****** OPERAND ****** */ + /* ********************* */ + @Override + public String translate(ADQLOperand op) throws TranslationException{ + if (op instanceof ADQLColumn) + return translate((ADQLColumn)op); + else if (op instanceof Concatenation) + return translate((Concatenation)op); + else if (op instanceof NegativeOperand) + return translate((NegativeOperand)op); + else if (op instanceof NumericConstant) + return translate((NumericConstant)op); + else if (op instanceof StringConstant) + return translate((StringConstant)op); + else if (op instanceof WrappedOperand) + return translate((WrappedOperand)op); + else if (op instanceof Operation) + return translate((Operation)op); + else if (op instanceof ADQLFunction) + return translate((ADQLFunction)op); + else + return op.toADQL(); + } + + @Override + public String translate(ADQLColumn column) throws TranslationException{ + // Use its DB name if known: + if (column.getDBLink() != null){ + DBColumn dbCol = column.getDBLink(); + StringBuffer colName = new StringBuffer(); + // Use the table alias if any: + if (column.getAdqlTable() != null && column.getAdqlTable().hasAlias()) + appendIdentifier(colName, column.getAdqlTable().getAlias(), column.getAdqlTable().isCaseSensitive(IdentifierField.ALIAS)).append('.'); + + // Use the DBTable if any: + else if (dbCol.getTable() != null && dbCol.getTable().getDBName() != null) + colName.append(getQualifiedTableName(dbCol.getTable())).append('.'); + + // Otherwise, use the prefix of the column given in the ADQL query: + else if (column.getTableName() != null) + colName = column.getFullColumnPrefix().append('.'); + + appendIdentifier(colName, dbCol.getDBName(), IdentifierField.COLUMN); + + return colName.toString(); + } + // Otherwise, use the whole name given in the ADQL query: + else + return column.getFullColumnName(); + } + + 
@Override + public String translate(Concatenation concat) throws TranslationException{ + return translate((ADQLList)concat); + } + + @Override + public String translate(NegativeOperand negOp) throws TranslationException{ + return "-" + translate(negOp.getOperand()); + } + + @Override + public String translate(NumericConstant numConst) throws TranslationException{ + return numConst.getValue(); + } + + @Override + public String translate(StringConstant strConst) throws TranslationException{ + return "'" + strConst.getValue() + "'"; + } + + @Override + public String translate(WrappedOperand op) throws TranslationException{ + return "(" + translate(op.getOperand()) + ")"; + } + + @Override + public String translate(Operation op) throws TranslationException{ + return translate(op.getLeftOperand()) + op.getOperation().toADQL() + translate(op.getRightOperand()); + } + + /* ************************ */ + /* ****** CONSTRAINT ****** */ + /* ************************ */ + @Override + public String translate(ADQLConstraint cons) throws TranslationException{ + if (cons instanceof Comparison) + return translate((Comparison)cons); + else if (cons instanceof Between) + return translate((Between)cons); + else if (cons instanceof Exists) + return translate((Exists)cons); + else if (cons instanceof In) + return translate((In)cons); + else if (cons instanceof IsNull) + return translate((IsNull)cons); + else if (cons instanceof NotConstraint) + return translate((NotConstraint)cons); + else + return cons.toADQL(); + } + + @Override + public String translate(Comparison comp) throws TranslationException{ + return translate(comp.getLeftOperand()) + " " + comp.getOperator().toADQL() + " " + translate(comp.getRightOperand()); + } + + @Override + public String translate(Between comp) throws TranslationException{ + return translate(comp.getLeftOperand()) + " " + comp.getName() + " " + translate(comp.getMinOperand()) + " AND " + translate(comp.getMaxOperand()); + } + + @Override + public String 
translate(Exists exists) throws TranslationException{ + return "EXISTS(" + translate(exists.getSubQuery()) + ")"; + } + + @Override + public String translate(In in) throws TranslationException{ + return translate(in.getOperand()) + " " + in.getName() + " (" + (in.hasSubQuery() ? translate(in.getSubQuery()) : translate(in.getValuesList())) + ")"; + } + + @Override + public String translate(IsNull isNull) throws TranslationException{ + return translate(isNull.getColumn()) + " " + isNull.getName(); + } + + @Override + public String translate(NotConstraint notCons) throws TranslationException{ + return "NOT " + translate(notCons.getConstraint()); + } + + /* *********************** */ + /* ****** FUNCTIONS ****** */ + /* *********************** */ + @Override + public String translate(ADQLFunction fct) throws TranslationException{ + if (fct instanceof GeometryFunction) + return translate((GeometryFunction)fct); + else if (fct instanceof MathFunction) + return translate((MathFunction)fct); + else if (fct instanceof SQLFunction) + return translate((SQLFunction)fct); + else if (fct instanceof UserDefinedFunction) + return translate((UserDefinedFunction)fct); + else + return getDefaultADQLFunction(fct); + } + + /** + * Gets the default SQL output for the given ADQL function. + * + * @param fct The ADQL function to format into SQL. + * + * @return The corresponding SQL. + * + * @throws TranslationException If there is an error during the translation. + */ + protected final String getDefaultADQLFunction(ADQLFunction fct) throws TranslationException{ + String sql = fct.getName() + "("; + + for(int i = 0; i < fct.getNbParameters(); i++) + sql += ((i == 0) ? "" : ", ") + translate(fct.getParameter(i)); + + return sql + ")"; + } + + @Override + public String translate(SQLFunction fct) throws TranslationException{ + if (fct.getType() == SQLFunctionType.COUNT_ALL) + return "COUNT(" + (fct.isDistinct() ? 
"DISTINCT " : "") + "*)"; + else + return fct.getName() + "(" + (fct.isDistinct() ? "DISTINCT " : "") + translate(fct.getParameter(0)) + ")"; + } + + @Override + public String translate(MathFunction fct) throws TranslationException{ + return getDefaultADQLFunction(fct); + } + + @Override + public String translate(UserDefinedFunction fct) throws TranslationException{ + return fct.translate(this); + } + + /* *********************************** */ + /* ****** GEOMETRICAL FUNCTIONS ****** */ + /* *********************************** */ + @Override + public String translate(GeometryFunction fct) throws TranslationException{ + if (fct instanceof AreaFunction) + return translate((AreaFunction)fct); + else if (fct instanceof BoxFunction) + return translate((BoxFunction)fct); + else if (fct instanceof CentroidFunction) + return translate((CentroidFunction)fct); + else if (fct instanceof CircleFunction) + return translate((CircleFunction)fct); + else if (fct instanceof ContainsFunction) + return translate((ContainsFunction)fct); + else if (fct instanceof DistanceFunction) + return translate((DistanceFunction)fct); + else if (fct instanceof ExtractCoord) + return translate((ExtractCoord)fct); + else if (fct instanceof ExtractCoordSys) + return translate((ExtractCoordSys)fct); + else if (fct instanceof IntersectsFunction) + return translate((IntersectsFunction)fct); + else if (fct instanceof PointFunction) + return translate((PointFunction)fct); + else if (fct instanceof PolygonFunction) + return translate((PolygonFunction)fct); + else if (fct instanceof RegionFunction) + return translate((RegionFunction)fct); + else + return getDefaultADQLFunction(fct); + } + + @Override + public String translate(GeometryValue geomValue) throws TranslationException{ + return translate(geomValue.getValue()); + } + + /** + * Convert any type provided by a JDBC driver into a type understandable by the ADQL/TAP library. + * + * @param dbmsType Type returned by a JDBC driver. 
Note: this value is returned by ResultSetMetadata.getColumnType(int) and correspond to a type of java.sql.Types + * @param rawDbmsTypeName Full name of the type returned by a JDBC driver. Note: this name is returned by ResultSetMetadata.getColumnTypeName(int) ; this name may contain parameters + * @param dbmsTypeName Name of type, without the eventual parameters. Note: this name is extracted from rawDbmsTypeName. + * @param typeParams The eventual type parameters (e.g. char string length). Note: these parameters are extracted from rawDbmsTypeName. + * + * @return The corresponding ADQL/TAP type or NULL if the specified type is unknown. + */ + public abstract DBType convertTypeFromDB(final int dbmsType, final String rawDbmsTypeName, final String dbmsTypeName, final String[] typeParams); + + /** + *

Convert any type provided by the ADQL/TAP library into a type understandable by a JDBC driver.

+ * + *

Note: + * The returned DBMS type may contain some parameters between brackets. + *

+ * + * @param type The ADQL/TAP library's type to convert. + * + * @return The corresponding DBMS type or NULL if the specified type is unknown. + */ + public abstract String convertTypeToDB(final DBType type); + + /** + *

Parse the given JDBC column value as a geometry object and convert it into a {@link Region}.

+ * + *

Note: + * Generally the returned object will be used to get its STC-S expression. + *

+ * + *

Note: + * If the given column value is NULL, NULL will be returned. + *

+ * + *

Important note: + * This function is called ONLY for value of columns flagged as geometries by + * {@link #convertTypeFromDB(int, String, String, String[])}. So the value should always + * be of the expected type and format. However, if it turns out that the type is wrong + * and that the conversion is finally impossible, this function SHOULD throw a + * {@link DataReadException}. + *

+ * + * @param jdbcColValue A JDBC column value (returned by ResultSet.getObject(int)). + * + * @return The corresponding {@link Region} if the given value is a geometry. + * + * @throws ParseException If the given object is not a geometrical object + * or can not be transformed into a {@link Region} object. + */ + public abstract Region translateGeometryFromDB(final Object jdbcColValue) throws ParseException; + + /** + *

Convert the given STC region into a DB column value.

+ * + *

Note: + * This function is used only by the UPLOAD feature, to import geometries provided as STC-S expression in + * a VOTable document inside a DB column. + *

+ * + *

Note: + * If the given region is NULL, NULL will be returned. + *

+ * + * @param region The region to store in the DB. + * + * @return The corresponding DB column object. + * + * @throws ParseException If the given STC Region can not be converted into a DB object. + */ + public abstract Object translateGeometryToDB(final Region region) throws ParseException; + +} diff --git a/src/adql/translator/PgSphereTranslator.java b/src/adql/translator/PgSphereTranslator.java index efe38074322b8e850227a9f5169d2346b23958de..96c509c52744219b63c7560a82988cd06a72d720 100644 --- a/src/adql/translator/PgSphereTranslator.java +++ b/src/adql/translator/PgSphereTranslator.java @@ -16,12 +16,22 @@ package adql.translator; * You should have received a copy of the GNU Lesser General Public License * along with ADQLLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ +import java.sql.SQLException; +import java.util.ArrayList; + +import org.postgresql.util.PGobject; + +import adql.db.DBType; +import adql.db.DBType.DBDatatype; +import adql.db.STCS.Region; +import adql.parser.ParseException; +import adql.query.TextPosition; import adql.query.constraint.Comparison; import adql.query.constraint.ComparisonOperator; - import adql.query.operand.function.geometry.AreaFunction; import adql.query.operand.function.geometry.BoxFunction; import adql.query.operand.function.geometry.CircleFunction; @@ -32,24 +42,25 @@ import adql.query.operand.function.geometry.IntersectsFunction; import adql.query.operand.function.geometry.PointFunction; import adql.query.operand.function.geometry.PolygonFunction; -import adql.translator.PostgreSQLTranslator; -import adql.translator.TranslationException; - /** *

Translates all ADQL objects into the SQL adaptation of Postgres+PgSphere. * Actually only the geometrical functions are translated in this class. * The other functions are managed by {@link PostgreSQLTranslator}.

* - * @author Grégory Mantelet (CDS) - * @version 01/2012 - * - * @see PostgreSQLTranslator + * @author Grégory Mantelet (CDS;ARI) + * @version 1.3 (11/2014) */ public class PgSphereTranslator extends PostgreSQLTranslator { + /** Angle between two points generated while transforming a circle into a polygon. + * This angle is computed by default to get at the end a polygon of 32 points. + * @see #circleToPolygon(double[], double) + * @since 1.3 */ + protected static double ANGLE_CIRCLE_TO_POLYGON = 2 * Math.PI / 32; + /** - * Builds a PgSphereTranslator which takes into account the case sensitivity on column names. - * It means that column names which have been written between double quotes, will be also translated between double quotes. + * Builds a PgSphereTranslator which always translates in SQL all identifiers (schema, table and column) in a case sensitive manner ; + * in other words, schema, table and column names will be surrounded by double quotes in the SQL translation. * * @see PostgreSQLTranslator#PostgreSQLTranslator() */ @@ -58,23 +69,24 @@ public class PgSphereTranslator extends PostgreSQLTranslator { } /** - * Builds a PgSphereTranslator. + * Builds a PgSphereTranslator which always translates in SQL all identifiers (schema, table and column) in the specified case sensitivity ; + * in other words, schema, table and column names will all be surrounded or not by double quotes in the SQL translation. * - * @param column true to take into account the case sensitivity of column names, false otherwise. + * @param allCaseSensitive true to translate all identifiers in a case sensitive manner (surrounded by double quotes), false for case insensitivity. * * @see PostgreSQLTranslator#PostgreSQLTranslator(boolean) */ - public PgSphereTranslator(boolean column){ - super(column); + public PgSphereTranslator(boolean allCaseSensitive){ + super(allCaseSensitive); } /** - * Builds a PgSphereTranslator. 
+ * Builds a PgSphereTranslator which will always translate in SQL identifiers with the defined case sensitivity. * - * @param catalog true to take into account the case sensitivity of catalog names, false otherwise. - * @param schema true to take into account the case sensitivity of schema names, false otherwise. - * @param table true to take into account the case sensitivity of table names, false otherwise. - * @param column true to take into account the case sensitivity of column names, false otherwise. + * @param catalog true to translate catalog names with double quotes (case sensitive in the DBMS), false otherwise. + * @param schema true to translate schema names with double quotes (case sensitive in the DBMS), false otherwise. + * @param table true to translate table names with double quotes (case sensitive in the DBMS), false otherwise. + * @param column true to translate column names with double quotes (case sensitive in the DBMS), false otherwise. * * @see PostgreSQLTranslator#PostgreSQLTranslator(boolean, boolean, boolean, boolean) */ @@ -103,11 +115,12 @@ public class PgSphereTranslator extends PostgreSQLTranslator { public String translate(BoxFunction box) throws TranslationException{ StringBuffer str = new StringBuffer("sbox("); + str.append("spoint(").append("radians(").append(translate(box.getCoord1())).append("-(").append(translate(box.getWidth())).append("/2.0)),"); + str.append("radians(").append(translate(box.getCoord2())).append("-(").append(translate(box.getHeight())).append("/2.0))),"); + str.append("spoint(").append("radians(").append(translate(box.getCoord1())).append("+(").append(translate(box.getWidth())).append("/2.0)),"); - str.append("radians(").append(translate(box.getCoord2())).append("+(").append(translate(box.getHeight())).append("/2.0))),"); + str.append("radians(").append(translate(box.getCoord2())).append("+(").append(translate(box.getHeight())).append("/2.0))))"); - 
str.append("spoint(").append("radians(").append(translate(box.getCoord1())).append("-(").append(translate(box.getWidth())).append("/2.0)),"); - str.append("radians(").append(translate(box.getCoord2())).append("-(").append(translate(box.getHeight())).append("/2.0))))"); return str.toString(); } @@ -156,8 +169,8 @@ public class PgSphereTranslator extends PostgreSQLTranslator { @Override public String translate(AreaFunction areaFunction) throws TranslationException{ - StringBuffer str = new StringBuffer("degrees(area("); - str.append(translate(areaFunction.getParameter())).append("))"); + StringBuffer str = new StringBuffer("degrees(degrees(area("); + str.append(translate(areaFunction.getParameter())).append(")))"); return str.toString(); } @@ -185,4 +198,534 @@ public class PgSphereTranslator extends PostgreSQLTranslator { return super.translate(comp); } + @Override + public DBType convertTypeFromDB(final int dbmsType, final String rawDbmsTypeName, String dbmsTypeName, final String[] params){ + // If no type is provided return VARCHAR: + if (dbmsTypeName == null || dbmsTypeName.trim().length() == 0) + return new DBType(DBDatatype.VARCHAR, DBType.NO_LENGTH); + + // Put the dbmsTypeName in lower case for the following comparisons: + dbmsTypeName = dbmsTypeName.toLowerCase(); + + if (dbmsTypeName.equals("spoint")) + return new DBType(DBDatatype.POINT); + else if (dbmsTypeName.equals("scircle") || dbmsTypeName.equals("sbox") || dbmsTypeName.equals("spoly")) + return new DBType(DBDatatype.REGION); + else + return super.convertTypeFromDB(dbmsType, rawDbmsTypeName, dbmsTypeName, params); + } + + @Override + public String convertTypeToDB(final DBType type){ + if (type != null){ + if (type.type == DBDatatype.POINT) + return "spoint"; + else if (type.type == DBDatatype.REGION) + return "spoly"; + } + return super.convertTypeToDB(type); + } + + @Override + public Region translateGeometryFromDB(final Object jdbcColValue) throws ParseException{ + // A NULL value stays NULL: + if 
(jdbcColValue == null) + return null; + // Only a special object is expected: + else if (!(jdbcColValue instanceof PGobject)) + throw new ParseException("Incompatible type! The column value \"" + jdbcColValue.toString() + "\" was supposed to be a geometrical object."); + + PGobject pgo = (PGobject)jdbcColValue; + + // In case one or both of the fields of the given object are NULL: + if (pgo == null || pgo.getType() == null || pgo.getValue() == null || pgo.getValue().length() == 0) + return null; + + // Extract the object type and its value: + String objType = pgo.getType().toLowerCase(); + String geomStr = pgo.getValue(); + + /* Only spoint, scircle, sbox and spoly are supported ; + * these geometries are parsed and transformed in Region instances:*/ + if (objType.equals("spoint")) + return (new PgSphereGeometryParser()).parsePoint(geomStr); + else if (objType.equals("scircle")) + return (new PgSphereGeometryParser()).parseCircle(geomStr); + else if (objType.equals("sbox")) + return (new PgSphereGeometryParser()).parseBox(geomStr); + else if (objType.equals("spoly")) + return (new PgSphereGeometryParser()).parsePolygon(geomStr); + else + throw new ParseException("Unsupported PgSphere type: \"" + objType + "\"! 
Impossible to convert the column value \"" + geomStr + "\" into a Region."); + } + + @Override + public Object translateGeometryToDB(final Region region) throws ParseException{ + // A NULL value stays NULL: + if (region == null) + return null; + + try{ + PGobject dbRegion = new PGobject(); + StringBuffer buf; + + // Build the PgSphere expression from the given geometry in function of its type: + switch(region.type){ + + case POSITION: + dbRegion.setType("spoint"); + dbRegion.setValue("(" + region.coordinates[0][0] + "d," + region.coordinates[0][1] + "d)"); + break; + + case POLYGON: + dbRegion.setType("spoly"); + buf = new StringBuffer("{"); + for(int i = 0; i < region.coordinates.length; i++){ + if (i > 0) + buf.append(','); + buf.append('(').append(region.coordinates[i][0]).append("d,").append(region.coordinates[i][1]).append("d)"); + } + buf.append('}'); + dbRegion.setValue(buf.toString()); + break; + + case BOX: + dbRegion.setType("spoly"); + buf = new StringBuffer("{"); + // south west + buf.append('(').append(region.coordinates[0][0] - region.width / 2).append("d,").append(region.coordinates[0][1] - region.height / 2).append("d),"); + // north west + buf.append('(').append(region.coordinates[0][0] - region.width / 2).append("d,").append(region.coordinates[0][1] + region.height / 2).append("d),"); + // north east + buf.append('(').append(region.coordinates[0][0] + region.width / 2).append("d,").append(region.coordinates[0][1] + region.height / 2).append("d),"); + // south east + buf.append('(').append(region.coordinates[0][0] + region.width / 2).append("d,").append(region.coordinates[0][1] - region.height / 2).append("d)"); + buf.append('}'); + dbRegion.setValue(buf.toString()); + break; + + case CIRCLE: + dbRegion.setType("spoly"); + dbRegion.setValue(circleToPolygon(region.coordinates[0], region.radius)); + break; + + default: + throw new ParseException("Unsupported geometrical region: \"" + region.type + "\"!"); + } + return dbRegion; + }catch(SQLException 
e){ + /* This error could never happen! */ + return null; + } + } + + /** + *

Convert the specified circle into a polygon. + * The generated polygon is formatted using the PgSphere syntax.

+ * + *

Note: + * The center coordinates and the radius are expected in degrees. + *

+ * + * @param center Center of the circle ([0]=ra and [1]=dec). + * @param radius Radius of the circle. + * + * @return The PgSphere serialization of the corresponding polygon. + * + * @since 1.3 + */ + protected String circleToPolygon(final double[] center, final double radius){ + double angle = 0, x, y; + StringBuffer buf = new StringBuffer(); + while(angle < 2 * Math.PI){ + x = center[0] + radius * Math.cos(angle); + y = center[1] + radius * Math.sin(angle); + if (buf.length() > 0) + buf.append(','); + buf.append('(').append(x).append("d,").append(y).append("d)"); + angle += ANGLE_CIRCLE_TO_POLYGON; + } + return "{" + buf + "}"; + } + + /** + *

Parses a geometry serialized with the PgSphere syntax.

+ * + *

+ * There is one function parseXxx(String) for each supported geometry. + * These functions always return a {@link Region} object, + * which is the object representation of an STC region. + *

+ * + *

Only the following geometries are supported:

+ *
+ * <ul>
+ * 	<li>spoint => Position</li>
+ * 	<li>scircle => Circle</li>
+ * 	<li>sbox => Box</li>
+ * 	<li>spoly => Polygon</li>
+ * </ul>
+ * + *

+ * This parser supports all the known PgSphere representations of an angle. + * However, it always returns angle (coordinates, radius, width and height) in degrees. + *

+ * + * @author Grégory Mantelet (ARI) + * @version 1.3 (11/2014) + * @since 1.3 + */ + protected static class PgSphereGeometryParser { + /** Position of the next characters to read in the PgSphere expression to parse. */ + private int pos; + /** Full PgSphere expression to parse. */ + private String expr; + /** Last read token (either a string/numeric or a separator). */ + private String token; + /** Buffer used to read tokens. */ + private StringBuffer buffer; + + private static final char OPEN_PAR = '('; + private static final char CLOSE_PAR = ')'; + private static final char COMMA = ','; + private static final char LESS_THAN = '<'; + private static final char GREATER_THAN = '>'; + private static final char OPEN_BRACE = '{'; + private static final char CLOSE_BRACE = '}'; + private static final char DEGREE = 'd'; + private static final char HOUR = 'h'; + private static final char MINUTE = 'm'; + private static final char SECOND = 's'; + + /** + * Exception sent when the end of the expression + * (EOE = End Of Expression) is reached. + * + * @author Grégory Mantelet (ARI) + * @version 1.3 (11/2014) + * @since 1.3 + */ + private static class EOEException extends ParseException { + private static final long serialVersionUID = 1L; + + /** Build a simple EOEException. */ + public EOEException(){ + super("Unexpected End Of PgSphere Expression!"); + } + } + + /** + * Build the PgSphere parser. + */ + public PgSphereGeometryParser(){} + + /** + * Prepare the parser in order to read the given PgSphere expression. + * + * @param newStcs New PgSphere expression to parse from now. + */ + private void init(final String newExpr){ + expr = (newExpr == null) ? "" : newExpr; + token = null; + buffer = new StringBuffer(); + pos = 0; + } + + /** + * Finalize the parsing. + * No more characters (except eventually some space characters) should remain in the PgSphere expression to parse. + * + * @throws ParseException If other non-space characters remains. 
+ */ + private void end() throws ParseException{ + // Skip all spaces: + skipSpaces(); + + // If there is still some characters, they are not expected, and so throw an exception: + if (expr.length() > 0 && pos < expr.length()) + throw new ParseException("Unexpected end of PgSphere region expression: \"" + expr.substring(pos) + "\" was unexpected!", new TextPosition(1, pos, 1, expr.length())); + + // Reset the buffer, token and the PgSphere expression to parse: + buffer = null; + expr = null; + token = null; + } + + /** + * Tool function which skips all next space characters until the next meaningful characters. + */ + private void skipSpaces(){ + while(pos < expr.length() && Character.isWhitespace(expr.charAt(pos))) + pos++; + } + + /** + *

Get the next meaningful word. This word can be a numeric, any string constant or a separator. + * This function returns this token but also stores it in the class attribute {@link #token}.

+ * + *

+ * In case the end of the expression is reached before getting any meaningful character, + * an {@link EOEException} is thrown. + *

+ * + * @return The full read word/token, or NULL if the end has been reached. + */ + private String nextToken() throws EOEException{ + // Skip all spaces: + skipSpaces(); + + if (pos >= expr.length()) + throw new EOEException(); + + // Fetch all characters until word separator (a space or a open/close parenthesis): + buffer.append(expr.charAt(pos++)); + if (!isSyntaxSeparator(buffer.charAt(0))){ + while(pos < expr.length() && !isSyntaxSeparator(expr.charAt(pos))){ + // skip eventual white-spaces: + if (!Character.isWhitespace(expr.charAt(pos))) + buffer.append(expr.charAt(pos)); + pos++; + } + } + + // Save the read token and reset the buffer: + token = buffer.toString(); + buffer.delete(0, token.length()); + + return token; + } + + /** + *

Tell whether the given character is a separator defined in the syntax.

+ * + *

Here, the following characters are considered as separators/specials: + * ',', 'd', 'h', 'm', 's', '(', ')', '<', '>', '{' and '}'.

+ * + * @param c Character to test. + * + * @return true if the given character must be considered as a separator, false otherwise. + */ + private static boolean isSyntaxSeparator(final char c){ + return (c == COMMA || c == DEGREE || c == HOUR || c == MINUTE || c == SECOND || c == OPEN_PAR || c == CLOSE_PAR || c == LESS_THAN || c == GREATER_THAN || c == OPEN_BRACE || c == CLOSE_BRACE); + } + + /** + * Get the next character and ensure it is the same as the character given in parameter. + * If the read character is not matching the expected one, a {@link ParseException} is thrown. + * + * @param expected Expected character. + * + * @throws ParseException If the next character is not matching the given one. + */ + private void nextToken(final char expected) throws ParseException{ + // Skip all spaces: + skipSpaces(); + + // Test whether the end is reached: + if (pos >= expr.length()) + throw new EOEException(); + + // Fetch the next character: + char t = expr.charAt(pos++); + token = new String(new char[]{t}); + + /* Test the the fetched character with the expected one + * and throw an error if they don't match: */ + if (t != expected) + throw new ParseException("Incorrect syntax for \"" + expr + "\"! \"" + expected + "\" was expected instead of \"" + t + "\".", new TextPosition(1, pos - 1, 1, pos)); + } + + /** + * Parse the given PgSphere geometry as a point. + * + * @param pgsphereExpr The PgSphere expression to parse as a point. + * + * @return A {@link Region} implementing a STC Position region. + * + * @throws ParseException If the PgSphere syntax of the given expression is wrong or does not correspond to a point. 
+ */ + public Region parsePoint(final String pgsphereExpr) throws ParseException{ + // Init the parser: + init(pgsphereExpr); + // Parse the expression: + double[] coord = parsePoint(); + // No more character should remain after that: + end(); + // Build the STC Position region: + return new Region(null, coord); + } + + /** + * Internal spoint parsing function. It parses the PgSphere expression stored in this parser as a point. + * + * @return The ra and dec coordinates (in degrees) of the parsed point. + * + * @throws ParseException If the PgSphere syntax of the given expression is wrong or does not correspond to a point. + * + * @see #parseAngle() + * @see #parsePoint(String) + */ + private double[] parsePoint() throws ParseException{ + nextToken(OPEN_PAR); + double x = parseAngle(); + nextToken(COMMA); + double y = parseAngle(); + nextToken(CLOSE_PAR); + return new double[]{x,y}; + } + + /** + * Parse the given PgSphere geometry as a circle. + * + * @param pgsphereExpr The PgSphere expression to parse as a circle. + * + * @return A {@link Region} implementing a STC Circle region. + * + * @throws ParseException If the PgSphere syntax of the given expression is wrong or does not correspond to a circle. + */ + public Region parseCircle(final String pgsphereExpr) throws ParseException{ + // Init the parser: + init(pgsphereExpr); + + // Parse the expression: + nextToken(LESS_THAN); + double[] center = parsePoint(); + nextToken(COMMA); + double radius = parseAngle(); + nextToken(GREATER_THAN); + + // No more character should remain after that: + end(); + + // Build the STC Circle region: + return new Region(null, center, radius); + } + + /** + * Parse the given PgSphere geometry as a box. + * + * @param pgsphereExpr The PgSphere expression to parse as a box. + * + * @return A {@link Region} implementing a STC Box region. + * + * @throws ParseException If the PgSphere syntax of the given expression is wrong or does not correspond to a box. 
+ */ + public Region parseBox(final String pgsphereExpr) throws ParseException{ + // Init the parser: + init(pgsphereExpr); + + // Parse the expression: + nextToken(OPEN_PAR); + double[] southwest = parsePoint(); + nextToken(COMMA); + double[] northeast = parsePoint(); + nextToken(CLOSE_PAR); + + // No more character should remain after that: + end(); + + // Build the STC Box region: + double width = Math.abs(northeast[0] - southwest[0]), height = Math.abs(northeast[1] - southwest[1]); + double[] center = new double[]{northeast[0] - width / 2,northeast[1] - height / 2}; + return new Region(null, center, width, height); + } + + /** + * Parse the given PgSphere geometry as a point. + * + * @param pgsphereExpr The PgSphere expression to parse as a point. + * + * @return A {@link Region} implementing a STC Position region. + * + * @throws ParseException If the PgSphere syntax of the given expression is wrong or does not correspond to a point. + */ + public Region parsePolygon(final String pgsphereExpr) throws ParseException{ + // Init the parser: + init(pgsphereExpr); + + // Parse the expression: + nextToken(OPEN_BRACE); + ArrayList points = new ArrayList(3); + // at least 3 points are expected: + points.add(parsePoint()); + nextToken(COMMA); + points.add(parsePoint()); + nextToken(COMMA); + points.add(parsePoint()); + // but if there are more points, parse and keep them: + while(nextToken().length() == 1 && token.charAt(0) == COMMA) + points.add(parsePoint()); + // the expression must end with a } : + if (token.length() != 1 || token.charAt(0) != CLOSE_BRACE) + throw new ParseException("Incorrect syntax for \"" + expr + "\"! \"}\" was expected instead of \"" + token + "\".", new TextPosition(1, pos - token.length(), 1, pos)); + + // No more character should remain after that: + end(); + + // Build the STC Polygon region: + return new Region(null, points.toArray(new double[points.size()][2])); + } + + /** + *

Read the next tokens as an angle expression and returns the corresponding angle in degrees.

+ * + *

This function supports the 4 following syntaxes:

+ *
+ * <ul>
+ * 	<li>RAD: {number}</li>
+ * 	<li>DEG: {number}d</li>
+ * 	<li>DMS: {number}d {number}m {number}s</li>
+ * 	<li>HMS: {number}h {number}m {number}s</li>
+ * </ul>
+ * + * @return The corresponding angle in degrees. + * + * @throws ParseException If the angle syntax is wrong or not supported. + */ + private double parseAngle() throws ParseException{ + int oldPos = pos; + String number = nextToken(); + try{ + double degrees = Double.parseDouble(number); + int sign = (degrees < 0) ? -1 : 1; + degrees = Math.abs(degrees); + + oldPos = pos; + try{ + if (nextToken().length() == 1 && token.charAt(0) == HOUR) + sign *= 15; + else if (token.length() != 1 || token.charAt(0) != DEGREE){ + degrees = degrees * 180 / Math.PI; + pos -= token.length(); + return degrees * sign; + } + + oldPos = pos; + number = nextToken(); + if (nextToken().length() == 1 && token.charAt(0) == MINUTE) + degrees += Double.parseDouble(number) / 60; + else if (token.length() == 1 && token.charAt(0) == SECOND){ + degrees += Double.parseDouble(number) / 3600; + return degrees * sign; + }else{ + pos = oldPos; + return degrees * sign; + } + + oldPos = pos; + number = nextToken(); + if (nextToken().length() == 1 && token.charAt(0) == SECOND) + degrees += Double.parseDouble(number) / 3600; + else + pos = oldPos; + }catch(EOEException ex){ + pos = oldPos; + } + + return degrees * sign; + + }catch(NumberFormatException nfe){ + throw new ParseException("Incorrect numeric syntax: \"" + number + "\"!", new TextPosition(1, pos - token.length(), 1, pos)); + } + } + } + } diff --git a/src/adql/translator/PostgreSQLTranslator.java b/src/adql/translator/PostgreSQLTranslator.java index 58cdc7a3ef768d272c70d268cdca389e68945d3d..4b2d01395a3e049ff05a4901a0ce8b05624c76e9 100644 --- a/src/adql/translator/PostgreSQLTranslator.java +++ b/src/adql/translator/PostgreSQLTranslator.java @@ -17,50 +17,16 @@ package adql.translator; * along with ADQLLibrary. If not, see . 
* * Copyright 2012-2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomisches Rechen Institute (ARI) + * Astronomisches Rechen Institut (ARI) */ -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Iterator; - -import adql.db.DBColumn; -import adql.db.DBTable; -import adql.db.exception.UnresolvedJoin; -import adql.query.ADQLList; -import adql.query.ADQLObject; -import adql.query.ADQLOrder; -import adql.query.ADQLQuery; -import adql.query.ClauseConstraints; -import adql.query.ClauseSelect; -import adql.query.ColumnReference; +import adql.db.DBType; +import adql.db.DBType.DBDatatype; +import adql.db.STCS.Region; +import adql.parser.ParseException; import adql.query.IdentifierField; -import adql.query.SelectAllColumns; -import adql.query.SelectItem; -import adql.query.constraint.ADQLConstraint; -import adql.query.constraint.Between; -import adql.query.constraint.Comparison; -import adql.query.constraint.ConstraintsGroup; -import adql.query.constraint.Exists; -import adql.query.constraint.In; -import adql.query.constraint.IsNull; -import adql.query.constraint.NotConstraint; -import adql.query.from.ADQLJoin; -import adql.query.from.ADQLTable; -import adql.query.from.FromContent; -import adql.query.operand.ADQLColumn; -import adql.query.operand.ADQLOperand; -import adql.query.operand.Concatenation; -import adql.query.operand.NegativeOperand; -import adql.query.operand.NumericConstant; -import adql.query.operand.Operation; import adql.query.operand.StringConstant; -import adql.query.operand.WrappedOperand; -import adql.query.operand.function.ADQLFunction; import adql.query.operand.function.MathFunction; -import adql.query.operand.function.SQLFunction; -import adql.query.operand.function.SQLFunctionType; -import adql.query.operand.function.UserDefinedFunction; import adql.query.operand.function.geometry.AreaFunction; import adql.query.operand.function.geometry.BoxFunction; import 
adql.query.operand.function.geometry.CentroidFunction; @@ -69,54 +35,61 @@ import adql.query.operand.function.geometry.ContainsFunction; import adql.query.operand.function.geometry.DistanceFunction; import adql.query.operand.function.geometry.ExtractCoord; import adql.query.operand.function.geometry.ExtractCoordSys; -import adql.query.operand.function.geometry.GeometryFunction; -import adql.query.operand.function.geometry.GeometryFunction.GeometryValue; import adql.query.operand.function.geometry.IntersectsFunction; import adql.query.operand.function.geometry.PointFunction; import adql.query.operand.function.geometry.PolygonFunction; import adql.query.operand.function.geometry.RegionFunction; /** - *

Translates all ADQL objects into the SQL adaptation of Postgres.

+ *

Translates all ADQL objects into an SQL interrogation query designed for PostgreSQL.

* - *

IMPORTANT: The geometrical functions are translated exactly as in ADQL. - * You will probably need to extend this translator to correctly manage the geometrical functions. - * An extension is already available for PgSphere: {@link PgSphereTranslator}.

+ *

Important: + * The geometrical functions are translated exactly as in ADQL. + * You will probably need to extend this translator to correctly manage the geometrical functions. + * An extension is already available for PgSphere: {@link PgSphereTranslator}. + *

* * @author Grégory Mantelet (CDS;ARI) - * @version 1.2 (03/2014) + * @version 1.3 (11/2014) * * @see PgSphereTranslator */ -public class PostgreSQLTranslator implements ADQLTranslator { +public class PostgreSQLTranslator extends JDBCTranslator { - protected boolean inSelect = false; + /**

Indicate the case sensitivity to apply to each SQL identifier (only SCHEMA, TABLE and COLUMN).

+ * + *

Note: + * In this implementation, this field is set by the constructor and never modified elsewhere. + * It would be better to never modify it after the construction in order to keep a certain consistency. + *

+ */ protected byte caseSensitivity = 0x00; /** - * Builds a PostgreSQLTranslator which takes into account the case sensitivity on column names. - * It means that column names which have been written between double quotes, will be also translated between double quotes. + * Builds a PostgreSQLTranslator which always translates in SQL all identifiers (schema, table and column) in a case sensitive manner ; + * in other words, schema, table and column names will be surrounded by double quotes in the SQL translation. */ public PostgreSQLTranslator(){ - this(true); + caseSensitivity = 0x0F; } /** - * Builds a PostgreSQLTranslator. + * Builds a PostgreSQLTranslator which always translates in SQL all identifiers (schema, table and column) in the specified case sensitivity ; + * in other words, schema, table and column names will all be surrounded or not by double quotes in the SQL translation. * - * @param column true to take into account the case sensitivity of column names, false otherwise. + * @param allCaseSensitive true to translate all identifiers in a case sensitive manner (surrounded by double quotes), false for case insensitivity. */ - public PostgreSQLTranslator(final boolean column){ - caseSensitivity = IdentifierField.COLUMN.setCaseSensitive(caseSensitivity, column); + public PostgreSQLTranslator(final boolean allCaseSensitive){ + caseSensitivity = allCaseSensitive ? (byte)0x0F : (byte)0x00; } /** - * Builds a PostgreSQLTranslator. + * Builds a PostgreSQLTranslator which will always translate in SQL identifiers with the defined case sensitivity. * - * @param catalog true to take into account the case sensitivity of catalog names, false otherwise. - * @param schema true to take into account the case sensitivity of schema names, false otherwise. - * @param table true to take into account the case sensitivity of table names, false otherwise. - * @param column true to take into account the case sensitivity of column names, false otherwise. 
+ * @param catalog true to translate catalog names with double quotes (case sensitive in the DBMS), false otherwise. + * @param schema true to translate schema names with double quotes (case sensitive in the DBMS), false otherwise. + * @param table true to translate table names with double quotes (case sensitive in the DBMS), false otherwise. + * @param column true to translate column names with double quotes (case sensitive in the DBMS), false otherwise. */ public PostgreSQLTranslator(final boolean catalog, final boolean schema, final boolean table, final boolean column){ caseSensitivity = IdentifierField.CATALOG.setCaseSensitive(caseSensitivity, catalog); @@ -125,521 +98,22 @@ public class PostgreSQLTranslator implements ADQLTranslator { caseSensitivity = IdentifierField.COLUMN.setCaseSensitive(caseSensitivity, column); } - /** - * Appends the full name of the given table to the given StringBuffer. - * - * @param str The string buffer. - * @param dbTable The table whose the full name must be appended. - * - * @return The string buffer + full table name. - */ - public final StringBuffer appendFullDBName(final StringBuffer str, final DBTable dbTable){ - if (dbTable != null){ - if (dbTable.getDBCatalogName() != null) - appendIdentifier(str, dbTable.getDBCatalogName(), IdentifierField.CATALOG).append('.'); - - if (dbTable.getDBSchemaName() != null) - appendIdentifier(str, dbTable.getDBSchemaName(), IdentifierField.SCHEMA).append('.'); - - appendIdentifier(str, dbTable.getDBName(), IdentifierField.TABLE); - } - return str; - } - - /** - * Appends the given identifier in the given StringBuffer. - * - * @param str The string buffer. - * @param id The identifier to append. - * @param field The type of identifier (column, table, schema, catalog or alias ?). - * - * @return The string buffer + identifier. 
- */ - public final StringBuffer appendIdentifier(final StringBuffer str, final String id, final IdentifierField field){ - return appendIdentifier(str, id, field.isCaseSensitive(caseSensitivity)); - } - - /** - * Appends the given identifier to the given StringBuffer. - * - * @param str The string buffer. - * @param id The identifier to append. - * @param caseSensitive true to format the identifier so that preserving the case sensitivity, false otherwise. - * - * @return The string buffer + identifier. - */ - public static final StringBuffer appendIdentifier(final StringBuffer str, final String id, final boolean caseSensitive){ - if (caseSensitive) - return str.append('\"').append(id).append('\"'); - else - return str.append(id); - } - - @Override - @SuppressWarnings("unchecked") - public String translate(ADQLObject obj) throws TranslationException{ - if (obj instanceof ADQLQuery) - return translate((ADQLQuery)obj); - else if (obj instanceof ADQLList) - return translate((ADQLList)obj); - else if (obj instanceof SelectItem) - return translate((SelectItem)obj); - else if (obj instanceof ColumnReference) - return translate((ColumnReference)obj); - else if (obj instanceof ADQLTable) - return translate((ADQLTable)obj); - else if (obj instanceof ADQLJoin) - return translate((ADQLJoin)obj); - else if (obj instanceof ADQLOperand) - return translate((ADQLOperand)obj); - else if (obj instanceof ADQLConstraint) - return translate((ADQLConstraint)obj); - else - return obj.toADQL(); - } - - @Override - public String translate(ADQLQuery query) throws TranslationException{ - StringBuffer sql = new StringBuffer(translate(query.getSelect())); - - sql.append("\nFROM ").append(translate(query.getFrom())); - - if (!query.getWhere().isEmpty()) - sql.append('\n').append(translate(query.getWhere())); - - if (!query.getGroupBy().isEmpty()) - sql.append('\n').append(translate(query.getGroupBy())); - - if (!query.getHaving().isEmpty()) - 
sql.append('\n').append(translate(query.getHaving())); - - if (!query.getOrderBy().isEmpty()) - sql.append('\n').append(translate(query.getOrderBy())); - - if (query.getSelect().hasLimit()) - sql.append("\nLimit ").append(query.getSelect().getLimit()); - - return sql.toString(); - } - - /* *************************** */ - /* ****** LIST & CLAUSE ****** */ - /* *************************** */ - @Override - public String translate(ADQLList list) throws TranslationException{ - if (list instanceof ClauseSelect) - return translate((ClauseSelect)list); - else if (list instanceof ClauseConstraints) - return translate((ClauseConstraints)list); - else - return getDefaultADQLList(list); - } - - /** - * Gets the default SQL output for a list of ADQL objects. - * - * @param list List to format into SQL. - * - * @return The corresponding SQL. - * - * @throws TranslationException If there is an error during the translation. - */ - protected String getDefaultADQLList(ADQLList list) throws TranslationException{ - String sql = (list.getName() == null) ? "" : (list.getName() + " "); - - boolean oldInSelect = inSelect; - inSelect = (list.getName() != null) && list.getName().equalsIgnoreCase("select"); - - try{ - for(int i = 0; i < list.size(); i++) - sql += ((i == 0) ? "" : (" " + list.getSeparator(i) + " ")) + translate(list.get(i)); - }finally{ - inSelect = oldInSelect; - } - - return sql; - } - @Override - public String translate(ClauseSelect clause) throws TranslationException{ - String sql = null; - - for(int i = 0; i < clause.size(); i++){ - if (i == 0){ - sql = clause.getName() + (clause.distinctColumns() ? 
" DISTINCT" : ""); - }else - sql += " " + clause.getSeparator(i); - - sql += " " + translate(clause.get(i)); - } - - return sql; - } - - @Override - public String translate(ClauseConstraints clause) throws TranslationException{ - if (clause instanceof ConstraintsGroup) - return "(" + getDefaultADQLList(clause) + ")"; - else - return getDefaultADQLList(clause); - } - - @Override - public String translate(SelectItem item) throws TranslationException{ - if (item instanceof SelectAllColumns) - return translate((SelectAllColumns)item); - - StringBuffer translation = new StringBuffer(translate(item.getOperand())); - if (item.hasAlias()){ - translation.append(" AS "); - appendIdentifier(translation, item.getAlias(), item.isCaseSensitive()); - }else - translation.append(" AS ").append(item.getName()); - - return translation.toString(); - } - - @Override - public String translate(SelectAllColumns item) throws TranslationException{ - HashMap mapAlias = new HashMap(); - - // Fetch the full list of columns to display: - Iterable dbCols = null; - if (item.getAdqlTable() != null && item.getAdqlTable().getDBLink() != null){ - ADQLTable table = item.getAdqlTable(); - dbCols = table.getDBLink(); - if (table.hasAlias()){ - String key = appendFullDBName(new StringBuffer(), table.getDBLink()).toString(); - mapAlias.put(key, table.isCaseSensitive(IdentifierField.ALIAS) ? ("\"" + table.getAlias() + "\"") : table.getAlias()); - } - }else if (item.getQuery() != null){ - try{ - dbCols = item.getQuery().getFrom().getDBColumns(); - }catch(UnresolvedJoin pe){ - throw new TranslationException("Due to a join problem, the ADQL to SQL translation can not be completed!", pe); - } - ArrayList tables = item.getQuery().getFrom().getTables(); - for(ADQLTable table : tables){ - if (table.hasAlias()){ - String key = appendFullDBName(new StringBuffer(), table.getDBLink()).toString(); - mapAlias.put(key, table.isCaseSensitive(IdentifierField.ALIAS) ? 
("\"" + table.getAlias() + "\"") : table.getAlias()); - } - } - } - - // Write the DB name of all these columns: - if (dbCols != null){ - StringBuffer cols = new StringBuffer(); - for(DBColumn col : dbCols){ - if (cols.length() > 0) - cols.append(','); - if (col.getTable() != null){ - String fullDbName = appendFullDBName(new StringBuffer(), col.getTable()).toString(); - if (mapAlias.containsKey(fullDbName)) - appendIdentifier(cols, mapAlias.get(fullDbName), false).append('.'); - else - cols.append(fullDbName).append('.'); - } - appendIdentifier(cols, col.getDBName(), IdentifierField.COLUMN); - cols.append(" AS \"").append(col.getADQLName()).append('\"'); - } - return (cols.length() > 0) ? cols.toString() : item.toADQL(); - }else{ - return item.toADQL(); - } - } - - @Override - public String translate(ColumnReference ref) throws TranslationException{ - if (ref instanceof ADQLOrder) - return translate((ADQLOrder)ref); - else - return getDefaultColumnReference(ref); - } - - /** - * Gets the default SQL output for a column reference. - * - * @param ref The column reference to format into SQL. - * - * @return The corresponding SQL. - * - * @throws TranslationException If there is an error during the translation. - */ - protected String getDefaultColumnReference(ColumnReference ref) throws TranslationException{ - if (ref.isIndex()){ - return "" + ref.getColumnIndex(); - }else{ - if (ref.getDBLink() == null){ - return (ref.isCaseSensitive() ? 
("\"" + ref.getColumnName() + "\"") : ref.getColumnName()); - }else{ - DBColumn dbCol = ref.getDBLink(); - StringBuffer colName = new StringBuffer(); - // Use the table alias if any: - if (ref.getAdqlTable() != null && ref.getAdqlTable().hasAlias()) - appendIdentifier(colName, ref.getAdqlTable().getAlias(), ref.getAdqlTable().isCaseSensitive(IdentifierField.ALIAS)).append('.'); - - // Use the DBTable if any: - else if (dbCol.getTable() != null) - appendFullDBName(colName, dbCol.getTable()).append('.'); - - appendIdentifier(colName, dbCol.getDBName(), IdentifierField.COLUMN); - - return colName.toString(); - } - } - } - - @Override - public String translate(ADQLOrder order) throws TranslationException{ - return getDefaultColumnReference(order) + (order.isDescSorting() ? " DESC" : " ASC"); - } - - /* ************************** */ - /* ****** TABLE & JOIN ****** */ - /* ************************** */ - @Override - public String translate(FromContent content) throws TranslationException{ - if (content instanceof ADQLTable) - return translate((ADQLTable)content); - else if (content instanceof ADQLJoin) - return translate((ADQLJoin)content); - else - return content.toADQL(); - } - - @Override - public String translate(ADQLTable table) throws TranslationException{ - StringBuffer sql = new StringBuffer(); - - // CASE: SUB-QUERY: - if (table.isSubQuery()) - sql.append('(').append(translate(table.getSubQuery())).append(')'); - - // CASE: TABLE REFERENCE: - else{ - // Use the corresponding DB table, if known: - if (table.getDBLink() != null) - appendFullDBName(sql, table.getDBLink()); - // Otherwise, use the whole table name given in the ADQL query: - else - sql.append(table.getFullTableName()); - } - - // Add the table alias, if any: - if (table.hasAlias()){ - sql.append(" AS "); - appendIdentifier(sql, table.getAlias(), table.isCaseSensitive(IdentifierField.ALIAS)); - } - - return sql.toString(); - } - - @Override - public String translate(ADQLJoin join) throws 
TranslationException{ - StringBuffer sql = new StringBuffer(translate(join.getLeftTable())); - - if (join.isNatural()) - sql.append(" NATURAL"); - - sql.append(' ').append(join.getJoinType()).append(' ').append(translate(join.getRightTable())).append(' '); - - if (!join.isNatural()){ - if (join.getJoinCondition() != null) - sql.append(translate(join.getJoinCondition())); - else if (join.hasJoinedColumns()){ - StringBuffer cols = new StringBuffer(); - Iterator it = join.getJoinedColumns(); - while(it.hasNext()){ - ADQLColumn item = it.next(); - if (cols.length() > 0) - cols.append(", "); - if (item.getDBLink() == null) - appendIdentifier(cols, item.getColumnName(), item.isCaseSensitive(IdentifierField.COLUMN)); - else - appendIdentifier(cols, item.getDBLink().getDBName(), IdentifierField.COLUMN); - } - sql.append("USING (").append(cols).append(')'); - } - } - - return sql.toString(); - } - - /* ********************* */ - /* ****** OPERAND ****** */ - /* ********************* */ - @Override - public String translate(ADQLOperand op) throws TranslationException{ - if (op instanceof ADQLColumn) - return translate((ADQLColumn)op); - else if (op instanceof Concatenation) - return translate((Concatenation)op); - else if (op instanceof NegativeOperand) - return translate((NegativeOperand)op); - else if (op instanceof NumericConstant) - return translate((NumericConstant)op); - else if (op instanceof StringConstant) - return translate((StringConstant)op); - else if (op instanceof WrappedOperand) - return translate((WrappedOperand)op); - else if (op instanceof Operation) - return translate((Operation)op); - else if (op instanceof ADQLFunction) - return translate((ADQLFunction)op); - else - return op.toADQL(); - } - - @Override - public String translate(ADQLColumn column) throws TranslationException{ - // Use its DB name if known: - if (column.getDBLink() != null){ - DBColumn dbCol = column.getDBLink(); - StringBuffer colName = new StringBuffer(); - // Use the table alias if 
any: - if (column.getAdqlTable() != null && column.getAdqlTable().hasAlias()) - appendIdentifier(colName, column.getAdqlTable().getAlias(), column.getAdqlTable().isCaseSensitive(IdentifierField.ALIAS)).append('.'); - - // Use the DBTable if any: - else if (dbCol.getTable() != null && dbCol.getTable().getDBName() != null) - appendFullDBName(colName, dbCol.getTable()).append('.'); - - // Otherwise, use the prefix of the column given in the ADQL query: - else if (column.getTableName() != null) - colName = column.getFullColumnPrefix().append('.'); - - appendIdentifier(colName, dbCol.getDBName(), IdentifierField.COLUMN); - - return colName.toString(); - } - // Otherwise, use the whole name given in the ADQL query: - else - return column.getFullColumnName(); - } - - @Override - public String translate(Concatenation concat) throws TranslationException{ - return translate((ADQLList)concat); - } - - @Override - public String translate(NegativeOperand negOp) throws TranslationException{ - return "-" + translate(negOp.getOperand()); - } - - @Override - public String translate(NumericConstant numConst) throws TranslationException{ - return numConst.getValue(); + public boolean isCaseSensitive(final IdentifierField field){ + return field == null ? 
false : field.isCaseSensitive(caseSensitivity); } @Override public String translate(StringConstant strConst) throws TranslationException{ - return "'" + strConst.getValue() + "'"; - } - - @Override - public String translate(WrappedOperand op) throws TranslationException{ - return "(" + translate(op.getOperand()) + ")"; - } - - @Override - public String translate(Operation op) throws TranslationException{ - return translate(op.getLeftOperand()) + op.getOperation().toADQL() + translate(op.getRightOperand()); - } - - /* ************************ */ - /* ****** CONSTRAINT ****** */ - /* ************************ */ - @Override - public String translate(ADQLConstraint cons) throws TranslationException{ - if (cons instanceof Comparison) - return translate((Comparison)cons); - else if (cons instanceof Between) - return translate((Between)cons); - else if (cons instanceof Exists) - return translate((Exists)cons); - else if (cons instanceof In) - return translate((In)cons); - else if (cons instanceof IsNull) - return translate((IsNull)cons); - else if (cons instanceof NotConstraint) - return translate((NotConstraint)cons); - else - return cons.toADQL(); - } - - @Override - public String translate(Comparison comp) throws TranslationException{ - return translate(comp.getLeftOperand()) + " " + comp.getOperator().toADQL() + " " + translate(comp.getRightOperand()); - } - - @Override - public String translate(Between comp) throws TranslationException{ - return translate(comp.getLeftOperand()) + " BETWEEN " + translate(comp.getMinOperand()) + " AND " + translate(comp.getMaxOperand()); - } - - @Override - public String translate(Exists exists) throws TranslationException{ - return "EXISTS(" + translate(exists.getSubQuery()) + ")"; - } - - @Override - public String translate(In in) throws TranslationException{ - return translate(in.getOperand()) + " " + in.getName() + " (" + (in.hasSubQuery() ? 
translate(in.getSubQuery()) : translate(in.getValuesList())) + ")"; - } - - @Override - public String translate(IsNull isNull) throws TranslationException{ - return translate(isNull.getColumn()) + " IS " + (isNull.isNotNull() ? "NOT " : "") + "NULL"; - } - - @Override - public String translate(NotConstraint notCons) throws TranslationException{ - return "NOT " + translate(notCons.getConstraint()); - } - - /* *********************** */ - /* ****** FUNCTIONS ****** */ - /* *********************** */ - @Override - public String translate(ADQLFunction fct) throws TranslationException{ - if (fct instanceof GeometryFunction) - return translate((GeometryFunction)fct); - else if (fct instanceof MathFunction) - return translate((MathFunction)fct); - else if (fct instanceof SQLFunction) - return translate((SQLFunction)fct); - else if (fct instanceof UserDefinedFunction) - return translate((UserDefinedFunction)fct); + // Deal with the special escaping syntax of Postgres: + /* A string containing characters to escape must be prefixed by an E. + * Without this prefix, Potsgres does not escape the concerned characters and + * consider backslashes as normal characters. + * For instance: E'foo\tfoo2'. */ + if (strConst.getValue() != null && strConst.getValue().contains("\\")) + return "E'" + strConst.getValue() + "'"; else - return getDefaultADQLFunction(fct); - } - - /** - * Gets the default SQL output for the given ADQL function. - * - * @param fct The ADQL function to format into SQL. - * - * @return The corresponding SQL. - * - * @throws TranslationException If there is an error during the translation. - */ - protected String getDefaultADQLFunction(ADQLFunction fct) throws TranslationException{ - String sql = fct.getName() + "("; - - for(int i = 0; i < fct.getNbParameters(); i++) - sql += ((i == 0) ? 
"" : ", ") + translate(fct.getParameter(i)); - - return sql + ")"; - } - - @Override - public String translate(SQLFunction fct) throws TranslationException{ - if (fct.getType() == SQLFunctionType.COUNT_ALL) - return "COUNT(" + (fct.isDistinct() ? "DISTINCT " : "") + "*)"; - else - return fct.getName() + "(" + (fct.isDistinct() ? "DISTINCT " : "") + translate(fct.getParameter(0)) + ")"; + return super.translate(strConst); } @Override @@ -658,125 +132,168 @@ public class PostgreSQLTranslator implements ADQLTranslator { } } - @Override - public String translate(UserDefinedFunction fct) throws TranslationException{ - return getDefaultADQLFunction(fct); - } - - /* *********************************** */ - /* ****** GEOMETRICAL FUNCTIONS ****** */ - /* *********************************** */ - @Override - public String translate(GeometryFunction fct) throws TranslationException{ - if (fct instanceof AreaFunction) - return translate((AreaFunction)fct); - else if (fct instanceof BoxFunction) - return translate((BoxFunction)fct); - else if (fct instanceof CentroidFunction) - return translate((CentroidFunction)fct); - else if (fct instanceof CircleFunction) - return translate((CircleFunction)fct); - else if (fct instanceof ContainsFunction) - return translate((ContainsFunction)fct); - else if (fct instanceof DistanceFunction) - return translate((DistanceFunction)fct); - else if (fct instanceof ExtractCoord) - return translate((ExtractCoord)fct); - else if (fct instanceof ExtractCoordSys) - return translate((ExtractCoordSys)fct); - else if (fct instanceof IntersectsFunction) - return translate((IntersectsFunction)fct); - else if (fct instanceof PointFunction) - return translate((PointFunction)fct); - else if (fct instanceof PolygonFunction) - return translate((PolygonFunction)fct); - else if (fct instanceof RegionFunction) - return translate((RegionFunction)fct); - else - return getDefaultGeometryFunction(fct); - } - - /** - *

Gets the default SQL output for the given geometrical function.

- * - *

Note: By default, only the ADQL serialization is returned.

- * - * @param fct The geometrical function to translate. - * - * @return The corresponding SQL. - * - * @throws TranslationException If there is an error during the translation. - */ - protected String getDefaultGeometryFunction(GeometryFunction fct) throws TranslationException{ - if (inSelect) - return "'" + fct.toADQL().replaceAll("'", "''") + "'"; - else - return getDefaultADQLFunction(fct); - } - - @Override - public String translate(GeometryValue geomValue) throws TranslationException{ - return translate(geomValue.getValue()); - } - @Override public String translate(ExtractCoord extractCoord) throws TranslationException{ - return getDefaultGeometryFunction(extractCoord); + return getDefaultADQLFunction(extractCoord); } @Override public String translate(ExtractCoordSys extractCoordSys) throws TranslationException{ - return getDefaultGeometryFunction(extractCoordSys); + return getDefaultADQLFunction(extractCoordSys); } @Override public String translate(AreaFunction areaFunction) throws TranslationException{ - return getDefaultGeometryFunction(areaFunction); + return getDefaultADQLFunction(areaFunction); } @Override public String translate(CentroidFunction centroidFunction) throws TranslationException{ - return getDefaultGeometryFunction(centroidFunction); + return getDefaultADQLFunction(centroidFunction); } @Override public String translate(DistanceFunction fct) throws TranslationException{ - return getDefaultGeometryFunction(fct); + return getDefaultADQLFunction(fct); } @Override public String translate(ContainsFunction fct) throws TranslationException{ - return getDefaultGeometryFunction(fct); + return getDefaultADQLFunction(fct); } @Override public String translate(IntersectsFunction fct) throws TranslationException{ - return getDefaultGeometryFunction(fct); + return getDefaultADQLFunction(fct); } @Override public String translate(BoxFunction box) throws TranslationException{ - return getDefaultGeometryFunction(box); + return getDefaultADQLFunction(box); } 
@Override public String translate(CircleFunction circle) throws TranslationException{ - return getDefaultGeometryFunction(circle); + return getDefaultADQLFunction(circle); } @Override public String translate(PointFunction point) throws TranslationException{ - return getDefaultGeometryFunction(point); + return getDefaultADQLFunction(point); } @Override public String translate(PolygonFunction polygon) throws TranslationException{ - return getDefaultGeometryFunction(polygon); + return getDefaultADQLFunction(polygon); } @Override public String translate(RegionFunction region) throws TranslationException{ - return getDefaultGeometryFunction(region); + return getDefaultADQLFunction(region); + } + + @Override + public DBType convertTypeFromDB(final int dbmsType, final String rawDbmsTypeName, String dbmsTypeName, final String[] params){ + // If no type is provided return VARCHAR: + if (dbmsTypeName == null || dbmsTypeName.trim().length() == 0) + return new DBType(DBDatatype.VARCHAR, DBType.NO_LENGTH); + + // Put the dbmsTypeName in lower case for the following comparisons: + dbmsTypeName = dbmsTypeName.toLowerCase(); + + // Extract the length parameter (always the first one): + int lengthParam = DBType.NO_LENGTH; + if (params != null && params.length > 0){ + try{ + lengthParam = Integer.parseInt(params[0]); + }catch(NumberFormatException nfe){} + } + + // SMALLINT + if (dbmsTypeName.equals("smallint") || dbmsTypeName.equals("int2") || dbmsTypeName.equals("smallserial") || dbmsTypeName.equals("serial2") || dbmsTypeName.equals("boolean") || dbmsTypeName.equals("bool")) + return new DBType(DBDatatype.SMALLINT); + // INTEGER + else if (dbmsTypeName.equals("integer") || dbmsTypeName.equals("int") || dbmsTypeName.equals("int4") || dbmsTypeName.equals("serial") || dbmsTypeName.equals("serial4")) + return new DBType(DBDatatype.INTEGER); + // BIGINT + else if (dbmsTypeName.equals("bigint") || dbmsTypeName.equals("int8") || dbmsTypeName.equals("bigserial") || 
dbmsTypeName.equals("bigserial8")) + return new DBType(DBDatatype.BIGINT); + // REAL + else if (dbmsTypeName.equals("real") || dbmsTypeName.equals("float4")) + return new DBType(DBDatatype.REAL); + // DOUBLE + else if (dbmsTypeName.equals("double precision") || dbmsTypeName.equals("float8")) + return new DBType(DBDatatype.DOUBLE); + // BINARY + else if (dbmsTypeName.equals("bit")) + return new DBType(DBDatatype.BINARY, lengthParam); + // VARBINARY + else if (dbmsTypeName.equals("bit varying") || dbmsTypeName.equals("varbit")) + return new DBType(DBDatatype.VARBINARY, lengthParam); + // CHAR + else if (dbmsTypeName.equals("char") || dbmsTypeName.equals("character")) + return new DBType(DBDatatype.CHAR, lengthParam); + // VARCHAR + else if (dbmsTypeName.equals("varchar") || dbmsTypeName.equals("character varying")) + return new DBType(DBDatatype.VARCHAR, lengthParam); + // BLOB + else if (dbmsTypeName.equals("bytea")) + return new DBType(DBDatatype.BLOB); + // CLOB + else if (dbmsTypeName.equals("text")) + return new DBType(DBDatatype.CLOB); + // TIMESTAMP + else if (dbmsTypeName.equals("timestamp") || dbmsTypeName.equals("timestamptz") || dbmsTypeName.equals("time") || dbmsTypeName.equals("timetz") || dbmsTypeName.equals("date")) + return new DBType(DBDatatype.TIMESTAMP); + // Default: + else + return new DBType(DBDatatype.VARCHAR, DBType.NO_LENGTH); + } + + @Override + public String convertTypeToDB(final DBType type){ + if (type == null) + return "VARCHAR"; + + switch(type.type){ + + case SMALLINT: + case INTEGER: + case REAL: + case BIGINT: + case CHAR: + case VARCHAR: + case TIMESTAMP: + return type.type.toString(); + + case DOUBLE: + return "DOUBLE PRECISION"; + + case BINARY: + case VARBINARY: + return "bytea"; + + case BLOB: + return "bytea"; + + case CLOB: + return "TEXT"; + + case POINT: + case REGION: + default: + return "VARCHAR"; + } + } + + @Override + public Region translateGeometryFromDB(final Object jdbcColValue) throws ParseException{ + throw new 
ParseException("Unsupported geometrical value! The value \"" + jdbcColValue + "\" can not be parsed as a region."); + } + + @Override + public Object translateGeometryToDB(final Region region) throws ParseException{ + throw new ParseException("Geometries can not be uploaded in the database in this implementation!"); } } diff --git a/src/cds/utils/TextualSearchList.java b/src/cds/utils/TextualSearchList.java index dfbff861dff639e2e287660ad31c36893d57ac73..3dd9888b0f0f866f36f2a590c9cf09dcc2d78b4c 100644 --- a/src/cds/utils/TextualSearchList.java +++ b/src/cds/utils/TextualSearchList.java @@ -17,7 +17,7 @@ package cds.utils; * along with ADQLLibrary. If not, see . * * Copyright 2012-2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomisches Rechen Institute (ARI) + * Astronomisches Rechen Institut (ARI) */ import java.util.ArrayList; diff --git a/src/org/json/Json4Uws.java b/src/org/json/Json4Uws.java index bfa25e3391b8b4958242832a0be8cb3b7d4507ff..9e84863d5585d672a5166d1422d4473b2c3287a2 100644 --- a/src/org/json/Json4Uws.java +++ b/src/org/json/Json4Uws.java @@ -16,26 +16,26 @@ package org.json; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.util.Iterator; +import uws.ISO8601Format; import uws.job.ErrorSummary; import uws.job.JobList; import uws.job.Result; import uws.job.UWSJob; - import uws.job.user.JobOwner; - import uws.service.UWS; import uws.service.UWSUrl; /** * Useful conversion functions from UWS to JSON. 
* - * @author Grégory Mantelet (CDS) - * @version 05/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (12/2014) */ public final class Json4Uws { @@ -130,11 +130,11 @@ public final class Json4Uws { json.put(UWSJob.PARAM_OWNER, job.getOwner().getPseudo()); json.put(UWSJob.PARAM_QUOTE, job.getQuote()); if (job.getStartTime() != null) - json.put(UWSJob.PARAM_START_TIME, UWSJob.dateFormat.format(job.getStartTime())); + json.put(UWSJob.PARAM_START_TIME, ISO8601Format.format(job.getStartTime())); if (job.getEndTime() != null) - json.put(UWSJob.PARAM_END_TIME, UWSJob.dateFormat.format(job.getEndTime())); + json.put(UWSJob.PARAM_END_TIME, ISO8601Format.format(job.getEndTime())); if (job.getDestructionTime() != null) - json.put(UWSJob.PARAM_DESTRUCTION_TIME, UWSJob.dateFormat.format(job.getDestructionTime())); + json.put(UWSJob.PARAM_DESTRUCTION_TIME, ISO8601Format.format(job.getDestructionTime())); json.put(UWSJob.PARAM_EXECUTION_DURATION, job.getExecutionDuration()); json.put(UWSJob.PARAM_PARAMETERS, getJobParamsJson(job)); json.put(UWSJob.PARAM_RESULTS, getJobResultsJson(job)); @@ -153,8 +153,23 @@ public final class Json4Uws { public final static JSONObject getJobParamsJson(final UWSJob job) throws JSONException{ JSONObject json = new JSONObject(); if (job != null){ - for(String name : job.getAdditionalParameters()) - json.put(name, job.getAdditionalParameterValue(name)); + Object val; + for(String name : job.getAdditionalParameters()){ + // get the raw value: + val = job.getAdditionalParameterValue(name); + // if an array, build a JSON array of strings: + if (val != null && val.getClass().isArray()){ + JSONArray array = new JSONArray(); + for(Object o : (Object[])val){ + if (o != null) + array.put(o.toString()); + } + json.put(name, array); + } + // otherwise, just put the value: + else + json.put(name, val); + } } return json; } diff --git a/src/tap/ADQLExecutor.java b/src/tap/ADQLExecutor.java index 
6b70b0cfcd6ec1c04531de85cf26b3a625b3870d..27ec125ac116d6d36c8cb72b14bd871ba2970214 100644 --- a/src/tap/ADQLExecutor.java +++ b/src/tap/ADQLExecutor.java @@ -16,141 +16,203 @@ package tap; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012-2013 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomisches Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.IOException; import java.io.OutputStream; -import java.sql.SQLException; import javax.servlet.http.HttpServletResponse; +import tap.data.DataReadException; +import tap.data.TableIterator; import tap.db.DBConnection; import tap.db.DBException; import tap.formatter.OutputFormat; import tap.log.TAPLog; import tap.metadata.TAPSchema; import tap.metadata.TAPTable; +import tap.parameters.DALIUpload; import tap.parameters.TAPParameters; -import tap.upload.TableLoader; +import tap.upload.Uploader; import uws.UWSException; +import uws.UWSToolBox; import uws.job.JobThread; import uws.job.Result; +import uws.service.log.UWSLog.LogLevel; import adql.parser.ADQLParser; import adql.parser.ADQLQueryFactory; import adql.parser.ParseException; -import adql.parser.QueryChecker; import adql.query.ADQLQuery; -import adql.translator.ADQLTranslator; -import adql.translator.TranslationException; /** + *

Let process completely an ADQL query.

* + *

Thus, this class aims to apply the following actions (in the given order):

+ *
    + *
  1. Upload the user tables, if any
  2. + *
  3. Parse the ADQL query (and so, transform it in an object tree)
  4. + *
  5. Execute it in the "database"
  6. + *
  7. Format and write the result
  8. + *
  9. Drop all uploaded tables from the "database"
  10. + *
* - * @author Grégory Mantelet (CDS;ARI) - gmantele@ari.uni-heidelberg.de - * @version 1.1 (12/2013) + *

Job execution mode

* - * @param + *

+ * This executor is able to process queries coming from a synchronous job (the result must be written directly in the HTTP response) + * and from an asynchronous job (the result must be written, generally, in a file). Two start(...) functions let deal with + * the differences between the two job execution modes: {@link #start(AsyncThread)} for asynchronous jobs + * and {@link #start(Thread, String, TAPParameters, HttpServletResponse)} for synchronous jobs. + *

+ * + *

Input/Output formats

+ * + *

Uploaded tables must be provided in VOTable format.

+ * + *

+ * Query results must be formatted in the format specified by the user in the job parameters. A corresponding formatter ({@link OutputFormat}) + * is asked to the description of the TAP service ({@link ServiceConnection}). If none can be found, VOTable will be chosen by default. + *

+ * + *

Executor customization

+ * + *

It is totally possible to customize some parts of the ADQL query processing. However, the main algorithm must remain the same and is implemented + * by {@link #start()}. This function is final, like {@link #start(AsyncThread)} and {@link #start(Thread, String, TAPParameters, HttpServletResponse)}, + * which are just preparing the execution for {@link #start()} in function of the job execution mode (asynchronous or synchronous). + *

+ * + *

Note: + * {@link #start()} is using the Template Method Design Pattern: it defines the skeleton/algorithm of the processing, and defers some steps + * to other functions. + *

+ * + *

+ * So, you are able to customize almost all individual steps of the ADQL query processing: {@link #parseADQL()}, {@link #executeADQL(ADQLQuery)} and + * {@link #writeResult(TableIterator, OutputFormat, OutputStream)}. + *

+ * + *

Note: + * Note that the formatting of the result is done by an OutputFormat and that the executor is just calling the appropriate function of the formatter. + *

+ * + *

+ * There is no way in this executor to customize the upload. However, it does not mean it can not be customized. + * Indeed you can do it easily by extending {@link Uploader} and by providing the new class inside your {@link TAPFactory} implementation + * (see {@link TAPFactory#createUploader(DBConnection)}). + *

+ * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (04/2015) */ -public class ADQLExecutor< R > { +public class ADQLExecutor { - protected final ServiceConnection service; + /** Description of the current TAP service. */ + protected final ServiceConnection service; + /** The logger to use. */ protected final TAPLog logger; + /** The thread which is using this executor. */ protected Thread thread; + /** List of all TAP parameters needed for the query execution (and particularly the ADQL query itself). */ protected TAPParameters tapParams; + /** Description of the ADQL schema containing all the tables uploaded by the user for this specific query execution. + * Note: This attribute is NULL before calling one of the start(...) function. It MAY be NULL also after if no table has been uploaded. */ + protected TAPSchema uploadSchema = null; + + /** The HTTP response in which the query execution must be written. This attribute is NULL if the execution is asynchronous. */ protected HttpServletResponse response; + /** The execution report to fill gradually while the processing of the query. + * Note: This attribute is NULL before calling one of the start(...) function, but it will never be after this call. */ protected TAPExecutionReport report; - private DBConnection dbConn = null; - protected TAPSchema uploadSchema = null; - - public ADQLExecutor(final ServiceConnection service){ + /** Connection to the "database". + * Note: This attribute is NULL before and after the query processing (= call of a start(...) function). */ + private DBConnection dbConn = null; + /** ID of the current query processing step (uploading, parsing, execution, writing result, ...). + * Note: This attribute is NULL before and after the query processing (= call of a start(...) function). */ + private ExecutionProgression progression = null; + /** Date/Time at which the current query processing step has started. */ + private long startStep = -1; + + /** + * Build an {@link ADQLExecutor}. 
+ * + * @param service The description of the TAP service. + */ + public ADQLExecutor(final ServiceConnection service){ this.service = service; this.logger = service.getLogger(); } + /** + * Get the logger used by this executor. + * + * @return The used logger. + */ public final TAPLog getLogger(){ return logger; } + /** + *

Get the report of the query execution. It helps indicating the execution progression and the duration of each step.

+ * + *

Note: + * Before starting the execution (= before the call of a "start(...)" function), this function will return NULL. + * It is set when the query processing starts and remains not NULL after that (even when the execution is finished). + *

+ * + * @return The execution report. + */ public final TAPExecutionReport getExecReport(){ return report; } - public boolean hasUploadedTables(){ - return (uploadSchema != null) && (uploadSchema.getNbTables() > 0); - } - - protected final DBConnection getDBConnection() throws TAPException{ - return (dbConn != null) ? dbConn : (dbConn = service.getFactory().createDBConnection((report != null) ? report.jobID : null)); - } - - public final void closeDBConnection() throws TAPException{ - if (dbConn != null){ - dbConn.close(); - dbConn = null; - } - } - - private final void uploadTables() throws TAPException{ - TableLoader[] tables = tapParams.getTableLoaders(); - if (tables.length > 0){ - logger.info("JOB " + report.jobID + "\tLoading uploaded tables (" + tables.length + ")..."); - long start = System.currentTimeMillis(); - try{ - /* TODO Problem with the DBConnection! One is created here for the Uploader (and dbConn is set) and closed by its uploadTables function (but dbConn is not set to null). - * Ideally, the connection should not be close, or at least dbConn should be set to null just after. 
*/ - uploadSchema = service.getFactory().createUploader(getDBConnection()).upload(tables); - }finally{ - TAPParameters.deleteUploadedTables(tables); - report.setDuration(ExecutionProgression.UPLOADING, System.currentTimeMillis() - start); - } - } - - } - - private final R executeADQL() throws ParseException, InterruptedException, TranslationException, SQLException, TAPException, UWSException{ - long start; - - tapParams.set(TAPJob.PARAM_PROGRESSION, ExecutionProgression.PARSING); - start = System.currentTimeMillis(); - ADQLQuery adql = parseADQL(); - report.setDuration(ExecutionProgression.PARSING, System.currentTimeMillis() - start); - - if (thread.isInterrupted()) - throw new InterruptedException(); - - report.resultingColumns = adql.getResultingColumns(); - - final int limit = adql.getSelect().getLimit(); - final Integer maxRec = tapParams.getMaxRec(); - if (maxRec != null && maxRec > -1){ - if (limit <= -1 || limit > maxRec) - adql.getSelect().setLimit(maxRec + 1); - } - - tapParams.set(TAPJob.PARAM_PROGRESSION, ExecutionProgression.TRANSLATING); - start = System.currentTimeMillis(); - String sqlQuery = translateADQL(adql); - report.setDuration(ExecutionProgression.TRANSLATING, System.currentTimeMillis() - start); - report.sqlTranslation = sqlQuery; - - if (thread.isInterrupted()) - throw new InterruptedException(); + /** + *

Get the object to use in order to write the query result in the appropriate format + * (either the asked one, or else VOTable).

+ * + * @return The appropriate result formatter to use. Can not be NULL! + * + * @throws TAPException If no format corresponds to the asked one and if no default format (for VOTable) can be found. + * + * @see ServiceConnection#getOutputFormat(String) + */ + protected OutputFormat getFormatter() throws TAPException{ + // Search for the corresponding formatter: + String format = tapParams.getFormat(); + OutputFormat formatter = service.getOutputFormat((format == null) ? "votable" : format); + if (format != null && formatter == null) + formatter = service.getOutputFormat("votable"); - tapParams.set(TAPJob.PARAM_PROGRESSION, ExecutionProgression.EXECUTING_SQL); - start = System.currentTimeMillis(); - R result = executeQuery(sqlQuery, adql); - report.setDuration(ExecutionProgression.EXECUTING_SQL, System.currentTimeMillis() - start); + // Format the result: + if (formatter == null) + throw new TAPException("Impossible to format the query result: no formatter has been found for the given MIME type \"" + format + "\" and for the default MIME type \"votable\" (short form) !"); - return result; + return formatter; } - public final TAPExecutionReport start(final AsyncThread thread) throws TAPException, UWSException, InterruptedException, ParseException, TranslationException, SQLException{ + /** + *

Start the asynchronous processing of the ADQL query.

+ * + *

+ * This function initialize the execution report, get the execution parameters (including the query to process) + * and then call {@link #start()}. + *

+ * + * @param thread The asynchronous thread which asks the query processing. + * + * @return The resulting execution report. + * + * @throws UWSException If any error occurs while executing the ADQL query. + * @throws InterruptedException If the job has been interrupted (by the user or a time-out). + * + * @see #start() + */ + public final TAPExecutionReport start(final AsyncThread thread) throws UWSException, InterruptedException{ if (this.thread != null || this.report != null) - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "This ADQLExecutor has already been executed !"); + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "This ADQLExecutor has already been executed!"); this.thread = thread; @@ -159,164 +221,480 @@ public class ADQLExecutor< R > { this.report = new TAPExecutionReport(tapJob.getJobId(), false, tapParams); this.response = null; - return start(); + try{ + return start(); + }catch(IOException ioe){ + throw new UWSException(ioe); + }catch(TAPException te){ + throw new UWSException(te.getHttpErrorCode(), te); + } + } + + /** + *

Create the database connection required for the ADQL execution.

+ * + *

Note: This function has no effect if the DB connection already exists.

+ * + * @param jobID ID of the job which will be executed by this {@link ADQLExecutor}. + * This ID will be the database connection ID. + * + * @throws TAPException If the DB connection creation fails. + * + * @see TAPFactory#getConnection(String) + * + * @since 2.0 + */ + public final void initDBConnection(final String jobID) throws TAPException{ + if (dbConn == null) + dbConn = service.getFactory().getConnection(jobID); } - public final TAPExecutionReport start(final Thread thread, final String jobId, final TAPParameters params, final HttpServletResponse response) throws TAPException, UWSException, InterruptedException, ParseException, TranslationException, SQLException{ + /** + *

Start the synchronous processing of the ADQL query.

+ * + *

This function initialize the execution report and then call {@link #start()}.

+ * + * @param thread The synchronous thread which asks the query processing. + * @param jobId ID of the corresponding job. + * @param params All execution parameters (including the query to process). + * @param response Object in which the result or the error must be written. + * + * @return The resulting execution report. + * + * @throws TAPException If any error occurs while executing the ADQL query. + * @throws IOException If any error occurs while writing the result in the given {@link HttpServletResponse}. + * @throws InterruptedException If the job has been interrupted (by the user or a time-out). + * + * @see #start() + */ + public final TAPExecutionReport start(final Thread thread, final String jobId, final TAPParameters params, final HttpServletResponse response) throws TAPException, IOException, InterruptedException{ if (this.thread != null || this.report != null) - throw new TAPException("This ADQLExecutor has already been executed !"); + throw new TAPException("This ADQLExecutor has already been executed!"); this.thread = thread; this.tapParams = params; this.report = new TAPExecutionReport(jobId, true, tapParams); this.response = response; - return start(); + try{ + return start(); + }catch(UWSException ue){ + throw new TAPException(ue, ue.getHttpErrorCode()); + } } - protected final TAPExecutionReport start() throws TAPException, UWSException, InterruptedException, ParseException, TranslationException, SQLException{ + /** + *

Process the ADQL query.

+ * + *

This function calls the following function (in the same order):

+ *
    + *
  1. {@link TAPFactory#getConnection(String)}
  2. + *
  3. {@link #uploadTables()}
  4. + *
  5. {@link #parseADQL()}
  6. + *
  7. {@link #executeADQL(ADQLQuery)}
  8. + *
  9. {@link #writeResult(TableIterator)}
  10. + *
  11. {@link #dropUploadedTables()}
  12. + *
  13. {@link TAPFactory#freeConnection(DBConnection)}
  14. + *
+ * + *

+ * The execution report is updated gradually. Besides a job parameter - progression - is set at each step of the process in order to + * notify the user of the progression of the query execution. This parameter is removed at the end of the execution if it is successful. + *

+ * + *

The "interrupted" flag of the associated thread is often tested so that stopping the execution as soon as possible.

+ * + * @return The updated execution report. + * + * @throws TAPException If any error occurs while executing the ADQL query. + * @throws UWSException If any error occurs while executing the ADQL query. + * @throws IOException If an error happens while writing the result in the specified {@link HttpServletResponse}. + * That kind of error can be thrown only in synchronous mode. + * In asynchronous, the error is stored as job error report and is never propagated. + * @throws InterruptedException If the job has been interrupted (by the user or a time-out). + */ + protected final TAPExecutionReport start() throws TAPException, UWSException, IOException, InterruptedException{ + logger.logTAP(LogLevel.INFO, report, "START_EXEC", (report.synchronous ? "Synchronous" : "Asynchronous") + " execution of an ADQL query STARTED.", null); + + // Save the start time (for reporting usage): long start = System.currentTimeMillis(); + + TableIterator queryResult = null; + try{ - // Upload tables if needed: - if (tapParams != null && tapParams.getTableLoaders() != null && tapParams.getTableLoaders().length > 0){ - tapParams.set(TAPJob.PARAM_PROGRESSION, ExecutionProgression.UPLOADING); + // Get a "database" connection: + initDBConnection(report.jobID); + + // 1. UPLOAD TABLES, if there is any: + if (tapParams.getUploadedTables() != null && tapParams.getUploadedTables().length > 0){ + startStep(ExecutionProgression.UPLOADING); uploadTables(); + endStep(); } if (thread.isInterrupted()) throw new InterruptedException(); - // Parse, translate in SQL and execute the ADQL query: - R queryResult = executeADQL(); - if (queryResult == null || thread.isInterrupted()) + // 2. 
PARSE THE ADQL QUERY: + startStep(ExecutionProgression.PARSING); + // Parse the query: + ADQLQuery adqlQuery = null; + try{ + adqlQuery = parseADQL(); + }catch(ParseException pe){ + if (report.synchronous) + throw new TAPException("Incorrect ADQL query: " + pe.getMessage(), pe, UWSException.BAD_REQUEST, tapParams.getQuery(), progression); + else + throw new UWSException(UWSException.BAD_REQUEST, pe, "Incorrect ADQL query: " + pe.getMessage()); + } + // List all resulting columns (it will be useful later to format the result): + report.resultingColumns = adqlQuery.getResultingColumns(); + endStep(); + + if (thread.isInterrupted()) throw new InterruptedException(); - // Write the result: - tapParams.set(TAPJob.PARAM_PROGRESSION, ExecutionProgression.WRITING_RESULT); - writeResult(queryResult); + // 3. EXECUTE THE ADQL QUERY: + startStep(ExecutionProgression.EXECUTING_ADQL); + queryResult = executeADQL(adqlQuery); + endStep(); - logger.info("JOB " + report.jobID + " COMPLETED"); - tapParams.set(TAPJob.PARAM_PROGRESSION, ExecutionProgression.FINISHED); + if (thread.isInterrupted()) + throw new InterruptedException(); + // 4. 
WRITE RESULT: + startStep(ExecutionProgression.WRITING_RESULT); + writeResult(queryResult); + endStep(); + + // Report the COMPLETED status: + tapParams.remove(TAPJob.PARAM_PROGRESSION); report.success = true; + // Set the total duration in the report: + report.setTotalDuration(System.currentTimeMillis() - start); + + // Log and report the end of this execution: + logger.logTAP(LogLevel.INFO, report, "END_EXEC", "ADQL query execution finished.", null); + return report; - }catch(NullPointerException npe){ - npe.printStackTrace(); - throw npe; }finally{ + // Close the result if any: + if (queryResult != null){ + try{ + queryResult.close(); + }catch(DataReadException dre){ + logger.logTAP(LogLevel.WARNING, report, "END_EXEC", "Can not close the database query result!", dre); + } + } + + // Drop all the uploaded tables (they are not supposed to exist after the query execution): try{ dropUploadedTables(); }catch(TAPException e){ - logger.error("JOB " + report.jobID + "\tCan not drop uploaded tables !", e); + logger.logTAP(LogLevel.WARNING, report, "END_EXEC", "Can not drop the uploaded tables from the database!", e); } - try{ - closeDBConnection(); - }catch(TAPException e){ - logger.error("JOB " + report.jobID + "\tCan not close the DB connection !", e); + + // Free the connection (so that giving it back to a pool, if any, otherwise, just free resources): + if (dbConn != null){ + service.getFactory().freeConnection(dbConn); + dbConn = null; } - report.setTotalDuration(System.currentTimeMillis() - start); - logger.queryFinished(report); } } - protected ADQLQuery parseADQL() throws ParseException, InterruptedException, TAPException{ - ADQLQueryFactory queryFactory = service.getFactory().createQueryFactory(); - QueryChecker queryChecker = service.getFactory().createQueryChecker(uploadSchema); - ADQLParser parser; - if (queryFactory == null) - parser = new ADQLParser(queryChecker); - else - parser = new ADQLParser(queryChecker, queryFactory); - 
parser.setCoordinateSystems(service.getCoordinateSystems()); - parser.setDebug(false); - //logger.info("Job "+report.jobID+" - 1/5 Parsing ADQL...."); - return parser.parseQuery(tapParams.getQuery()); + /** + *

Memorize the time at which the step starts, the step ID and update the job parameter "progression" + * (to notify the user about the progression of the query processing).

+ * + *

Note: + * If for some reason the job parameter "progression" can not be updated, no error will be thrown. A WARNING message + * will be just written in the log. + *

+ * + *

Note: + * This function is designed to work with {@link #endStep()}, which must be called after it, when the step is finished (successfully or not). + *

+ * + * @param progression ID of the starting step. + * + * @see #endStep() + */ + private void startStep(final ExecutionProgression progression){ + // Save the start time (for report usage): + startStep = System.currentTimeMillis(); + // Memorize the current step: + this.progression = progression; + // Update the job parameter "progression", to notify the user about the progression of the query processing: + try{ + tapParams.set(TAPJob.PARAM_PROGRESSION, this.progression); + }catch(UWSException ue){ + // should not happen, but just in case... + logger.logTAP(LogLevel.WARNING, report, "START_STEP", "Can not set/update the informative job parameter \"" + TAPJob.PARAM_PROGRESSION + "\" (this parameter would be just for notification purpose about the execution progression)!", ue); + } } - protected String translateADQL(ADQLQuery query) throws TranslationException, InterruptedException, TAPException{ - ADQLTranslator translator = service.getFactory().createADQLTranslator(); - //logger.info("Job "+report.jobID+" - 2/5 Translating ADQL..."); - return translator.translate(query); + /** + *

Set the duration of the current step in the execution report.

+ * + *

Note: + * The start time and the ID of the step are then forgotten. + *

+ * + *

Note: + * This function is designed to work with {@link #startStep(ExecutionProgression)}, which must be called before it, when the step is starting. + * It marks the end of a step. + *

+ * + * @see #startStep(ExecutionProgression) + */ + private void endStep(){ + if (progression != null){ + // Set the duration of this step in the execution report: + report.setDuration(progression, System.currentTimeMillis() - startStep); + // No start time: + startStep = -1; + // No step for the moment: + progression = null; + } } - protected R executeQuery(String sql, ADQLQuery adql) throws SQLException, InterruptedException, TAPException{ - //logger.info("Job "+report.jobID+" - 3/5 Creating DBConnection...."); - DBConnection dbConn = getDBConnection(); - //logger.info("Job "+report.jobID+" - 4/5 Executing query...\n"+sql); - final long startTime = System.currentTimeMillis(); - R result = dbConn.executeQuery(sql, adql); - if (result == null) - logger.info("JOB " + report.jobID + " - QUERY ABORTED AFTER " + (System.currentTimeMillis() - startTime) + " MS !"); - else - logger.info("JOB " + report.jobID + " - QUERY SUCCESFULLY EXECUTED IN " + (System.currentTimeMillis() - startTime) + " MS !"); - return result; + /** + *

Create in the "database" all tables uploaded by the user (only for this specific query execution).

+ * + *

Note: + * Obviously, nothing is done if no table has been uploaded. + *

+ * + * @throws TAPException If any error occurs while reading the uploaded table + * or while importing them in the database. + */ + private final void uploadTables() throws TAPException{ + // Fetch the tables to upload: + DALIUpload[] tables = tapParams.getUploadedTables(); + + // Upload them, if needed: + if (tables.length > 0){ + logger.logTAP(LogLevel.INFO, report, "UPLOADING", "Loading uploaded tables (" + tables.length + ")", null); + uploadSchema = service.getFactory().createUploader(dbConn).upload(tables); + } } - protected OutputFormat getFormatter() throws TAPException{ - // Search for the corresponding formatter: - String format = tapParams.getFormat(); - OutputFormat formatter = service.getOutputFormat((format == null) ? "votable" : format); - if (format != null && formatter == null) - formatter = service.getOutputFormat("votable"); + /** + *

Parse the ADQL query provided in the parameters by the user.

+ * + *

The query factory and the query checker are got from the TAP factory.

+ * + *

+ * The configuration of this TAP service list all allowed coordinate systems. These are got here and provided to the query checker + * in order to ensure the coordinate systems used in the query are in this list. + *

+ * + *

+ * The row limit specified in the ADQL query (with TOP) is checked and adjusted (if needed). Indeed, this limit + * can not exceed MAXREC given in parameter and the maximum value specified in the configuration of this TAP service. + * In the case no row limit is specified in the query or the given value is greater than MAXREC, (MAXREC+1) is used by default. + * The "+1" aims to detect overflows. + *

+ * + * @return The object representation of the ADQL query. + * + * @throws ParseException If the given ADQL query can not be parsed or if the construction of the object representation has failed. + * @throws InterruptedException If the thread has been interrupted. + * @throws TAPException If the TAP factory is unable to create the ADQL factory or the query checker. + */ + protected ADQLQuery parseADQL() throws ParseException, InterruptedException, TAPException{ + // Log the start of the parsing: + logger.logTAP(LogLevel.INFO, report, "PARSING", "Parsing ADQL: " + tapParams.getQuery().replaceAll("(\t|\r?\n)+", " "), null); + + // Create the ADQL parser: + ADQLParser parser = service.getFactory().createADQLParser(); + if (parser == null){ + logger.logTAP(LogLevel.WARNING, null, "PARSING", "No ADQL parser returned by the TAPFactory! The default implementation is used instead.", null); + parser = new ADQLParser(); + } - // Format the result: - if (formatter == null) - throw new TAPException("Impossible to format the query result: no formatter has been found for the given MIME type \"" + format + "\" and for the default MIME type \"votable\" (short form) !"); + // Set the ADQL factory: + if (parser.getQueryFactory() == null || parser.getQueryFactory().getClass() == ADQLQueryFactory.class) + parser.setQueryFactory(service.getFactory().createQueryFactory()); - return formatter; + // Set the query checker: + if (parser.getQueryChecker() == null) + parser.setQueryChecker(service.getFactory().createQueryChecker(uploadSchema)); + + // Parse the ADQL query: + ADQLQuery query = parser.parseQuery(tapParams.getQuery()); + + // Set or check the row limit: + final int limit = query.getSelect().getLimit(); + final Integer maxRec = tapParams.getMaxRec(); + if (maxRec != null && maxRec > -1){ + if (limit <= -1 || limit > maxRec) + query.getSelect().setLimit(maxRec + 1); + } + + return query; } - protected final void writeResult(R queryResult) throws InterruptedException, 
TAPException, UWSException{ - OutputFormat formatter = getFormatter(); + /** + *

Execute in "database" the given object representation of an ADQL query.

+ * + *

By default, this function is just calling {@link DBConnection#executeQuery(ADQLQuery)} and then it returns the value returned by this call.

+ * + *

Note: + * An INFO message is logged at the end of the query execution in order to report the result status (success or error) + * and the execution duration. + *

+ * + * @param adql The object representation of the ADQL query to execute. + * + * @return The result of the query, + * or NULL if the query execution has failed. + * + * @throws InterruptedException If the thread has been interrupted. + * @throws TAPException If the {@link DBConnection} has failed to deal with the given ADQL query. + * + * @see DBConnection#executeQuery(ADQLQuery) + */ + protected TableIterator executeADQL(final ADQLQuery adql) throws InterruptedException, TAPException{ + // Log the start of execution: + logger.logTAP(LogLevel.INFO, report, "START_DB_EXECUTION", "ADQL query: " + adql.toADQL().replaceAll("(\t|\r?\n)+", " "), null); + + // Set the fetch size, if any: + if (service.getFetchSize() != null && service.getFetchSize().length >= 1){ + if (report.synchronous && service.getFetchSize().length >= 2) + dbConn.setFetchSize(service.getFetchSize()[1]); + else + dbConn.setFetchSize(service.getFetchSize()[0]); + } + + // Execute the ADQL query: + TableIterator result = dbConn.executeQuery(adql); - // Synchronous case: + // Log the success or failure: + if (result == null) + logger.logTAP(LogLevel.INFO, report, "END_DB_EXECUTION", "Query execution aborted after " + (System.currentTimeMillis() - startStep) + "ms!", null); + else + logger.logTAP(LogLevel.INFO, report, "END_DB_EXECUTION", "Query successfully executed in " + (System.currentTimeMillis() - startStep) + "ms!", null); + + return result; + } + + /** + *

Write the given query result into the appropriate format in the appropriate output + * (HTTP response for a synchronous execution, otherwise a file or any output provided by UWS).

+ * + *

This function prepare the output in function of the execution type (synchronous or asynchronous). + * Once prepared, the result, the output and the formatter to use are given to {@link #writeResult(TableIterator, OutputFormat, OutputStream)} + * which will really process the result formatting and writing. + *

+ * + * @param queryResult The result of the query execution in database. + * + * @throws InterruptedException If the thread has been interrupted. + * @throws IOException If an error happens while writing the result in the {@link HttpServletResponse}. + * That kind of error can be thrown only in synchronous mode. + * In asynchronous, the error is stored as job error report and is never propagated. + * @throws TAPException If an error occurs while getting the appropriate formatter or while formatting or writing (synchronous execution) the result. + * @throws UWSException If an error occurs while getting the output stream or while writing (asynchronous execution) the result. + * + * @see #writeResult(TableIterator, OutputFormat, OutputStream) + */ + protected final void writeResult(final TableIterator queryResult) throws InterruptedException, IOException, TAPException, UWSException{ + // Log the start of the writing: + logger.logTAP(LogLevel.INFO, report, "WRITING_RESULT", "Writing the query result", null); + + // Get the appropriate result formatter: + OutputFormat formatter = getFormatter(); + + // CASE SYNCHRONOUS: if (response != null){ - long start = System.currentTimeMillis(); - try{ - response.setContentType(formatter.getMimeType()); - writeResult(queryResult, formatter, response.getOutputStream()); - }catch(IOException ioe){ - throw new TAPException("Impossible to get the output stream of the HTTP request to write the result of the job " + report.jobID + " !", ioe); - }finally{ - report.setDuration(ExecutionProgression.WRITING_RESULT, System.currentTimeMillis() - start); - } + long start = -1; + + // Set the HTTP content type to the MIME type of the result format: + response.setContentType(formatter.getMimeType()); + + // Set the character encoding: + response.setCharacterEncoding(UWSToolBox.DEFAULT_CHAR_ENCODING); + + // Write the formatted result in the HTTP response output: + start = System.currentTimeMillis(); + writeResult(queryResult, formatter, 
response.getOutputStream()); - }// Asynchronous case: + logger.logTAP(LogLevel.INFO, report, "RESULT_WRITTEN", "Result formatted (in " + formatter.getMimeType() + " ; " + (report.nbRows < 0 ? "?" : report.nbRows) + " rows ; " + ((report.resultingColumns == null) ? "?" : report.resultingColumns.length) + " columns) in " + ((start <= 0) ? "?" : (System.currentTimeMillis() - start)) + "ms!", null); + } + // CASE ASYNCHRONOUS: else{ - long start = System.currentTimeMillis(); + long start = -1, end = -1; try{ + // Create a UWS Result object to store the result + // (the result will be stored in a file and this object is the association between the job and the result file): JobThread jobThread = (JobThread)thread; Result result = jobThread.createResult(); + + // Set the MIME type of the result format in the result description: result.setMimeType(formatter.getMimeType()); + + // Write the formatted result in the file output: + start = System.currentTimeMillis(); writeResult(queryResult, formatter, jobThread.getResultOutput(result)); + end = System.currentTimeMillis(); + + // Set the size (in bytes) of the result in the result description: result.setSize(jobThread.getResultSize(result)); + + // Add the result description and link in the job description: jobThread.publishResult(result); + + logger.logTAP(LogLevel.INFO, report, "RESULT_WRITTEN", "Result formatted (in " + formatter.getMimeType() + " ; " + (report.nbRows < 0 ? "?" : report.nbRows) + " rows ; " + ((report.resultingColumns == null) ? "?" : report.resultingColumns.length) + " columns) in " + ((start <= 0 || end <= 0) ? "?" 
: (end - start)) + "ms!", null); + }catch(IOException ioe){ - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, ioe, "Impossible to get the output stream of the result file to write the result of the job " + report.jobID + " !"); - }finally{ - report.setDuration(ExecutionProgression.WRITING_RESULT, System.currentTimeMillis() - start); + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, ioe, "Impossible to write in the file into the result of the job " + report.jobID + " must be written!"); } } } - protected void writeResult(R queryResult, OutputFormat formatter, OutputStream output) throws InterruptedException, TAPException{ - //logger.info("Job "+report.jobID+" - 5/5 Writing result file..."); + /** + *

Format and write the given result in the given output with the given formatter.

+ * + *

By default, this function is just calling {@link OutputFormat#writeResult(TableIterator, OutputStream, TAPExecutionReport, Thread)}.

+ * + *

Note: + * {@link OutputFormat#writeResult(TableIterator, OutputStream, TAPExecutionReport, Thread)} is often testing the "interrupted" flag of the + * thread in order to stop as fast as possible if the user has cancelled the job or if the thread has been interrupted for another reason. + *

+ * + * @param queryResult Query result to format and to output. + * @param formatter The object able to write the result in the appropriate format. + * @param output The stream in which the result must be written. + * + * @throws InterruptedException If the thread has been interrupted. + * @throws IOException If there is an error while writing the result in the given stream. + * @throws TAPException If there is an error while formatting the result. + */ + protected void writeResult(TableIterator queryResult, OutputFormat formatter, OutputStream output) throws InterruptedException, IOException, TAPException{ formatter.writeResult(queryResult, output, report, thread); } + /** + *

Drop all tables uploaded by the user from the database.

+ * + *

Note: + * By default, if an error occurs while dropping a table from the database, the error will just be logged ; it won't be thrown/propagated. + *

+ * + * @throws TAPException If a grave error occurs. By default, no exception is thrown ; they are just logged. + */ protected void dropUploadedTables() throws TAPException{ if (uploadSchema != null){ // Drop all uploaded tables: - DBConnection dbConn = getDBConnection(); for(TAPTable t : uploadSchema){ try{ - dbConn.dropTable(t); + dbConn.dropUploadedTable(t); }catch(DBException dbe){ - logger.error("JOB " + report.jobID + "\tCan not drop the table \"" + t.getDBName() + "\" (in adql \"" + t.getADQLName() + "\") from the database !", dbe); + logger.logTAP(LogLevel.ERROR, report, "DROP_UPLOAD", "Can not drop the uploaded table \"" + t.getDBName() + "\" (in adql \"" + t.getADQLName() + "\") from the database!", dbe); } } - closeDBConnection(); } } diff --git a/src/tap/AbstractTAPFactory.java b/src/tap/AbstractTAPFactory.java index 91bad42e857dae762b36d571b0a589b5bdf8c75d..624b386a4ab2548b76159c45d54b876c0ef6b7f9 100644 --- a/src/tap/AbstractTAPFactory.java +++ b/src/tap/AbstractTAPFactory.java @@ -16,147 +16,304 @@ package tap; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . 
* - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.util.ArrayList; +import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; import tap.db.DBConnection; +import tap.error.DefaultTAPErrorWriter; import tap.metadata.TAPMetadata; import tap.metadata.TAPSchema; import tap.metadata.TAPTable; import tap.parameters.TAPParameters; - import tap.upload.Uploader; - import uws.UWSException; - import uws.job.ErrorSummary; -import uws.job.JobThread; import uws.job.Result; -import uws.job.UWSJob; - -import uws.job.parameters.UWSParameters; import uws.job.user.JobOwner; - -import uws.service.AbstractUWSFactory; import uws.service.UWSService; import uws.service.backup.UWSBackupManager; +import uws.service.error.ServiceErrorWriter; import adql.db.DBChecker; -import adql.db.DBTable; - +import adql.parser.ADQLParser; import adql.parser.ADQLQueryFactory; +import adql.parser.ParseException; import adql.parser.QueryChecker; +import adql.query.ADQLQuery; -public abstract class AbstractTAPFactory< R > extends AbstractUWSFactory implements TAPFactory { +/** + * Default implementation of most of the {@link TAPFactory} function. + * Only the functions related with the database connection stay abstract. + * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (04/2015) + */ +public abstract class AbstractTAPFactory extends TAPFactory { - protected final ServiceConnection service; + /** The error writer to use when any error occurs while executing a resource or to format an error occurring while executing an asynchronous job. 
*/ + protected final ServiceErrorWriter errorWriter; - protected AbstractTAPFactory(ServiceConnection service) throws NullPointerException{ - if (service == null) - throw new NullPointerException("Can not create a TAPFactory without a ServiceConnection instance !"); + /** + * Build a basic TAPFactory. + * Nothing is done except setting the service connection. + * + * @param service Configuration of the TAP service. MUST NOT be NULL + * + * @throws NullPointerException If the given {@link ServiceConnection} is NULL. + * + * @see AbstractTAPFactory#AbstractTAPFactory(ServiceConnection, ServiceErrorWriter) + */ + protected AbstractTAPFactory(ServiceConnection service) throws NullPointerException{ + this(service, new DefaultTAPErrorWriter(service)); + } - this.service = service; + /** + *

Build a basic TAPFactory. + * Nothing is done except setting the service connection and the given error writer.

+ * + *

Then the error writer will be used when creating a UWS service and a job thread.

+ * + * @param service Configuration of the TAP service. MUST NOT be NULL + * @param errorWriter Object to use to format and write the errors for the user. + * + * @throws NullPointerException If the given {@link ServiceConnection} is NULL. + * + * @see TAPFactory#TAPFactory(ServiceConnection) + */ + protected AbstractTAPFactory(final ServiceConnection service, final ServiceErrorWriter errorWriter) throws NullPointerException{ + super(service); + this.errorWriter = errorWriter; } @Override - public UWSService createUWS() throws TAPException, UWSException{ - return new UWSService(this.service.getFactory(), this.service.getFileManager(), this.service.getLogger()); + public final ServiceErrorWriter getErrorWriter(){ + return errorWriter; } + /* *************** */ + /* ADQL MANAGEMENT */ + /* *************** */ + + /** + *

Note: + * Unless the standard implementation - {@link ADQLExecutor} - does not fit exactly your needs, + * it should not be necessary to extend this class and to extend this function (implemented here by default). + *

+ */ @Override - public UWSBackupManager createUWSBackupManager(final UWSService uws) throws TAPException, UWSException{ - return null; + public ADQLExecutor createADQLExecutor() throws TAPException{ + return new ADQLExecutor(service); } + /** + *

Note: + * This function should be extended if you want to customize the ADQL grammar. + *

+ */ @Override - public UWSJob createJob(HttpServletRequest request, JobOwner owner) throws UWSException{ - if (!service.isAvailable()) - throw new UWSException(HttpServletResponse.SC_SERVICE_UNAVAILABLE, service.getAvailability()); + public ADQLParser createADQLParser() throws TAPException{ + return new ADQLParser(); + } - try{ - TAPParameters tapParams = (TAPParameters)createUWSParameters(request); - return new TAPJob(owner, tapParams); - }catch(TAPException te){ - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, te, "Can not create a TAP asynchronous job !"); - } + /** + *

Note: + * This function should be extended if you have customized the creation of any + * {@link ADQLQuery} part ; it could be the addition of one or several user defined function + * or the modification of any ADQL function or clause specific to your implementation. + *

+ */ + @Override + public ADQLQueryFactory createQueryFactory() throws TAPException{ + return new ADQLQueryFactory(); } + /** + *

This implementation gathers all tables published in this TAP service and those uploaded + * by the user. Then it calls {@link #createQueryChecker(Collection)} with this list in order + * to create a query checked. + *

+ * + *

Note: + * This function can not be overridded, but {@link #createQueryChecker(Collection)} can be. + *

+ */ @Override - public UWSJob createJob(String jobId, JobOwner owner, final UWSParameters params, long quote, long startTime, long endTime, List results, ErrorSummary error) throws UWSException{ - if (!service.isAvailable()) - throw new UWSException(HttpServletResponse.SC_SERVICE_UNAVAILABLE, service.getAvailability()); + public final QueryChecker createQueryChecker(final TAPSchema uploadSchema) throws TAPException{ + // Get all tables published in this TAP service: + TAPMetadata meta = service.getTAPMetadata(); + + // Build a list in order to gather all these with the uploaded ones: + ArrayList tables = new ArrayList(meta.getNbTables()); + + // Add all tables published in TAP: + Iterator it = meta.getTables(); + while(it.hasNext()) + tables.add(it.next()); + + // Add all tables uploaded by the user: + if (uploadSchema != null){ + for(TAPTable table : uploadSchema) + tables.add(table); + } + + // Finally, create the query checker: + return createQueryChecker(tables); + } + + /** + *

Create an object able to check the consistency between the ADQL query and the database. + * That's to say, it checks whether the tables and columns used in the query really exist + * in the database.

+ * + *

Note: + * This implementation just create a {@link DBChecker} instance with the list given in parameter. + *

+ * + * @param tables List of all available tables (and indirectly, columns). + * + * @return A new ADQL query checker. + * + * @throws TAPException If any error occurs while creating the query checker. + */ + protected QueryChecker createQueryChecker(final Collection tables) throws TAPException{ try{ - return new TAPJob(jobId, owner, (TAPParameters)params, quote, startTime, endTime, results, error); - }catch(TAPException te){ - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, te, "Can not create a TAP asynchronous job !"); + return new DBChecker(tables, service.getUDFs(), service.getGeometries(), service.getCoordinateSystems()); + }catch(ParseException e){ + throw new TAPException("Unable to build a DBChecker instance! " + e.getMessage(), e, UWSException.INTERNAL_SERVER_ERROR); } } + /* ****** */ + /* UPLOAD */ + /* ****** */ + + /** + *

This implementation just create an {@link Uploader} instance with the given database connection.

+ * + *

Note: + * This function should be overrided if you need to change the DB name of the TAP_UPLOAD schema. + * Indeed, by overriding this function you can specify a given TAPSchema to use as TAP_UPLOAD schema + * in the constructor of {@link Uploader}. But do not forget that this {@link TAPSchema} instance MUST have + * an ADQL name equals to "TAP_UPLOAD", otherwise, a TAPException will be thrown. + *

+ */ @Override - public final JobThread createJobThread(final UWSJob job) throws UWSException{ + public Uploader createUploader(final DBConnection dbConn) throws TAPException{ + return new Uploader(service, dbConn); + } + + /* ************** */ + /* UWS MANAGEMENT */ + /* ************** */ + + /** + *

This implementation just create a {@link UWSService} instance.

+ * + *

Note: + * This implementation is largely enough for a TAP service. It is not recommended to override + * this function. + *

+ */ + @Override + public UWSService createUWS() throws TAPException{ try{ - return new AsyncThread((TAPJob)job, createADQLExecutor()); - }catch(TAPException te){ - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, te, "Impossible to create an AsyncThread !"); + UWSService uws = new UWSService(this, this.service.getFileManager(), this.service.getLogger()); + uws.setName("TAP/async"); + uws.setErrorWriter(errorWriter); + return uws; + }catch(UWSException ue){ + throw new TAPException("Can not create a UWS service (asynchronous resource of TAP)!", ue, UWSException.INTERNAL_SERVER_ERROR); } } - public ADQLExecutor createADQLExecutor() throws TAPException{ - return new ADQLExecutor(service); + /** + *

This implementation does not provided a backup manager. + * It means that no asynchronous job will be restored and backuped.

+ * + *

You must override this function if you want enable the backup feature.

+ */ + @Override + public UWSBackupManager createUWSBackupManager(final UWSService uws) throws TAPException{ + return null; } /** - * Extracts the parameters from the given request (multipart or not). - * This function is used only to set UWS parameters, not to create a TAP query (for that, see {@link TAPParameters}). + *

This implementation provides a basic {@link TAPJob} instance.

* - * @see uws.service.AbstractUWSFactory#extractParameters(javax.servlet.http.HttpServletRequest, uws.service.UWS) + *

+ * If you need to add or modify the behavior of some functions of a {@link TAPJob}, + * you must override this function and return your own extension of {@link TAPJob}. + *

*/ @Override - public UWSParameters createUWSParameters(HttpServletRequest request) throws UWSException{ + protected TAPJob createTAPJob(final HttpServletRequest request, final JobOwner owner) throws UWSException{ try{ - return new TAPParameters(request, service, getExpectedAdditionalParameters(), getInputParamControllers()); + TAPParameters tapParams = createTAPParameters(request); + return new TAPJob(owner, tapParams); }catch(TAPException te){ - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, te); + if (te.getCause() != null && te.getCause() instanceof UWSException) + throw (UWSException)te.getCause(); + else + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, te, "Can not create a TAP asynchronous job!"); } } + /** + *

This implementation provides a basic {@link TAPJob} instance.

+ * + *

+ * If you need to add or modify the behavior of some functions of a {@link TAPJob}, + * you must override this function and return your own extension of {@link TAPJob}. + *

+ */ @Override - public UWSParameters createUWSParameters(Map params) throws UWSException{ + protected TAPJob createTAPJob(final String jobId, final JobOwner owner, final TAPParameters params, final long quote, final long startTime, final long endTime, final List results, final ErrorSummary error) throws UWSException{ try{ - return new TAPParameters(service, params, getExpectedAdditionalParameters(), getInputParamControllers()); + return new TAPJob(jobId, owner, params, quote, startTime, endTime, results, error); }catch(TAPException te){ - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, te); + if (te.getCause() != null && te.getCause() instanceof UWSException) + throw (UWSException)te.getCause(); + else + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, te, "Can not create a TAP asynchronous job !"); } } + /** + *

This implementation extracts standard TAP parameters from the given request.

+ * + *

+ * Non-standard TAP parameters are added in a map inside the returned {@link TAPParameters} object + * and are accessible with {@link TAPParameters#get(String)} and {@link TAPParameters#getAdditionalParameters()}. + * However, if you want to manage them in another way, you must extend {@link TAPParameters} and override + * this function in order to return an instance of your extension. + *

+ */ @Override - public ADQLQueryFactory createQueryFactory() throws TAPException{ - return new ADQLQueryFactory(); + public TAPParameters createTAPParameters(final HttpServletRequest request) throws TAPException{ + return new TAPParameters(request, service); } + /** + *

This implementation extracts standard TAP parameters from the given request.

+ * + *

+ * Non-standard TAP parameters are added in a map inside the returned {@link TAPParameters} object + * and are accessible with {@link TAPParameters#get(String)} and {@link TAPParameters#getAdditionalParameters()}. + * However, if you want to manage them in another way, you must extend {@link TAPParameters} and override + * this function in order to return an instance of your extension. + *

+ */ @Override - public QueryChecker createQueryChecker(TAPSchema uploadSchema) throws TAPException{ - TAPMetadata meta = service.getTAPMetadata(); - ArrayList tables = new ArrayList(meta.getNbTables()); - Iterator it = meta.getTables(); - while(it.hasNext()) - tables.add(it.next()); - if (uploadSchema != null){ - for(TAPTable table : uploadSchema) - tables.add(table); - } - return new DBChecker(tables); - } - - public Uploader createUploader(final DBConnection dbConn) throws TAPException{ - return new Uploader(service, dbConn); + public TAPParameters createTAPParameters(final Map params) throws TAPException{ + return new TAPParameters(service, params); } } diff --git a/src/tap/AsyncThread.java b/src/tap/AsyncThread.java index 4581c0900f4cb93f8b9814e9ead36d9fc690a33d..b97131a13f516fb8293c944f6f4bf06c34ff4640 100644 --- a/src/tap/AsyncThread.java +++ b/src/tap/AsyncThread.java @@ -16,35 +16,63 @@ package tap; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ -import adql.parser.ParseException; -import adql.translator.TranslationException; import uws.UWSException; - import uws.job.JobThread; +import uws.service.error.ServiceErrorWriter; -public class AsyncThread< R > extends JobThread { +/** + * Thread in charge of a TAP job execution. + * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (02/2015) + */ +public class AsyncThread extends JobThread { - protected final ADQLExecutor executor; + /** The only object which knows how to execute an ADQL query. */ + protected final ADQLExecutor executor; - public AsyncThread(TAPJob j, ADQLExecutor executor) throws UWSException{ - super(j, "Execute the ADQL query of the TAP request " + j.getJobId()); + /** + * Build a TAP asynchronous job execution. 
+ * + * @param j Description of the job to execute. + * @param executor The object to use for the ADQL execution itself. + * @param errorWriter The object to use to format and to write an execution error for the user. + * + * @throws NullPointerException If the job parameter or the {@link ADQLExecutor} is missing. + */ + public AsyncThread(final TAPJob j, final ADQLExecutor executor, final ServiceErrorWriter errorWriter) throws NullPointerException{ + super(j, "Execute the ADQL query of the TAP request " + j.getJobId(), errorWriter); + if (executor == null) + throw new NullPointerException("Missing ADQLExecutor! Can not create an instance of AsyncThread without."); this.executor = executor; } - @Override - public void interrupt(){ - if (isAlive()){ - try{ - executor.closeDBConnection(); - }catch(TAPException e){ - if (job != null && job.getLogger() != null) - job.getLogger().error("Can not close the DBConnection for the executing job \"" + job.getJobId() + "\" ! => the job will be probably not totally aborted.", e); - } + /** + *

Check whether this thread is able to start right now.

+ * + *

+ * Basically, this function asks to the {@link ADQLExecutor} to get a database connection. If no DB connection is available, + * then this thread can not start and this function return FALSE. In all the other cases, TRUE is returned. + *

+ * + *

Warning: This function will indirectly open and keep a database connection, so that the job can be started just after its call. + * If it turns out that the execution won't start just after this call, the DB connection should be closed in some way in order to save database resources.

+ * + * @return true if this thread can start right now, false otherwise. + * + * @since 2.0 + */ + public final boolean isReadyForExecution(){ + try{ + executor.initDBConnection(job.getJobId()); + return true; + }catch(TAPException te){ + return false; } - super.interrupt(); } @Override @@ -55,12 +83,6 @@ public class AsyncThread< R > extends JobThread { throw ie; }catch(UWSException ue){ throw ue; - }catch(TAPException te){ - throw new UWSException(te.getHttpErrorCode(), te, te.getMessage()); - }catch(ParseException pe){ - throw new UWSException(UWSException.BAD_REQUEST, pe, pe.getMessage()); - }catch(TranslationException te){ - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, te, te.getMessage()); }catch(Exception ex){ throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, ex, "Error while processing the ADQL query of the job " + job.getJobId() + " !"); }finally{ @@ -68,6 +90,11 @@ public class AsyncThread< R > extends JobThread { } } + /** + * Get the description of the job that this thread is executing. + * + * @return The executed job. + */ public final TAPJob getTAPJob(){ return (TAPJob)job; } diff --git a/src/tap/ExecutionProgression.java b/src/tap/ExecutionProgression.java index 618d2104813c7d0584be6f136e4b4390b3ef95a6..4086ccdec638ef9b3139e7dae75949c38e291884 100644 --- a/src/tap/ExecutionProgression.java +++ b/src/tap/ExecutionProgression.java @@ -16,9 +16,16 @@ package tap; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ +/** + * Let describe the current status of a job execution. 
+ * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (07/2014) + */ public enum ExecutionProgression{ - PENDING, UPLOADING, PARSING, TRANSLATING, EXECUTING_SQL, WRITING_RESULT, FINISHED; + PENDING, UPLOADING, PARSING, EXECUTING_ADQL, WRITING_RESULT, FINISHED; } diff --git a/src/tap/ServiceConnection.java b/src/tap/ServiceConnection.java index 40e3745205e86a5a6dcea1cdd9fa647eb5459935..dfa94731379eb8a655551504abdbf1394510ca4d 100644 --- a/src/tap/ServiceConnection.java +++ b/src/tap/ServiceConnection.java @@ -16,66 +16,679 @@ package tap; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.util.Collection; import java.util.Iterator; -import tap.file.TAPFileManager; - +import tap.db.DBConnection; import tap.formatter.OutputFormat; - +import tap.log.DefaultTAPLog; import tap.log.TAPLog; - import tap.metadata.TAPMetadata; - import uws.service.UserIdentifier; +import uws.service.file.LocalUWSFileManager; +import uws.service.file.UWSFileManager; +import adql.db.FunctionDef; -public interface ServiceConnection< R > { +/** + *

Description and parameters list of a TAP service.

+ * + *

+ * Through this object, it is possible to configure the different limits and formats, + * but also to list all available tables and columns, to declare geometry features as all allowed user defined functions + * and to say where log and other kinds of files must be stored. + *

+ * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (03/2015) + */ +public interface ServiceConnection { + /** + * List of possible limit units. + * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (01/2015) + */ public static enum LimitUnit{ - rows, bytes; + rows("row"), bytes("byte"), kilobytes("kilobyte"), megabytes("megabyte"), gigabytes("gigabyte"); + + private final String str; + + private LimitUnit(final String str){ + this.str = str; + } + + /** + * Tells whether the given unit has the same type (bytes or rows). + * + * @param anotherUnit A unit. + * + * @return true if the given unit has the same type, false otherwise. + * + * @since 1.1 + */ + public boolean isCompatibleWith(final LimitUnit anotherUnit){ + if (this == rows) + return anotherUnit == rows; + else + return anotherUnit != rows; + } + + /** + * Gets the factor to convert into bytes the value expressed in this unit. + * Note: if this unit is not a factor of bytes, 1 is returned (so that the factor does not affect the value). + * + * @return The factor need to convert a value expressed in this unit into bytes, or 1 if not a bytes derived unit. + * + * @since 1.1 + */ + public long bytesFactor(){ + switch(this){ + case bytes: + return 1; + case kilobytes: + return 1000; + case megabytes: + return 1000000; + case gigabytes: + return 1000000000l; + default: + return 1; + } + } + + /** + * Compares the 2 given values (each one expressed in the given unit). + * Conversions are done internally in order to make a correct comparison between the 2 limits. + * + * @param leftLimit Value/Limit of the comparison left part. + * @param leftUnit Unit of the comparison left part value. + * @param rightLimit Value/Limit of the comparison right part. + * @param rightUnit Unit of the comparison right part value. + * + * @return the value 0 if x == y; a value less than 0 if x < y; and a value greater than 0 if x > y + * + * @throws TAPException If the two given units are not compatible. 
+ * + * @see tap.ServiceConnection.LimitUnit#isCompatibleWith(tap.ServiceConnection.LimitUnit) + * @see #bytesFactor() + * @see Integer#compare(int, int) + * @see Long#compare(long, long) + * + * @since 1.1 + */ + public static int compare(final int leftLimit, final LimitUnit leftUnit, final int rightLimit, final LimitUnit rightUnit) throws TAPException{ + if (!leftUnit.isCompatibleWith(rightUnit)) + throw new TAPException("Limit units (" + leftUnit + " and " + rightUnit + ") are not compatible!"); + + if (leftUnit == rows || leftUnit == rightUnit) + return compare(leftLimit, rightLimit); + else + return compare(leftLimit * leftUnit.bytesFactor(), rightLimit * rightUnit.bytesFactor()); + } + + /** + *

(Strict copy of Integer.compare(int,int) of Java 1.7)

+ *

+ * Compares two {@code int} values numerically. + * The value returned is identical to what would be returned by: + *

+ *
+		 *    Integer.valueOf(x).compareTo(Integer.valueOf(y))
+		 * 
+ * + * @param x the first {@code int} to compare + * @param y the second {@code int} to compare + * @return the value {@code 0} if {@code x == y}; + * a value less than {@code 0} if {@code x < y}; and + * a value greater than {@code 0} if {@code x > y} + * + * @since 1.1 + */ + private static int compare(int x, int y){ + return (x < y) ? -1 : ((x == y) ? 0 : 1); + } + + /** + *

(Strict copy of Integer.compare(long,long) of Java 1.7)

+ *

+ * Compares two {@code long} values numerically. + * The value returned is identical to what would be returned by: + *

+ *
+		 *    Long.valueOf(x).compareTo(Long.valueOf(y))
+		 * 
+ * + * @param x the first {@code long} to compare + * @param y the second {@code long} to compare + * @return the value {@code 0} if {@code x == y}; + * a value less than {@code 0} if {@code x < y}; and + * a value greater than {@code 0} if {@code x > y} + * + * @since 1.1 + */ + public static int compare(long x, long y){ + return (x < y) ? -1 : ((x == y) ? 0 : 1); + } + + @Override + public String toString(){ + return str; + } } + /** + * [OPTIONAL] + *

Name of the service provider ; it can be an organization as an individual person.

+ * + *

There is no restriction on the syntax or on the label to use ; this information is totally free

+ * + *

It will be used as additional information (INFO tag) in any VOTable and HTML output.

+ * + * @return The TAP service provider or NULL to leave this field blank. + */ public String getProviderName(); + /** + * [OPTIONAL] + *

Description of the service provider.

+ * + *

It will be used as additional information (INFO tag) in any VOTable output.

+ * + * @return The TAP service description or NULL to leave this field blank. + */ public String getProviderDescription(); + /** + * [MANDATORY] + *

This function tells whether the TAP service is available + * (that's to say, "able to execute requests" ; resources like /availability, /capabilities and /tables may still work).

+ * + *

+ * A message explaining the current state of the TAP service could be provided thanks to {@link #getAvailability()}. + *

+ * + * @return true to enable all TAP resources, false to disable all of them (except /availability). + */ public boolean isAvailable(); + /** + * [OPTIONAL] + *

Get an explanation about the current TAP service state (working or not). + * This message aims to provide more details to the users about the availability of this service, + * or more particularly about its unavailability.

+ * + * @return Explanation about the TAP service state. + */ public String getAvailability(); + /** + * [MANDATORY] + *

This function sets the state of the whole TAP service. + * If true, all TAP resources will be able to execute resources. + * If false, /sync and /async won't answer any more to requests and a HTTP-503 (Service unavailable) + * error will be returned. + *

+ * + * @param isAvailable true to enable all resources, false to forbid /sync and /async (all other resources will still be available). + * @param message A message describing the current state of the service. If NULL, a default message may be set by the library. + * + * @since 2.0 + */ + public void setAvailable(final boolean isAvailable, final String message); + + /** + * [OPTIONAL] + *

Get the limit of the retention period (in seconds).

+ * + *

+ * It is the maximum period while an asynchronous job can leave in the jobs list + * and so can stay on the server. + *

+ * + *

Important notes:

+ *
    + *
  • Exactly 2 values or a NULL object is expected here.
  • + *
  • If NULL, the retention period is not limited and jobs will + * theoretically stay infinitely on the server.
  • + *
  • If not NULL, the 2 values must correspond to the default retention period + * and the maximum retention period.
  • + *
  • The default value is used to set the retention period when a job is created with no user defined retention period.
  • + *
  • The maximum value is used to limit the retention period when specified by the user while creating a job.
  • + *
  • The default value MUST be less or equals the maximum value.
  • + *
  • Both values must be positive. If a negative value is given it will be interpreted as "no limit".
  • + *
+ * + * @return NULL if no limit must be set, or a two-items array ([0]: default value, [1]: maximum value). + */ public int[] getRetentionPeriod(); + /** + * [OPTIONAL] + *

Get the limit of the job execution duration (in milliseconds).

+ * + *

+ * It is the duration of a running job (including the query execution). + * This duration is used for synchronous AND asynchronous jobs. + *

+ * + *

Important notes:

+ *
    + *
  • Exactly 2 values or a NULL object is expected here.
  • + *
  • If NULL, the execution duration is not limited and jobs could + * theoretically run infinitely.
  • + *
  • If not NULL, the 2 values must correspond to the default execution duration + * and the maximum execution duration.
  • + *
  • The default value is used to set the execution duration when a job is created with no user defined execution duration.
  • + *
  • The maximum value is used to limit the execution duration when specified by the user while creating a job.
  • + *
  • The default value MUST be less or equals the maximum value.
  • + *
  • Both values must be positive. If a negative value is given it will be interpreted as "no limit".
  • + *
+ * + * @return NULL if no limit must be set, or a two-items array ([0]: default value, [1]: maximum value). + */ public int[] getExecutionDuration(); + /** + * [OPTIONAL] + *

Get the limit of the job execution result.

+ * + *

+ * This value will limit the size of the query results, either in rows or in bytes. + * The type of limit is defined by the function {@link #getOutputLimitType()}. + *

+ * + *

Important notes:

+ *
    + *
  • Exactly 2 values or a NULL object is expected here.
  • + *
  • If NULL, the output limit is not limited and jobs could theoretically + * return very big files.
  • + *
  • If not NULL, the 2 values must correspond to the default output limit + * and the maximum output limit.
  • + *
  • The default value is used to set the output limit when a job is created with no user defined output limit.
  • + *
  • The maximum value is used to limit the output limit when specified by the user while creating a job.
  • + *
  • The structure of the object returned by this function MUST be the same as the object returned by {@link #getOutputLimitType()}. + * Particularly, the type given by the N-th item of {@link #getOutputLimitType()} must correspond to the N-th limit returned by this function.
  • + *
  • The default value MUST be less or equals the maximum value.
  • + *
  • Both values must be positive. If a negative value is given it will be interpreted as "no limit".
  • + *
+ * + *

Important note: + * Currently, the default implementations of the library is only able to deal with output limits in ROWS.
+ * Anyway, in order to save performances, it is strongly recommended to use ROWS limit rather than in bytes. Indeed, the rows limit can be taken + * into account at the effective execution of the query (so before getting the result), on the contrary of the bytes limit which + * will be applied on the query result. + *

+ * + * @return NULL if no limit must be set, or a two-items array ([0]: default value, [1]: maximum value). + * + * @see #getOutputLimitType() + */ public int[] getOutputLimit(); + /** + * [OPTIONAL] + *

Get the type of each output limit set by this service connection (and accessible with {@link #getOutputLimit()}).

+ * + *

Important notes:

+ *
    + *
  • Exactly 2 values or a NULL object is expected here.
  • + *
  • If NULL, the output limit will be considered as expressed in ROWS.
  • + *
  • The structure of the object returned by this function MUST be the same as the object returned by {@link #getOutputLimit()}. + * Particularly, the type given by the N-th item of this function must correspond to the N-th limit returned by {@link #getOutputLimit()}.
  • + *
+ * + *

Important note: + * Currently, the default implementations of the library is only able to deal with output limits in ROWS.
+ * Anyway, in order to save performances, it is strongly recommended to use ROWS limit rather than in bytes. Indeed, the rows limit can be taken + * into account at the effective execution of the query (so before getting the result), on the contrary of the bytes limit which + * will be applied on the query result. + *

+ * + * @return NULL if limits should be expressed in ROWS, or a two-items array ([0]: type of getOutputLimit()[0], [1]: type of getOutputLimit()[1]). + * + * @see #getOutputLimit() + */ public LimitUnit[] getOutputLimitType(); + /** + * [OPTIONAL] + *

Get the object to use in order to identify users at the origin of requests.

+ * + * @return NULL if no user identification should be done, a {@link UserIdentifier} instance otherwise. + */ public UserIdentifier getUserIdentifier(); + /** + * [MANDATORY] + *

This function let enable or disable the upload capability of this TAP service.

+ * + *

Note: + * If the upload is disabled, the request is aborted and an HTTP-400 error is thrown each time some tables are uploaded. + *

+ * + * @return true to enable the upload capability, false to disable it. + */ public boolean uploadEnabled(); + /** + * [OPTIONAL] + *

Get the maximum size of EACH uploaded table.

+ * + *

+ * This value is expressed either in rows or in bytes. + * The unit limit is defined by the function {@link #getUploadLimitType()}. + *

+ * + *

Important notes:

+ *
    + *
  • Exactly 2 values or a NULL object is expected here.
  • + *
  • If NULL, the upload limit is not limited and uploads could be + * theoretically unlimited.
  • + *
  • If not NULL, the 2 values must correspond to the default upload limit + * and the maximum upload limit.
  • + *
  • The default value is used inform the user about the server wishes.
  • + *
  • The maximum value is used to really limit the upload limit.
  • + *
  • The structure of the object returned by this function MUST be the same as the object returned by {@link #getUploadLimitType()}. + * Particularly, the type given by the N-th item of {@link #getUploadLimitType()} must correspond to the N-th limit returned by this function.
  • + *
  • The default value MUST be less or equals the maximum value.
  • + *
  • Both values must be positive. If a negative value is given it will be interpreted as "no limit".
  • + *
+ * + *

Important note: + * To save performances, it is recommended to use BYTES limit rather than in rows. Indeed, the bytes limit can be taken + * into account at directly when reading the bytes of the request, on the contrary of the rows limit which + * requires to parse the uploaded tables. + *

+ * + * @return NULL if no limit must be set, or a two-items array ([0]: default value, [1]: maximum value). + * + * @see #getUploadLimitType() + */ public int[] getUploadLimit(); + /** + * [OPTIONAL] + *

Get the type of each upload limit set by this service connection (and accessible with {@link #getUploadLimit()}).

+ * + *

Important notes:

+ *
    + *
  • Exactly 2 values or a NULL object is expected here.
  • + *
  • If NULL, the upload limit will be considered as expressed in ROWS.
  • + *
  • The structure of the object returned by this function MUST be the same as the object returned by {@link #getUploadLimit()}. + * Particularly, the type given by the N-th item of this function must correspond to the N-th limit returned by {@link #getUploadLimit()}.
  • + *
+ * + *

Important note: + * To save performances, it is recommended to use BYTES limit rather than in rows. Indeed, the bytes limit can be taken + * into account at directly when reading the bytes of the request, on the contrary of the rows limit which + * requires to parse the uploaded tables. + *

+ * + * @return NULL if limits should be expressed in ROWS, or a two-items array ([0]: type of getUploadLimit()[0], [1]: type of getUploadLimit()[1]). + * + * @see #getUploadLimit() + */ public LimitUnit[] getUploadLimitType(); + /** + * [OPTIONAL] + *

Get the maximum size of the whole set of all tables uploaded in one request. + * This size is expressed in bytes.

+ * + *

IMPORTANT 1: + * This value is always used when the upload capability is enabled. + *

+ * + *

IMPORTANT 2: + * The value returned by this function MUST always be positive. + * A zero or negative value will throw an exception later while + * reading parameters in a request with some uploaded tables. + *

+ * + * @return A positive (>0) value corresponding to the maximum number of bytes of all uploaded tables sent in one request. + */ public int getMaxUploadSize(); + /** + * [MANDATORY] + *

Get the list of all available tables and columns.

+ * + *

+ * This object is really important since it lets the library check ADQL queries properly and set the good type + * and formatting in the query results. + *

+ * + * @return A TAPMetadata object. NULL is not allowed and will throw a grave error at the service initialization. + */ public TAPMetadata getTAPMetadata(); + /** + * [OPTIONAL] + *

Get the list of all allowed coordinate systems.

+ * + * Special values + * + *

Two special values can be returned by this function:

+ *
    + *
  • NULL which means that all coordinate systems are allowed,
  • + *
  • the empty list which means that no coordinate system - except + * the default one (which can be reduced to an empty string) - is allowed.
  • + *
+ * + * List item syntax + * + *

+ * Each item of this list is a pattern and not a simple coordinate system. + * Thus each item MUST respect the following syntax: + *

+ *
{framePattern} {refposPattern} {flavorPattern}
+ *

+ * Contrary to a coordinate system expression, all these 3 information are required. + * Each may take 3 kinds of value: + *

+ *
    + *
  • a single value (i.e. "ICRS"),
  • + *
  • a list of values with the syntax ({value1}|{value2}|...) (i.e. "(ICRS|FK4)"),
  • + *
  • a "*" which means that all values are possible. + *
+ *

+ * For instance: (ICRS|FK4) HELIOCENTER * is a good syntax, + * but not ICRS or ICRS HELIOCENTER. + *

+ * + *

Note: + * Even if not explicitly part of the possible values, the default value of each part (i.e. UNKNOWNFRAME for frame) is always taken into account by the library. + * Particularly, the empty string will always be allowed even if not explicitly listed in the list returned by this function. + *

+ * + * @return NULL to allow ALL coordinate systems, an empty list to allow NO coordinate system, + * or a list of coordinate system patterns otherwise. + */ public Collection getCoordinateSystems(); + /** + * [OPTIONAL] + *

Get the list of all allowed geometrical functions.

+ * + * Special values + * + *

Two special values can be returned by this function:

+ *
    + *
  • NULL which means that all geometrical functions are allowed,
  • + *
  • the empty list which means that no geometrical functions is allowed.
  • + *
+ * + * List item syntax + * + *

+ * Each item of the returned list MUST be a function name (i.e. "CONTAINS", "POINT"). + * It can also be a type of STC region to forbid (i.e. "POSITION", "UNION"). + *

+ * + *

The given names are not case sensitive.

+ * + * @return NULL to allow ALL geometrical functions, an empty list to allow NO geometrical function, + * or a list of geometrical function names otherwise. + * + * @since 2.0 + */ + public Collection getGeometries(); + + /** + * [OPTIONAL] + *

Get the list of all allowed User Defined Functions (UDFs).

+ * + * Special values + * + *

Two special values can be returned by this function:

+ *
    + *
  • NULL which means that all unknown functions (which should be UDFs) are allowed,
  • + *
  • the empty list which means that no unknown functions (which should be UDFs) is allowed.
  • + *
+ * + * List item syntax + * + *

+ * Each item of the returned list MUST be an instance of {@link FunctionDef}. + *

+ * + * @return NULL to allow ALL unknown functions, an empty list to allow NO unknown function, + * or a list of user defined functions otherwise. + * + * @since 2.0 + */ + public Collection getUDFs(); + + /** + * [OPTIONAL] + * + *

Get the maximum number of asynchronous jobs that can run in the same time.

+ * + *

A null or negative value means no limit on the number of running asynchronous jobs.

+ * + * @return Maximum number of running jobs (≤0 => no limit). + * + * @since 2.0 + */ + public int getNbMaxAsyncJobs(); + + /** + * [MANDATORY] + *

Get the logger to use in the whole service when any error, warning or info happens.

+ * + *

IMPORTANT: + * If NULL is returned by this function, grave errors will occur while executing a query or managing an error. + * It is strongly recommended to provide a logger, even a basic implementation. + *

+ * + *

Piece of advice: + * A default implementation like {@link DefaultTAPLog} would be most of time largely enough. + *

+ * + * @return An instance of {@link TAPLog}. + */ public TAPLog getLogger(); - public TAPFactory getFactory(); + /** + * [MANDATORY] + *

Get the object able to build other objects essentials to configure the TAP service or to run every queries.

+ * + *

IMPORTANT: + * If NULL is returned by this function, grave errors will occur while initializing the service. + *

+ * + *

Piece of advice: + * The {@link TAPFactory} is an interface which contains a lot of functions to implement. + * It is rather recommended to extend {@link AbstractTAPFactory}: just 2 functions + * ({@link AbstractTAPFactory#freeConnection(DBConnection)} and {@link AbstractTAPFactory#getConnection(String)}) + * will have to be implemented. + *

+ * + * @return An instance of {@link TAPFactory}. + * + * @see AbstractTAPFactory + */ + public TAPFactory getFactory(); + + /** + * [MANDATORY] + *

Get the object in charge of the files management. + * This object manages log, error, result and backup files of the whole service.

+ * + *

IMPORTANT: + * If NULL is returned by this function, grave errors will occur while initializing the service. + *

+ * + *

Piece of advice: + * The library provides a default implementation of the interface {@link UWSFileManager}: + * {@link LocalUWSFileManager}, which stores all files on the local file-system. + *

+ * + * @return An instance of {@link UWSFileManager}. + */ + public UWSFileManager getFileManager(); - public TAPFileManager getFileManager(); + /** + * [MANDATORY] + *

Get the list of all available output formats.

+ * + *

IMPORTANT:

+ *
    + *
  • All formats of this list MUST have a different MIME type.
  • + *
  • At least one item must correspond to the MIME type "votable".
  • + *
  • If NULL is returned by this function, grave errors will occur while writing the capabilities of this service.
  • + * + * + * @return An iterator on the list of all available output formats. + */ + public Iterator getOutputFormats(); - public Iterator> getOutputFormats(); + /** + * [MANDATORY] + *

    Get the output format having the given MIME type (or short MIME type ~ alias).

    + * + *

    IMPORTANT: + * This function MUST always return an {@link OutputFormat} instance when the MIME type "votable" is given in parameter. + *

    + * + * @param mimeOrAlias MIME type or short MIME type of the format to get. + * + * @return The corresponding {@link OutputFormat} or NULL if not found. + */ + public OutputFormat getOutputFormat(final String mimeOrAlias); - public OutputFormat getOutputFormat(final String mimeOrAlias); + /** + * [OPTIONAL] + *

    Get the size of result blocks to fetch from the database.

    + * + *

    + * Rather than fetching a query result in a whole, it may be possible to specify to the database + * that results may be retrieved by blocks whose the size can be specified by this function. + * If supported by the DBMS and the JDBC driver, this feature may help sparing memory and avoid + * too much waiting time from the TAP /sync users (and thus, avoiding some HTTP client timeouts). + *

    + * + *

    Note: + * Generally, this feature is well supported by DBMS. But for that, the used JDBC driver must use + * the V3 protocol. If anyway, this feature is supported neither by the DBMS, the JDBC driver nor your + * {@link DBConnection}, no error will be thrown if a value is returned by this function: it will be silently + * ignored by the library. + *

    + * + * @return null or an array of 1 or 2 integers. + * If null (or empty array), no attempt to set fetch size will be done and so, ONLY the default + * value of the {@link DBConnection} will be used. + * [0]=fetchSize for async queries, [1]=fetchSize for sync queries. + * If [1] is omitted, it will be considered as equals to [0]. + * If a fetchSize is negative or null, the default value of your JDBC driver will be used. + * + * @since 2.0 + */ + public int[] getFetchSize(); } diff --git a/src/tap/TAPException.java b/src/tap/TAPException.java index 89a4feeef6723558f5d32f9203862329310ec361..edfe745e98908b2e2669acc03bc10d740278d10d 100644 --- a/src/tap/TAPException.java +++ b/src/tap/TAPException.java @@ -16,129 +16,373 @@ package tap; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import uws.UWSException; +/** + *

    Any exception that occurred while a TAP service activity.

    + * + *

    Most of the time this exception wraps another exception (e.g. {@link UWSException}).

    + * + *

    It contains an HTTP status code, set by default to HTTP-500 (Internal Server Error).

    + * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (04/2015) + */ public class TAPException extends Exception { private static final long serialVersionUID = 1L; + /** An ADQL query which were executed when the error occurred. */ private String adqlQuery = null; + + /** The ADQL query execution status (e.g. uploading, parsing, executing) just when the error occurred. */ private ExecutionProgression executionStatus = null; + /** The HTTP status code to set in the HTTP servlet response if the exception reaches the servlet. */ private int httpErrorCode = UWSException.INTERNAL_SERVER_ERROR; + /** + * Standard TAP exception: no ADQL query or execution status specified. + * The corresponding HTTP status code will be HTTP-500 (Internal Server Error). + * + * @param message Message explaining the error. + */ public TAPException(String message){ super(message); } + /** + * Standard TAP exception: no ADQL query or execution status specified. + * The corresponding HTTP status code is set by the second parameter. + * + * @param message Message explaining the error. + * @param httpErrorCode HTTP response status code. (if ≤ 0, 500 will be set by default) + */ public TAPException(String message, int httpErrorCode){ super(message); this.httpErrorCode = httpErrorCode; } + /** + * TAP exception with the ADQL query which were executed when the error occurred. + * No execution status specified. + * The corresponding HTTP status code will be HTTP-500 (Internal Server Error). + * + * @param message Message explaining the error. + * @param query The ADQL query which were executed when the error occurred. + */ public TAPException(String message, String query){ super(message); adqlQuery = query; } + /** + * TAP exception with the ADQL query which were executed when the error occurred. + * No execution status specified. + * The corresponding HTTP status code is set by the second parameter. + * + * @param message Message explaining the error. 
+ * @param httpErrorCode HTTP response status code. (if ≤ 0, 500 will be set by default) + * @param query The ADQL query which were executed when the error occurred. + */ public TAPException(String message, int httpErrorCode, String query){ this(message, httpErrorCode); adqlQuery = query; } + /** + * TAP exception with the ADQL query which were executed when the error occurred, + * AND with its execution status (e.g. uploading, parsing, executing, ...). + * The corresponding HTTP status code will be HTTP-500 (Internal Server Error). + * + * @param message Message explaining the error. + * @param query The ADQL query which were executed when the error occurred. + * @param status Execution status/phase of the given ADQL query when the error occurred. + */ public TAPException(String message, String query, ExecutionProgression status){ this(message, query); executionStatus = status; } + /** + * TAP exception with the ADQL query which were executed when the error occurred, + * AND with its execution status (e.g. uploading, parsing, executing, ...). + * The corresponding HTTP status code is set by the second parameter. + * + * @param message Message explaining the error. + * @param httpErrorCode HTTP response status code. (if ≤ 0, 500 will be set by default) + * @param query The ADQL query which were executed when the error occurred. + * @param status Execution status/phase of the given ADQL query when the error occurred. + */ public TAPException(String message, int httpErrorCode, String query, ExecutionProgression status){ this(message, httpErrorCode, query); executionStatus = status; } + /** + *

    TAP exception wrapping the given {@link UWSException}.

    + * + *

    The message of this TAP exception will be exactly the same as the one of the given exception.

    + * + *

    + * Besides, the cause of this TAP exception will be the cause of the given exception ONLY if it has one ; + * otherwise it will the given exception. + *

    + * + *

    The HTTP status code will be the same as the one of the given {@link UWSException}.

    + * + * @param ue The exception to wrap. + */ public TAPException(UWSException ue){ - this(ue.getMessage(), ue.getCause(), ue.getHttpErrorCode()); + this(ue.getMessage(), (ue.getCause() == null ? ue : ue.getCause()), ue.getHttpErrorCode()); } + /** + *

    TAP exception wrapping the given {@link UWSException}.

    + * + *

    The message of this TAP exception will be exactly the same as the one of the given exception.

    + * + *

    + * Besides, the cause of this TAP exception will be the cause of the given exception ONLY if it has one ; + * otherwise it will the given exception. + *

    + * + *

    The HTTP status code will be the one given in second parameter.

    + * + * @param cause The exception to wrap. + * @param httpErrorCode HTTP response status code. (if ≤ 0, 500 will be set by default) + */ public TAPException(UWSException cause, int httpErrorCode){ this(cause); this.httpErrorCode = httpErrorCode; } + /** + *

    TAP exception wrapping the given {@link UWSException} and storing the current ADQL query execution status.

    + * + *

    The message of this TAP exception will be exactly the same as the one of the given exception.

    + * + *

    + * Besides, the cause of this TAP exception will be the cause of the given exception ONLY if it has one ; + * otherwise it will the given exception. + *

    + * + *

    The HTTP status code will be the one given in second parameter.

    + * + * @param cause The exception to wrap. + * @param httpErrorCode HTTP response status code. (if ≤ 0, 500 will be set by default) + * @param status Execution status/phase of the given ADQL query when the error occurred. + */ public TAPException(UWSException cause, int httpErrorCode, ExecutionProgression status){ this(cause, httpErrorCode); this.executionStatus = status; } + /** + * Build a {@link TAPException} with the given cause. The built exception will have NO MESSAGE. + * No execution status specified. + * The corresponding HTTP status code will be HTTP-500 (Internal Server Error). + * + * @param cause The cause of this exception. + */ public TAPException(Throwable cause){ super(cause); } + /** + * Build a {@link TAPException} with the given cause. The built exception will have NO MESSAGE. + * No execution status specified. + * The corresponding HTTP status code is set by the second parameter. + * + * @param cause The cause of this exception. + * @param httpErrorCode HTTP response status code. (if ≤ 0, 500 will be set by default) + */ public TAPException(Throwable cause, int httpErrorCode){ super(cause); this.httpErrorCode = httpErrorCode; } + /** + * Build a {@link TAPException} with the given cause AND with the ADQL query which were executed when the error occurred. + * The built exception will have NO MESSAGE. + * No execution status specified. + * The corresponding HTTP status code will be HTTP-500 (Internal Server Error). + * + * @param cause The cause of this exception. + * @param query The ADQL query which were executed when the error occurred. + */ public TAPException(Throwable cause, String query){ super(cause); adqlQuery = query; } + /** + * Build a {@link TAPException} with the given cause AND with the ADQL query which were executed when the error occurred. + * The built exception will have NO MESSAGE. + * No execution status specified. + * The corresponding HTTP status code is set by the second parameter. 
+ * + * @param cause The cause of this exception. + * @param httpErrorCode HTTP response status code. (if ≤ 0, 500 will be set by default) + * @param query The ADQL query which were executed when the error occurred. + */ public TAPException(Throwable cause, int httpErrorCode, String query){ this(cause, httpErrorCode); adqlQuery = query; } + /** + * Build a {@link TAPException} with the given cause AND with the ADQL query which were executed when the error occurred + * AND with its execution status (e.g. uploading, parsing, executing, ...). + * The built exception will have NO MESSAGE. + * The corresponding HTTP status code will be HTTP-500 (Internal Server Error). + * + * @param cause The cause of this exception. + * @param query The ADQL query which were executed when the error occurred. + * @param status Execution status/phase of the given ADQL query when the error occurred. + */ public TAPException(Throwable cause, String query, ExecutionProgression status){ this(cause, query); executionStatus = status; } + /** + * Build a {@link TAPException} with the given cause AND with the ADQL query which were executed when the error occurred + * AND with its execution status (e.g. uploading, parsing, executing, ...). + * The built exception will have NO MESSAGE. + * The corresponding HTTP status code is set by the second parameter. + * + * @param cause The cause of this exception. + * @param httpErrorCode HTTP response status code. (if ≤ 0, 500 will be set by default) + * @param query The ADQL query which were executed when the error occurred. + * @param status Execution status/phase of the given ADQL query when the error occurred. + */ public TAPException(Throwable cause, int httpErrorCode, String query, ExecutionProgression status){ this(cause, httpErrorCode, query); executionStatus = status; } + /** + * Build a {@link TAPException} with the given message and cause. + * No execution status specified. 
+ * The corresponding HTTP status code will be HTTP-500 (Internal Server Error). + * + * @param message Message of this exception. + * @param cause The cause of this exception. + */ public TAPException(String message, Throwable cause){ super(message, cause); } + /** + * Build a {@link TAPException} with the given message and cause. + * No execution status specified. + * The corresponding HTTP status code is set by the third parameter. + * + * @param message Message of this exception. + * @param cause The cause of this exception. + * @param httpErrorCode HTTP response status code. (if ≤ 0, 500 will be set by default) + */ public TAPException(String message, Throwable cause, int httpErrorCode){ super(message, cause); this.httpErrorCode = httpErrorCode; } + /** + * Build a {@link TAPException} with the given message and cause, + * AND with the ADQL query which were executed when the error occurred. + * No execution status specified. + * The corresponding HTTP status code will be HTTP-500 (Internal Server Error). + * + * @param message Message of this exception. + * @param cause The cause of this exception. + * @param query The ADQL query which were executed when the error occurred. + */ public TAPException(String message, Throwable cause, String query){ super(message, cause); adqlQuery = query; } + /** + * Build a {@link TAPException} with the given message and cause, + * AND with the ADQL query which were executed when the error occurred. + * No execution status specified. + * The corresponding HTTP status code is set by the third parameter. + * + * @param message Message of this exception. + * @param cause The cause of this exception. + * @param httpErrorCode HTTP response status code. (if ≤ 0, 500 will be set by default) + * @param query The ADQL query which were executed when the error occurred. 
+ */ public TAPException(String message, Throwable cause, int httpErrorCode, String query){ this(message, cause, httpErrorCode); adqlQuery = query; } + /** + * Build a {@link TAPException} with the given message and cause, + * AND with the ADQL query which were executed when the error occurred + * AND with its execution status (e.g. uploading, parsing, executing, ...). + * No execution status specified. + * The corresponding HTTP status code will be HTTP-500 (Internal Server Error). + * + * @param message Message of this exception. + * @param cause The cause of this exception. + * @param query The ADQL query which were executed when the error occurred. + * @param status Execution status/phase of the given ADQL query when the error occurred. + */ public TAPException(String message, Throwable cause, String query, ExecutionProgression status){ this(message, cause, query); executionStatus = status; } + /** + * Build a {@link TAPException} with the given message and cause, + * AND with the ADQL query which were executed when the error occurred + * AND with its execution status (e.g. uploading, parsing, executing, ...). + * No execution status specified. + * The corresponding HTTP status code is set by the third parameter. + * + * @param message Message of this exception. + * @param cause The cause of this exception. + * @param httpErrorCode HTTP response status code. (if ≤ 0, 500 will be set by default) + * @param query The ADQL query which were executed when the error occurred. + * @param status Execution status/phase of the given ADQL query when the error occurred. + */ public TAPException(String message, Throwable cause, int httpErrorCode, String query, ExecutionProgression status){ this(message, cause, httpErrorCode, query); executionStatus = status; } + /** + *

    Get the HTTP status code to set in the HTTP response.

    + * + *

    If the set value is ≤ 0, 500 will be returned instead.

    + * + * @return The HTTP response status code. + */ public int getHttpErrorCode(){ - return httpErrorCode; + return (httpErrorCode <= 0) ? UWSException.INTERNAL_SERVER_ERROR : httpErrorCode; } + /** + * Get the ADQL query which were executed when the error occurred. + * + * @return Executed ADQL query. + */ public String getQuery(){ return adqlQuery; } + /** + * Get the execution status/phase of an ADQL query when the error occurred. + * + * @return ADQL query execution status. + */ public ExecutionProgression getExecutionStatus(){ return executionStatus; } diff --git a/src/tap/TAPExecutionReport.java b/src/tap/TAPExecutionReport.java index bf95af1a449dd836cc4b16f1c65960e90d7db4b4..fe2733f07ac4fe4fc6e2bbb3be59de678e503fb5 100644 --- a/src/tap/TAPExecutionReport.java +++ b/src/tap/TAPExecutionReport.java @@ -16,90 +16,167 @@ package tap; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ -import adql.db.DBColumn; - import tap.parameters.TAPParameters; +import adql.db.DBColumn; +/** + *

    Report the execution (including the parsing and the output writing) of an ADQL query. + * It gives information on the job parameters, the job ID, whether it is a synchronous task or not, times of each execution step (uploading, parsing, executing and writing), + * the resulting columns and the success or not of the execution.

    + * + *

    This report is completely filled by {@link ADQLExecutor}, and aims to be used/read only at the end of the job or when it is definitely finished.

    + * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (04/2015) + */ public class TAPExecutionReport { + /** ID of the job whose the execution is reported here. */ public final String jobID; + + /** Indicate whether this execution is done in a synchronous or asynchronous job. */ public final boolean synchronous; + + /** List of all parameters provided in the user request. */ public final TAPParameters parameters; - public String sqlTranslation = null; + /** List of all resulting columns. Empty array, if not yet known. */ public DBColumn[] resultingColumns = new DBColumn[0]; - protected final long[] durations = new long[]{-1,-1,-1,-1,-1}; + /** Total number of written rows. + * @since 2.0 */ + public long nbRows = -1; + + /** Duration of all execution steps. For the moment only 4 steps (in the order): uploading, parsing, executing and writing. */ + protected final long[] durations = new long[]{-1,-1,-1,-1}; + + /** Total duration of the job execution. */ protected long totalDuration = -1; + /** Indicate whether this job has ended successfully or not. At the beginning or while executing, this field is always FALSE. */ public boolean success = false; + /** + * Build an empty execution report. + * + * @param jobID ID of the job whose the execution must be described here. + * @param synchronous true if the job is synchronous, false otherwise. + * @param params List of all parameters provided by the user for the execution. + */ public TAPExecutionReport(final String jobID, final boolean synchronous, final TAPParameters params){ this.jobID = jobID; this.synchronous = synchronous; parameters = params; } + /** + *

    Map the execution progression with an index inside the {@link #durations} array.

    + * + *

    Warning: for the moment, only {@link ExecutionProgression#UPLOADING}, {@link ExecutionProgression#PARSING}, + * {@link ExecutionProgression#EXECUTING_ADQL} and {@link ExecutionProgression#WRITING_RESULT} are managed.

    + * + * @param tapProgression Execution progression. + * + * @return Index in the array {@link #durations}, or -1 if the given execution progression is not managed. + */ protected int getIndexDuration(final ExecutionProgression tapProgression){ switch(tapProgression){ case UPLOADING: return 0; case PARSING: return 1; - case TRANSLATING: + case EXECUTING_ADQL: return 2; - case EXECUTING_SQL: - return 3; case WRITING_RESULT: - return 4; + return 3; default: return -1; } } - public final long getDuration(final ExecutionProgression tapProgression){ - int indDuration = getIndexDuration(tapProgression); + /** + * Get the duration corresponding to the given job execution step. + * + * @param tapStep Job execution step. + * + * @return The corresponding duration (in ms), or -1 if this step has not been (yet) processed. + * + * @see #getIndexDuration(ExecutionProgression) + */ + public final long getDuration(final ExecutionProgression tapStep){ + int indDuration = getIndexDuration(tapStep); if (indDuration < 0 || indDuration >= durations.length) return -1; else return durations[indDuration]; } - public final void setDuration(final ExecutionProgression tapProgression, final long duration){ - int indDuration = getIndexDuration(tapProgression); + /** + * Set the duration corresponding to the given execution step. + * + * @param tapStep Job execution step. + * @param duration Duration (in ms) of the given execution step. + */ + public final void setDuration(final ExecutionProgression tapStep, final long duration){ + int indDuration = getIndexDuration(tapStep); if (indDuration < 0 || indDuration >= durations.length) return; else durations[indDuration] = duration; } + /** + * Get the execution of the UPLOAD step. + * @return Duration (in ms). + * @see #getDuration(ExecutionProgression) + */ public final long getUploadDuration(){ return getDuration(ExecutionProgression.UPLOADING); } + /** + * Get the execution of the PARSE step. + * @return Duration (in ms). 
+ * @see #getDuration(ExecutionProgression) + */ public final long getParsingDuration(){ return getDuration(ExecutionProgression.PARSING); } - public final long getTranslationDuration(){ - return getDuration(ExecutionProgression.TRANSLATING); - } - + /** + * Get the execution of the EXECUTION step. + * @return Duration (in ms). + * @see #getDuration(ExecutionProgression) + */ public final long getExecutionDuration(){ - return getDuration(ExecutionProgression.EXECUTING_SQL); + return getDuration(ExecutionProgression.EXECUTING_ADQL); } + /** + * Get the execution of the FORMAT step. + * @return Duration (in ms). + * @see #getDuration(ExecutionProgression) + */ public final long getFormattingDuration(){ return getDuration(ExecutionProgression.WRITING_RESULT); } + /** + * Get the total duration of the job execution. + * @return Duration (in ms). + */ public final long getTotalDuration(){ return totalDuration; } + /** + * Set the total duration of the job execution. + * @param duration Duration (in ms) to set. + */ public final void setTotalDuration(final long duration){ totalDuration = duration; } diff --git a/src/tap/TAPFactory.java b/src/tap/TAPFactory.java index 785be4795659e53d11bc2b0ffecf32477784f5dd..f4f6b7a46a43753e8d3093437b02ef6490d7f351 100644 --- a/src/tap/TAPFactory.java +++ b/src/tap/TAPFactory.java @@ -16,43 +16,469 @@ package tap; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . 
* - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ -import tap.db.DBConnection; +import java.util.List; +import java.util.Map; -import tap.metadata.TAPSchema; +import javax.servlet.http.HttpServletRequest; +import tap.db.DBConnection; +import tap.metadata.TAPSchema; +import tap.parameters.TAPParameters; import tap.upload.Uploader; - import uws.UWSException; - +import uws.job.ErrorSummary; +import uws.job.JobThread; +import uws.job.Result; +import uws.job.UWSJob; +import uws.job.parameters.UWSParameters; +import uws.job.user.JobOwner; import uws.service.UWSFactory; import uws.service.UWSService; - import uws.service.backup.UWSBackupManager; - +import uws.service.error.ServiceErrorWriter; +import uws.service.file.UWSFileManager; +import uws.service.request.RequestParser; +import adql.parser.ADQLParser; import adql.parser.ADQLQueryFactory; import adql.parser.QueryChecker; +import adql.query.ADQLQuery; + +/** + *

    Let build essential objects of the TAP service.

    + * + *

    Basically, it means answering to the following questions:

    + *
      + *
    • how to connect to the database? ({@link DBConnection})
    • + *
    • which UWS implementation (default implementation provided by default) to use? ({@link UWSService})
    • + *
    • whether and how UWS/asynchronous jobs must be backuped and restored? ({@link UWSBackupManager})
    • + *
    • how to create asynchronous jobs? ({@link TAPJob})
    • + *
    • whether and how tables must be updated? ({@link Uploader})
    • + *
    • how to execute an ADQL query? ({@link ADQLExecutor}) + *
    • how to parser an ADQL query? ({@link ADQLParser})
    • + *
    • how to check ADQL queries? ({@link QueryChecker})
    • + *
    + * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (04/2015) + */ +public abstract class TAPFactory implements UWSFactory { + + /** Connection to the TAP service ; it provides all important service configuration information. */ + protected final ServiceConnection service; + + /** + * Build a basic {@link TAPFactory}. + * Nothing is done except setting the service connection. + * + * @param service Configuration of the TAP service. MUST NOT be NULL + * + * @throws NullPointerException If the given {@link ServiceConnection} is NULL. + */ + protected TAPFactory(final ServiceConnection service) throws NullPointerException{ + if (service == null) + throw new NullPointerException("Can not create a TAPFactory without a ServiceConnection instance !"); + + this.service = service; + } + + /** + *

    Get the object to use when an error must be formatted and written to the user.

    + * + *

    This formatted error will be either written in an HTTP response or in a job error summary.

    + * + * @return The error writer to use. + * + * @since 2.0 + */ + public abstract ServiceErrorWriter getErrorWriter(); + + /* ******************* */ + /* DATABASE CONNECTION */ + /* ******************* */ + + /** + *

    Get a free database connection.

    + * + *

    + * Free means this connection is not currently in use and will be exclusively dedicated to the function/process/thread + * which has asked for it by calling this function. + *

    + * + *

    Note: + * This function can create on the fly a new connection OR get a free one from a connection pool. Considering the + * creation time of a database connection, the second way is recommended. + *

    + * + *

    IMPORTANT: + * The returned connection MUST be freed after having used it. + *

    + * + *

    WARNING: + * Some implementation may free the connection automatically when not used for a specific time. + * So, do not forget to free the connection after use! + *

    + * + * @param jobID ID of the job/thread/process which has asked for this connection. note: The returned connection must then be identified thanks to this ID. + * + * @return A new and free connection to the database. MUST BE NOT NULL, or otherwise a TAPException should be returned. + * + * @throws TAPException If there is any error while getting a free connection. + * + * @since 2.0 + */ + public abstract DBConnection getConnection(final String jobID) throws TAPException; + + /** + *

    Free the given connection.

    + * + *

    + * This function is called by the TAP library when a job/thread does not need this connection any more. It aims + * to free resources associated to the given database connection. + *

    + * + *

    Note: + * This function can just close definitely the connection OR give it back to a connection pool. The implementation is + * here totally free! + *

    + * + * @param conn The connection to close. + * + * @since 2.0 + */ + public abstract void freeConnection(final DBConnection conn); + + /** + *

    Destroy all resources (and particularly DB connections and JDBC driver) allocated in this factory.

    + * + *

    Note: + * This function is called when the TAP service is shutting down. + * After this call, the factory may not be able to provide any closed resources ; its behavior may be unpredictable. + *

    + * + * @since 2.0 + */ + public abstract void destroy(); + + /* *************** */ + /* ADQL MANAGEMENT */ + /* *************** */ + + /** + *

    Create the object able to execute an ADQL query and to write and to format its result.

    + * + *

    Note: + * A default implementation is provided by {@link AbstractTAPFactory} + *

    + * + * @return An ADQL executor. + * + * @throws TAPException If any error occurs while creating an ADQL executor. + */ + public abstract ADQLExecutor createADQLExecutor() throws TAPException; + + /** + *

    Create a parser of ADQL query.

    + * + *

    Warning: + * This parser can be created with a query factory and/or a query checker. + * {@link #createQueryFactory()} will be used only if the default query factory (or none) is set + * in the ADQL parser returned by this function. + * Idem for {@link #createQueryChecker(TAPSchema)}: it will used only if no query checker is set + * in the returned ADQL parser. + *

    + * + *

    Note: + * A default implementation is provided by {@link AbstractTAPFactory}. + *

    + * + * @return An ADQL query parser. + * + * @throws TAPException If any error occurs while creating an ADQL parser. + * + * @since 2.0 + */ + public abstract ADQLParser createADQLParser() throws TAPException; + + /** + *

    Create a factory able to build every part of an {@link ADQLQuery} object.

    + * + *

    Warning: + * This function is used only if the default query factory (or none) is set in the ADQL parser + * returned by {@link #createADQLParser()}. + *

    + * + *

    Note: + * A default implementation is provided by {@link AbstractTAPFactory} + *

    + * + * @return An {@link ADQLQuery} factory. + * + * @throws TAPException If any error occurs while creating the factory. + */ + public abstract ADQLQueryFactory createQueryFactory() throws TAPException; + + /** + *

    Create an object able to check the consistency between the ADQL query and the database. + * That's to say, it checks whether the tables and columns used in the query really exist + * in the database.

    + * + *

    Warning: + * This function is used only if no query checker is set in the ADQL parser + * returned by {@link #createADQLParser()}. + *

    + * + *

    Note: + * A default implementation is provided by {@link AbstractTAPFactory} + *

    + * + * @param uploadSchema ADQL schema containing the description of all uploaded tables. + * + * @return A query checker. + * + * @throws TAPException If any error occurs while creating a query checker. + */ + public abstract QueryChecker createQueryChecker(final TAPSchema uploadSchema) throws TAPException; + + /* ****** */ + /* UPLOAD */ + /* ****** */ + + /** + *

    Create an object able to manage the creation of submitted user tables (in VOTable) into the database.

    + * + *

    Note: + * A default implementation is provided by {@link AbstractTAPFactory}. + *

    + * + * @param dbConn The database connection which has requested an {@link Uploader}. + * + * @return An {@link Uploader}. + * + * @throws TAPException If any error occurs while creating an {@link Uploader} instance. + */ + public abstract Uploader createUploader(final DBConnection dbConn) throws TAPException; + + /* ************** */ + /* UWS MANAGEMENT */ + /* ************** */ + + /** + *

    Create the object which will manage the asynchronous resource of the TAP service. + * This resource is a UWS service.

    + * + *

    Note: + * A default implementation is provided by {@link AbstractTAPFactory}. + *

    + * + * @return A UWS service which will be the asynchronous resource of this TAP service. + * + * @throws TAPException If any error occurs while creating this UWS service. + */ + public abstract UWSService createUWS() throws TAPException; + + /** + *

    Create the object which will manage the backup and restoration of all asynchronous jobs.

    + * + *

    Note: + * This function may return NULL. If it does, asynchronous jobs won't be backed up. + *

    + * + *

    Note: + * A default implementation is provided by {@link AbstractTAPFactory}. + *

    + * + * @param uws The UWS service which has to be backuped and restored. + * + * @return The backup manager to use. MAY be NULL + * + * @throws TAPException If any error occurs while creating this backup manager. + */ + public abstract UWSBackupManager createUWSBackupManager(final UWSService uws) throws TAPException; -import adql.translator.ADQLTranslator; + /** + *

    Creates a (PENDING) UWS job from the given HTTP request.

    + * + *

    + * This implementation just calls {@link #createTAPJob(HttpServletRequest, JobOwner)} + * with the given request, in order to ensure that the returned object is always a {@link TAPJob}. + *

    + * + * @see uws.service.AbstractUWSFactory#createJob(javax.servlet.http.HttpServletRequest, uws.job.user.JobOwner) + * @see #createTAPJob(HttpServletRequest, JobOwner) + */ + @Override + public final UWSJob createJob(HttpServletRequest request, JobOwner owner) throws UWSException{ + return createTAPJob(request, owner); + } -public interface TAPFactory< R > extends UWSFactory { + /** + *

    Create a PENDING asynchronous job from the given HTTP request.

    + * + *

    Note: + * A default implementation is provided by {@link AbstractTAPFactory}. + *

    + * + * @param request Request which contains all parameters needed to set correctly the asynchronous job to create. + * @param owner The user which has requested the job creation. + * + * @return A new PENDING asynchronous job. + * + * @throws UWSException If any error occurs while reading the parameters in the request or while creating the job. + */ + protected abstract TAPJob createTAPJob(final HttpServletRequest request, final JobOwner owner) throws UWSException; - public UWSService createUWS() throws TAPException, UWSException; + /** + *

    Creates a UWS job with the following attributes.

    + * + *

    + * This implementation just calls {@link #createTAPJob(String, JobOwner, TAPParameters, long, long, long, List, ErrorSummary)} + * with the given parameters, in order to ensure that the returned object is always a {@link TAPJob}. + *

    + * + *

    Note 1: + * This function is mainly used to restore a UWS job at the UWS initialization. + *

    + * + *

    Note 2: + * The job phase is chosen automatically from the given job attributes (i.e. no endTime => PENDING, no result and no error => ABORTED, ...). + *

    + * + * @see uws.service.AbstractUWSFactory#createJob(java.lang.String, uws.job.user.JobOwner, uws.job.parameters.UWSParameters, long, long, long, java.util.List, uws.job.ErrorSummary) + * @see #createTAPJob(String, JobOwner, TAPParameters, long, long, long, List, ErrorSummary) + */ + @Override + public final UWSJob createJob(String jobId, JobOwner owner, final UWSParameters params, long quote, long startTime, long endTime, List results, ErrorSummary error) throws UWSException{ + return createTAPJob(jobId, owner, (TAPParameters)params, quote, startTime, endTime, results, error); + } - public UWSBackupManager createUWSBackupManager(final UWSService uws) throws TAPException, UWSException; + /** + *

    Create a PENDING asynchronous job with the given parameters.

    + * + *

    Note: + * A default implementation is provided in {@link AbstractTAPFactory}. + *

    + * + * @param jobId ID of the job (NOT NULL). + * @param owner Owner of the job. + * @param params List of all input job parameters. + * @param quote Its quote (in seconds). + * @param startTime Date/Time of the start of this job. + * @param endTime Date/Time of the end of this job. + * @param results All results of this job. + * @param error The error which ended the job to create. + * + * @return A new PENDING asynchronous job. + * + * @throws UWSException If there is an error while creating the job. + */ + protected abstract TAPJob createTAPJob(final String jobId, final JobOwner owner, final TAPParameters params, final long quote, final long startTime, final long endTime, final List results, final ErrorSummary error) throws UWSException; - public ADQLExecutor createADQLExecutor() throws TAPException; + /** + *

    Create the thread which will execute the task described by the given UWSJob instance.

    + * + *

    + * This function is definitely implemented here and can not be overridden. The processing of + * an ADQL query must always be the same in a TAP service ; it is completely done by {@link AsyncThread}. + *

    + * + * @see uws.service.UWSFactory#createJobThread(uws.job.UWSJob) + * @see AsyncThread + */ + @Override + public final JobThread createJobThread(final UWSJob job) throws UWSException{ + try{ + return new AsyncThread((TAPJob)job, createADQLExecutor(), getErrorWriter()); + }catch(TAPException te){ + if (te.getCause() != null && te.getCause() instanceof UWSException) + throw (UWSException)te.getCause(); + else + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, te, "Impossible to create an AsyncThread !"); + } + } - public ADQLQueryFactory createQueryFactory() throws TAPException; + /** + *

    Extract the parameters from the given request (multipart or not).

    + * + *

    + * This function is used only to create the set of parameters for a TAP job (synchronous or asynchronous). + * Thus, it just calls {@link #createTAPParameters(HttpServletRequest)} with the given request, in order to ensure + * that the returned object is always a {@link TAPParameters}. + *

    + * + * @see #createTAPParameters(HttpServletRequest) + */ + @Override + public final UWSParameters createUWSParameters(HttpServletRequest request) throws UWSException{ + try{ + return createTAPParameters(request); + }catch(TAPException te){ + if (te.getCause() != null && te.getCause() instanceof UWSException) + throw (UWSException)te.getCause(); + else + throw new UWSException(te.getHttpErrorCode(), te); + } + } - public QueryChecker createQueryChecker(TAPSchema uploadSchema) throws TAPException; + /** + *

    Extract all the TAP parameters from the given HTTP request (multipart or not) and return them.

    + * + *

    Note: + * A default implementation is provided by {@link AbstractTAPFactory}. + *

    + * + * @param request The HTTP request containing the TAP parameters to extract. + * + * @return An object gathering all successfully extracted TAP parameters. + * + * @throws TAPException If any error occurs while extracting the parameters. + */ + public abstract TAPParameters createTAPParameters(final HttpServletRequest request) throws TAPException; - public ADQLTranslator createADQLTranslator() throws TAPException; + /** + *

    Identify and gather all identified parameters of the given map inside a {@link TAPParameters} object.

    + * + *

    + * This implementation just calls {@link #createTAPParameters(Map)} with the given map, in order to ensure + * that the returned object is always a {@link TAPParameters}. + *

    + * + * @see uws.service.AbstractUWSFactory#createUWSParameters(java.util.Map) + * @see #createTAPParameters(Map) + */ + @Override + public final UWSParameters createUWSParameters(Map params) throws UWSException{ + try{ + return createTAPParameters(params); + }catch(TAPException te){ + if (te.getCause() != null && te.getCause() instanceof UWSException) + throw (UWSException)te.getCause(); + else + throw new UWSException(te.getHttpErrorCode(), te); + } + } - public DBConnection createDBConnection(final String jobID) throws TAPException; + /** + *

    Identify all TAP parameters and gather them inside a {@link TAPParameters} object.

    + * + *

    Note: + * A default implementation is provided by {@link AbstractTAPFactory}. + *

    + * + * @param params Map containing all parameters. + * + * @return An object gathering all successfully identified TAP parameters. + * + * @throws TAPException If any error occurs while creating the {@link TAPParameters} object. + */ + public abstract TAPParameters createTAPParameters(final Map params) throws TAPException; - public Uploader createUploader(final DBConnection dbConn) throws TAPException; + @Override + public RequestParser createRequestParser(final UWSFileManager fileManager) throws UWSException{ + return new TAPRequestParser(fileManager); + } } diff --git a/src/tap/TAPJob.java b/src/tap/TAPJob.java index 52510c5b6290ba737cf07049cf62892d99a3a29c..aa89491428ac40e8d058d38b58fac92a7b6102a1 100644 --- a/src/tap/TAPJob.java +++ b/src/tap/TAPJob.java @@ -16,121 +16,224 @@ package tap; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ +import java.util.Date; import java.util.List; +import tap.log.TAPLog; +import tap.parameters.DALIUpload; import tap.parameters.TAPParameters; -import tap.upload.TableLoader; - import uws.UWSException; - import uws.job.ErrorSummary; +import uws.job.ExecutionPhase; +import uws.job.JobThread; import uws.job.Result; import uws.job.UWSJob; - +import uws.job.parameters.UWSParameters; import uws.job.user.JobOwner; +import uws.service.log.UWSLog.LogLevel; +/** + *

    Description of a TAP job. This class is used for asynchronous but also synchronous queries.

    + * + *

    + * Unlike {@link UWSJob}, it loads parameters from {@link TAPParameters} instances rather than {@link UWSParameters}. + * However, {@link TAPParameters} is an extension of {@link UWSParameters}. That's what allows the UWS library to use both {@link TAPJob} and {@link TAPParameters}. + *

    + * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (04/2015) + */ public class TAPJob extends UWSJob { - private static final long serialVersionUID = 1L; + /** Name of the standard TAP parameter which specifies the type of request to execute: "REQUEST". */ public static final String PARAM_REQUEST = "request"; + /** REQUEST value meaning an ADQL query must be executed: "doQuery". */ public static final String REQUEST_DO_QUERY = "doQuery"; + /** REQUEST value meaning VO service capabilities must be returned: "getCapabilities". */ public static final String REQUEST_GET_CAPABILITIES = "getCapabilities"; + /** Name of the standard TAP parameter which specifies the query language: "LANG". (only the ADQL language is supported by default in this version of the library) */ public static final String PARAM_LANGUAGE = "lang"; + /** LANG value meaning ADQL language: "ADQL". */ public static final String LANG_ADQL = "ADQL"; + /** LANG value meaning PQL language: "PQL". (this language is not supported in this version of the library) */ public static final String LANG_PQL = "PQL"; + /** Name of the standard TAP parameter which specifies the version of the TAP protocol that must be used: "VERSION". (only the version 1.0 is supported in this version of the library) */ public static final String PARAM_VERSION = "version"; + /** VERSION value meaning the version 1.0 of TAP: "1.0". */ public static final String VERSION_1_0 = "1.0"; + /** Name of the standard TAP parameter which specifies the output format (format of a query result): "FORMAT". */ public static final String PARAM_FORMAT = "format"; + /** FORMAT value meaning the VOTable format: "votable". */ public static final String FORMAT_VOTABLE = "votable"; + /** Name of the standard TAP parameter which specifies the maximum number of rows that must be returned in the query result: "MAXREC". 
*/ public static final String PARAM_MAX_REC = "maxRec"; + /** Special MAXREC value meaning the number of output rows is not limited. */ public static final int UNLIMITED_MAX_REC = -1; + /** Name of the standard TAP parameter which specifies the query to execute: "QUERY". */ public static final String PARAM_QUERY = "query"; + + /** Name of the standard TAP parameter which defines the tables to upload in the database for the query execution: "UPLOAD". */ public static final String PARAM_UPLOAD = "upload"; + /** Name of the library parameter which informs about a query execution progression: "PROGRESSION". (this parameter is removed once the execution is finished) */ public static final String PARAM_PROGRESSION = "progression"; - protected TAPExecutionReport execReport; + /** Internal query execution report. */ + protected TAPExecutionReport execReport = null; + /** Parameters of this job for its execution. */ protected final TAPParameters tapParams; - public TAPJob(final JobOwner owner, final TAPParameters tapParams) throws UWSException, TAPException{ + /** + *

    Build a pending TAP job with the given parameters.

    + * + *

    Note: if the parameter {@link #PARAM_PHASE} (phase) is given with the value {@link #PHASE_RUN} + * the job execution starts immediately after the job has been added to a job list or after {@link #applyPhaseParam(JobOwner)} is called.

    + * + * @param owner User who owns this job. MAY BE NULL + * @param tapParams Set of parameters. + * + * @throws TAPException If one of the given parameters has a forbidden or wrong value. + */ + public TAPJob(final JobOwner owner, final TAPParameters tapParams) throws TAPException{ super(owner, tapParams); this.tapParams = tapParams; tapParams.check(); - //progression = ExecutionProgression.PENDING; - //loadTAPParams(tapParams); } - public TAPJob(final String jobID, final JobOwner owner, final TAPParameters params, final long quote, final long startTime, final long endTime, final List results, final ErrorSummary error) throws UWSException, TAPException{ + /** + *

    Restore a job in a state defined by the given parameters. + * The phase must be set separately with {@link #setPhase(uws.job.ExecutionPhase, boolean)}, where the second parameter is true.

    + * + * @param jobID ID of the job. + * @param owner User who owns this job. + * @param params Set of not-standard UWS parameters (i.e. what is called by {@link UWSJob} as additional parameters ; they includes all TAP parameters). + * @param quote Quote of this job. + * @param startTime Date/Time at which this job started. (if not null, it means the job execution was finished, so a endTime should be provided) + * @param endTime Date/Time at which this job finished. + * @param results List of results. NULL if the job has not been executed, has been aborted or finished with an error. + * @param error Error with which this job ends. + * + * @throws TAPException If one of the given parameters has a forbidden or wrong value. + */ + public TAPJob(final String jobID, final JobOwner owner, final TAPParameters params, final long quote, final long startTime, final long endTime, final List results, final ErrorSummary error) throws TAPException{ super(jobID, owner, params, quote, startTime, endTime, results, error); this.tapParams = params; this.tapParams.check(); } - /*protected void loadTAPParams(TAPParameters params) { - adqlQuery = params.query; - additionalParameters.put(TAPParameters.PARAM_QUERY, adqlQuery); - - format = (params.format == null)?"application/x-votable+xml":params.format; - additionalParameters.put(TAPParameters.PARAM_FORMAT, format); - - maxRec = params.maxrec; - additionalParameters.put(TAPParameters.PARAM_MAX_REC, maxRec+""); - - upload = params.upload; - tablesToUpload = params.tablesToUpload; - additionalParameters.put(TAPParameters.PARAM_UPLOAD, upload); - }*/ - /** - * @return The tapParams. + * Get the object storing and managing the set of all (UWS and TAP) parameters. + * + * @return The object managing all job parameters. */ public final TAPParameters getTapParams(){ return tapParams; } + /** + *

    Get the value of the REQUEST parameter.

    + * + *

    This value must be {@value #REQUEST_DO_QUERY}.

    + * + * @return REQUEST value. + */ public final String getRequest(){ return tapParams.getRequest(); } + /** + * Get the value of the FORMAT parameter. + * + * @return FORMAT value. + */ public final String getFormat(){ return tapParams.getFormat(); } + /** + *

    Get the value of the LANG parameter.

    + * + *

    This value should always be {@value #LANG_ADQL} in this version of the library

    + * + * @return LANG value. + */ public final String getLanguage(){ return tapParams.getLang(); } + /** + *

    Get the value of the MAXREC parameter.

    + * + *

    If this value is negative, it means the number of output rows is not limited.

    + * + * @return MAXREC value. + */ public final int getMaxRec(){ return tapParams.getMaxRec(); } + /** + * Get the value of the QUERY parameter (i.e. the query, in the language returned by {@link #getLanguage()}, to execute). + * + * @return QUERY value. + */ public final String getQuery(){ return tapParams.getQuery(); } + /** + *

    Get the value of the VERSION parameter.

    + * + *

    This value should be {@value #VERSION_1_0} in this version of the library.

    + * + * @return VERSION value. + */ public final String getVersion(){ return tapParams.getVersion(); } + /** + *

    Get the value of the UPLOAD parameter.

    + * + *

    This value must be formatted as specified by the TAP standard (= a semicolon separated list of DALI uploads).

    + * + * @return UPLOAD value. + */ public final String getUpload(){ return tapParams.getUpload(); } - public final TableLoader[] getTablesToUpload(){ - return tapParams.getTableLoaders(); + /** + *

    Get the list of tables to upload in the database for the query execution.

    + * + *

    The returned array is an interpretation of the UPLOAD parameter.

    + * + * @return List of tables to upload. + */ + public final DALIUpload[] getTablesToUpload(){ + return tapParams.getUploadedTables(); } /** + *

    Get the execution report.

    + * + *

    + * This report is available only during or after the job execution. + * It tells in which step the execution is, and how long the previous steps took. + * It can also give more information about the number of resulting rows and columns. + *

    + * * @return The execReport. */ public final TAPExecutionReport getExecReport(){ @@ -138,63 +241,120 @@ public class TAPJob extends UWSJob { } /** - * @param execReport The execReport to set. + *

    Set the execution report.

    + * + *

    IMPORTANT: + * This function can be called only if the job is running or is being restored, otherwise an exception would be thrown. + * It should not be used by implementors, but only by the internal library processing. + *

    + * + * @param execReport An execution report. + * + * @throws UWSException If this job has never been restored and is not running. */ - public final void setExecReport(TAPExecutionReport execReport) throws UWSException{ - if (getRestorationDate() == null && !isRunning()) + public final void setExecReport(final TAPExecutionReport execReport) throws UWSException{ + if (getRestorationDate() == null && (thread == null || thread.isFinished())) throw new UWSException("Impossible to set an execution report if the job is not in the EXECUTING phase ! Here, the job \"" + jobId + "\" is in the phase " + getPhase()); this.execReport = execReport; } - /* - *

    Starts in an asynchronous manner this ADQLExecutor.

    - *

    The execution will stop after the duration specified in the given {@link TAPJob} - * (see {@link TAPJob#getExecutionDuration()}).

    - * - * @param output - * @return - * @throws IllegalStateException - * @throws InterruptedException - * - public synchronized final boolean startSync(final OutputStream output) throws IllegalStateException, InterruptedException, UWSException { - // TODO Set the output stream so that the result is written directly in the given output ! - start(); - System.out.println("Joining..."); - thread.join(getExecutionDuration()); - System.out.println("Aborting..."); - thread.interrupt(); - thread.join(getTimeToWaitForEnd()); - return thread.isInterrupted(); - }*/ + /** + *

    Create the thread to use for the execution of this job.

    + * + *

    Note: If the thread already exists, this function does nothing.

    + * + * @throws NullPointerException If the factory returned NULL rather than the asked {@link JobThread}. + * @throws UWSException If the thread creation fails. + * + * @see TAPFactory#createJobThread(UWSJob) + * + * @since 2.0 + */ + private final void createThread() throws NullPointerException, UWSException{ + if (thread == null){ + thread = getFactory().createJobThread(this); + if (thread == null) + throw new NullPointerException("Missing job work! The thread created by the factory is NULL => The job can't be executed!"); + } + } + + /** + *

    Check whether this job is able to start right now.

    + * + *

    + * Basically, this function tries to get a database connection. If none is available, + * then this job can not start and this function returns FALSE. In all the other cases, + * TRUE is returned. + *

    + * + *

    Warning: This function will indirectly open and keep a database connection, so that the job can be started just after its call. + * If it turns out that the execution won't start just after this call, the DB connection should be closed in some way in order to save database resources.

    + * + * @return true if this job can start right now, false otherwise. + * + * @since 2.0 + */ + public final boolean isReadyForExecution(){ + return thread != null && ((AsyncThread)thread).isReadyForExecution(); + } @Override - protected void stop(){ - if (!isStopped()){ - //try { - stopping = true; - // TODO closeDBConnection(); - super.stop(); - /*} catch (TAPException e) { - getLogger().error("Impossible to cancel the query execution !", e); - return; - }*/ + public final void start(final boolean useManager) throws UWSException{ + // This job must know its jobs list and this jobs list must know its UWS: + if (getJobList() == null || getJobList().getUWS() == null) + throw new IllegalStateException("A TAPJob can not start if it is not linked to a job list or if its job list is not linked to a UWS."); + + // If already running do nothing: + else if (isRunning()) + return; + + // If asked propagate this request to the execution manager: + else if (useManager){ + // Create its corresponding thread, if not already existing: + createThread(); + // Ask to the execution manager to test whether the job is ready for execution, and if, execute it (by calling this function with "false" as parameter): + getJobList().getExecutionManager().execute(this); + + }// Otherwise start directly the execution: + else{ + // Create its corresponding thread, if not already existing: + createThread(); + if (!isReadyForExecution()){ + UWSException ue = new NoDBConnectionAvailableException(); + ((TAPLog)getLogger()).logDB(LogLevel.ERROR, null, "CONNECTION_LACK", "No more database connection available for the moment!", ue); + getLogger().logJob(LogLevel.ERROR, this, "ERROR", "Asynchronous job " + jobId + " execution aborted: no database connection available!", null); + throw ue; + } + + // Change the job phase: + setPhase(ExecutionPhase.EXECUTING); + + // Set the start time: + setStartTime(new Date()); + + // Run the job: + thread.start(); + (new JobTimeOut()).start(); + + // Log the 
start of this job: + getLogger().logJob(LogLevel.INFO, this, "START", "Job \"" + jobId + "\" started.", null); } } - /*protected boolean deleteResultFiles(){ - try{ - // TODO service.deleteResults(this); - return true; - }catch(TAPException ex){ - service.log(LogType.ERROR, "Job "+getJobId()+" - Can't delete results files: "+ex.getMessage()); - return false; + /** + * This exception is thrown by a job execution when no database connection are available anymore. + * + * @author Grégory Mantelet (ARI) + * @version 2.0 (02/2015) + * @since 2.0 + */ + public static class NoDBConnectionAvailableException extends UWSException { + private static final long serialVersionUID = 1L; + + public NoDBConnectionAvailableException(){ + super("Service momentarily too busy! Please try again later."); } - }*/ - @Override - public void clearResources(){ - super.clearResources(); - // TODO deleteResultFiles(); } } diff --git a/src/tap/TAPRequestParser.java b/src/tap/TAPRequestParser.java new file mode 100644 index 0000000000000000000000000000000000000000..91c24c6099e227ee1212fbdaccf8738ac06359ac --- /dev/null +++ b/src/tap/TAPRequestParser.java @@ -0,0 +1,216 @@ +package tap; + +/* + * This file is part of TAPLibrary. + * + * TAPLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * TAPLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with TAPLibrary. If not, see . 
+ * + * Copyright 2014 - Astronomisches Rechen Institut (ARI) + */ + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import javax.servlet.http.HttpServletRequest; + +import uws.UWSException; +import uws.UWSToolBox; +import uws.service.file.UWSFileManager; +import uws.service.request.FormEncodedParser; +import uws.service.request.MultipartParser; +import uws.service.request.NoEncodingParser; +import uws.service.request.RequestParser; +import uws.service.request.UploadFile; + +/** + *

    This parser adapts the request parser to use in function of the request content-type:

    + *
      + *
    • application/x-www-form-urlencoded: {@link FormEncodedParser}
    • + *
    • multipart/form-data: {@link MultipartParser}
    • + *
    • other: {@link NoEncodingParser} (the whole request body will be stored as one single parameter)
    • + *
    + * + *

    + * The request body size is limited for the multipart AND the no-encoding parsers. If you want to change this limit, + * you MUST do it for each of these parsers, setting the following static attributes: resp. {@link MultipartParser#SIZE_LIMIT} + * and {@link NoEncodingParser#SIZE_LIMIT}. + *

    + * + *

    Note: + * If you want to support any other kind of request parsing, you will have to write your own {@link RequestParser} implementation. + *

    + * + * @author Grégory Mantelet (ARI) + * @version 2.0 (12/2014) + * @since 2.0 + */ +public class TAPRequestParser implements RequestParser { + + /** File manager to use to create {@link UploadFile} instances. + * It is required by this new object to execute open, move and delete operations whenever it could be asked. */ + private final UWSFileManager fileManager; + + /** {@link RequestParser} to use when a application/x-www-form-urlencoded request must be parsed. This attribute is set by {@link #parse(HttpServletRequest)} + * only when needed, by calling the function {@link #getFormParser()}. */ + private RequestParser formParser = null; + + /** {@link RequestParser} to use when a multipart/form-data request must be parsed. This attribute is set by {@link #parse(HttpServletRequest)} + * only when needed, by calling the function {@link #getMultipartParser()}. */ + private RequestParser multipartParser = null; + + /** {@link RequestParser} to use when none of the other parsers can be used ; it will then transform the whole request body in a parameter called "JDL" + * (Job Description Language). This attribute is set by {@link #parse(HttpServletRequest)} only when needed, by calling the function + * {@link #getNoEncodingParser()}. */ + private RequestParser noEncodingParser = null; + + /** + * Build a {@link RequestParser} able to choose the most appropriate {@link RequestParser} in function of the request content-type. + * + * @param fileManager The file manager to use in order to store any eventual upload. MUST NOT be NULL + */ + public TAPRequestParser(final UWSFileManager fileManager){ + if (fileManager == null) + throw new NullPointerException("Missing file manager => can not create a TAPRequestParser!"); + this.fileManager = fileManager; + } + + @Override + public Map parse(final HttpServletRequest req) throws UWSException{ + if (req == null) + return new HashMap(); + + // Get the method: + String method = (req.getMethod() == null) ? 
"" : req.getMethod().toLowerCase(); + + if (method.equals("post") || method.equals("put")){ + Map params = null; + + // Get the parameters: + if (FormEncodedParser.isFormEncodedRequest(req)) + params = getFormParser().parse(req); + else if (MultipartParser.isMultipartContent(req)) + params = getMultipartParser().parse(req); + else + params = getNoEncodingParser().parse(req); + + // Only for POST requests, the parameters specified in the URL must be added: + if (method.equals("post")) + params = UWSToolBox.addGETParameters(req, (params == null) ? new HashMap() : params); + + return params; + }else + return UWSToolBox.addGETParameters(req, new HashMap()); + } + + /** + * Get the {@link RequestParser} to use for application/x-www-form-urlencoded HTTP requests. + * This parser may be created if not already done. + * + * @return The {@link RequestParser} to use for application/x-www-form-urlencoded requests. Never NULL + */ + private synchronized final RequestParser getFormParser(){ + return (formParser != null) ? formParser : (formParser = new FormEncodedParser(){ + @Override + protected void consumeParameter(String name, Object value, final Map allParams){ + // Modify the value if it is an UPLOAD parameter: + if (name != null && name.equalsIgnoreCase("upload")){ + // if no value, ignore this parameter: + if (value == null) + return; + // put in lower case the parameter name: + name = name.toLowerCase(); + // transform the value in a String array: + value = append((String)value, (allParams.containsKey("upload") ? (String[])allParams.get("upload") : null)); + } + + // Update the map, normally: + super.consumeParameter(name, value, allParams); + } + }); + } + + /** + * Get the {@link RequestParser} to use for multipart/form-data HTTP requests. + * This parser may be created if not already done. + * + * @return The {@link RequestParser} to use for multipart/form-data requests. 
Never NULL + */ + private synchronized final RequestParser getMultipartParser(){ + return (multipartParser != null) ? multipartParser : (multipartParser = new MultipartParser(fileManager){ + @Override + protected void consumeParameter(String name, Object value, final Map allParams){ + // Modify the value if it is an UPLOAD parameter: + if (name != null && name.equalsIgnoreCase(TAPJob.PARAM_UPLOAD)){ + // if no value, ignore this parameter: + if (value == null) + return; + // ignore also parameter having the same name in the same case and which is a file (only strings can be processed as DALI UPLOAD parameter): + else if (name.equals(TAPJob.PARAM_UPLOAD) && value instanceof UploadFile){ + try{ + ((UploadFile)value).deleteFile(); + }catch(IOException ioe){} + return; + } + // use the same case for the parameter name: + name = TAPJob.PARAM_UPLOAD; + // transform the value in a String array: + value = append((String)value, (allParams.containsKey(TAPJob.PARAM_UPLOAD) ? (String[])allParams.get(TAPJob.PARAM_UPLOAD) : null)); + } + + // Update the map, normally: + super.consumeParameter(name, value, allParams); + } + }); + } + + /** + * Get the {@link RequestParser} to use for HTTP requests whose the content type is neither application/x-www-form-urlencoded nor multipart/form-data. + * This parser may be created if not already done. + * + * @return The {@link RequestParser} to use for requests whose the content-type is not supported. Never NULL + */ + private synchronized final RequestParser getNoEncodingParser(){ + return (noEncodingParser == null) ? (noEncodingParser = new NoEncodingParser(fileManager)) : noEncodingParser; + } + + /** + * Create a new array in which the given String is appended at the end of the given array. + * + * @param value String to append in the array. + * @param oldValue The array after which the given String must be appended. + * + * @return The new array containing the values of the array and then the given String. 
+ */ + private final static String[] append(final String value, final String[] oldValue){ + // Create the corresponding array of Strings: + // ...if the array already exists, extend it: + String[] newValue; + if (oldValue != null){ + newValue = new String[oldValue.length + 1]; + for(int i = 0; i < oldValue.length; i++) + newValue[i] = oldValue[i]; + } + // ...otherwise, create a new array: + else + newValue = new String[1]; + + // Add the new value in the array: + newValue[newValue.length - 1] = value; + + // Update the value to put inside the map: + return newValue; + } + +} diff --git a/src/tap/TAPSyncJob.java b/src/tap/TAPSyncJob.java index 726712873fde4feddddd14505922c02c47d49f79..01421a36b60a5fa3f237d139a57aecb36f1924ab 100644 --- a/src/tap/TAPSyncJob.java +++ b/src/tap/TAPSyncJob.java @@ -16,9 +16,11 @@ package tap; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ +import java.io.IOException; import java.util.Date; import javax.servlet.http.HttpServletResponse; @@ -26,26 +28,66 @@ import javax.servlet.http.HttpServletResponse; import tap.parameters.TAPParameters; import uws.UWSException; import uws.job.JobThread; +import uws.service.log.UWSLog.LogLevel; +/** + *

    This class represent a TAP synchronous job. + * A such job must execute an ADQL query and return immediately its result.

    + * + *

    Timeout

    + * + *

    + * The execution of a such job is limited to a short time. Once this time elapsed, the job is stopped. + * For a longer job, an asynchronous job should be used. + *

    + * + *

    Error management

    + * + *

    + * If an error occurs it must be propagated ; it will be written later in the HTTP response on a top level. + *

    + * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (02/2015) + */ public class TAPSyncJob { /** The time (in ms) to wait the end of the thread after an interruption. */ protected long waitForStop = 1000; + /** Last generated ID of a synchronous job. */ protected static String lastId = null; - protected final ServiceConnection service; + /** Description of the TAP service in charge of this synchronous job. */ + protected final ServiceConnection service; + /** ID of this job. This ID is also used to identify the thread. */ protected final String ID; + + /** Parameters of the execution. It mainly contains the ADQL query to execute. */ protected final TAPParameters tapParams; + /** The thread in which the query execution will be done. */ protected SyncThread thread; + /** Report of the query execution. It stays NULL until the execution ends. */ protected TAPExecutionReport execReport = null; + /** Date at which this synchronous job has really started. It is NULL when the job has never been started. + * + *

    Note: A synchronous job can be run just once ; so if an attempt of executing it again, the start date will be tested: + * if NULL, the second starting is not considered and an exception is thrown.

    */ private Date startedAt = null; - public TAPSyncJob(final ServiceConnection service, final TAPParameters params) throws NullPointerException{ + /** + * Create a synchronous TAP job. + * + * @param service Description of the TAP service which is in charge of this synchronous job. + * @param params Parameters of the query to execute. It must mainly contain the ADQL query to execute. + * + * @throws NullPointerException If one of the parameters is NULL. + */ + public TAPSyncJob(final ServiceConnection service, final TAPParameters params) throws NullPointerException{ if (params == null) throw new NullPointerException("Missing TAP parameters ! => Impossible to create a synchronous TAP job."); tapParams = params; @@ -63,8 +105,8 @@ public class TAPSyncJob { * *

    By default: "S"+System.currentTimeMillis()+UpperCharacter (UpperCharacter: one upper-case character: A, B, C, ....)

    * - *

    note: DO NOT USE in this function any of the following functions: {@link #getLogger()}, - * {@link #getFileManager()} and {@link #getFactory()}. All of them will return NULL, because this job does not + *

    note: DO NOT USE in this function any of the following functions: {@link ServiceConnection#getLogger()}, + * {@link ServiceConnection#getFileManager()} and {@link ServiceConnection#getFactory()}. All of them will return NULL, because this job does not * yet know its jobs list (which is needed to know the UWS and so, all of the objects returned by these functions).

    * * @return A unique job identifier. @@ -79,94 +121,214 @@ public class TAPSyncJob { return generatedId; } + /** + * Get the ID of this synchronous job. + * + * @return The job ID. + */ public final String getID(){ return ID; } + /** + * Get the TAP parameters provided by the user and which will be used for the execution of this job. + * + * @return Job parameters. + */ public final TAPParameters getTapParams(){ return tapParams; } + /** + * Get the report of the execution of this job. + * This report is NULL if the execution has not yet started. + * + * @return Report of this job execution. + */ public final TAPExecutionReport getExecReport(){ return execReport; } - public synchronized boolean start(final HttpServletResponse response) throws IllegalStateException, UWSException, TAPException{ + /** + *

    Start the execution of this job in order to execute the given ADQL query.

    + * + *

    The execution itself will be processed by an {@link ADQLExecutor} inside a thread ({@link SyncThread}).

    + * + *

    Important: + * No error should be written in this function. If any error occurs it should be thrown, in order to be manager on a top level. + *

    + * + * @param response Response in which the result must be written. + * + * @return true if the execution was successful, false otherwise. + * + * @throws IllegalStateException If this synchronous job has already been started before. + * @throws IOException If any error occurs while writing the query result in the given {@link HttpServletResponse}. + * @throws TAPException If any error occurs while executing the ADQL query. + * + * @see SyncThread + */ + public synchronized boolean start(final HttpServletResponse response) throws IllegalStateException, IOException, TAPException{ if (startedAt != null) - throw new IllegalStateException("Impossible to restart a synchronous TAP query !"); + throw new IllegalStateException("Impossible to restart a synchronous TAP query!"); + + // Log the start of this sync job: + service.getLogger().logTAP(LogLevel.INFO, this, "START", "Synchronous job " + ID + " is starting!", null); - ADQLExecutor executor; + // Create the object having the knowledge about how to execute an ADQL query: + ADQLExecutor executor = service.getFactory().createADQLExecutor(); try{ - executor = service.getFactory().createADQLExecutor(); - }catch(TAPException e){ - // TODO Log this error ! - return true; + executor.initDBConnection(ID); + }catch(TAPException te){ + service.getLogger().logDB(LogLevel.ERROR, null, "CONNECTION_LACK", "No more database connection available for the moment!", te); + service.getLogger().logTAP(LogLevel.ERROR, this, "END", "Synchronous job " + ID + " execution aborted: no database connection available!", null); + throw new TAPException("TAP service too busy! No connection available for the moment. 
You should try later or create an asynchronous query (which will be executed when enough resources will be available again).", UWSException.SERVICE_UNAVAILABLE); } + + // Give to a thread which will execute the query: thread = new SyncThread(executor, ID, tapParams, response); thread.start(); - boolean timeout = false; + // Wait the end of the thread until the maximum execution duration is reached: + boolean timeout = false; try{ - System.out.println("Joining..."); - thread.join(tapParams.getExecutionDuration()); + // wait the end: + thread.join(tapParams.getExecutionDuration() * 1000); + // if still alive after this duration, interrupt it: if (thread.isAlive()){ timeout = true; - System.out.println("Aborting..."); thread.interrupt(); thread.join(waitForStop); } }catch(InterruptedException ie){ - ; + /* Having a such exception here, is not surprising, because we may have interrupted the thread! */ }finally{ + // Whatever the way the execution stops (normal, cancel or error), an execution report must be fulfilled: execReport = thread.getExecutionReport(); } - if (!thread.isSuccess()){ + // Report any error that may have occurred while the thread execution: + Throwable error = thread.getError(); + // CASE: TIMEOUT + if (timeout && error != null && error instanceof InterruptedException){ + // Log the timeout: if (thread.isAlive()) - throw new TAPException("Time out (=" + tapParams.getExecutionDuration() + "ms) ! However, the thread (synchronous query) can not be stopped !", HttpServletResponse.SC_INTERNAL_SERVER_ERROR); - else if (timeout) - throw new TAPException("Time out ! 
The execution of this synchronous TAP query was limited to " + tapParams.getExecutionDuration() + "ms.", HttpServletResponse.SC_INTERNAL_SERVER_ERROR); + service.getLogger().logTAP(LogLevel.WARNING, this, "TIME_OUT", "Time out (after " + tapParams.getExecutionDuration() + "ms) for the synchonous job " + ID + ", but the thread can not be interrupted!", null); + else + service.getLogger().logTAP(LogLevel.INFO, this, "TIME_OUT", "Time out (after " + tapParams.getExecutionDuration() + "ms) for the synchonous job " + ID + ".", null); + + // Report the timeout to the user: + throw new TAPException("Time out! The execution of this synchronous TAP query was limited to " + tapParams.getExecutionDuration() + "ms. You should try again but in asynchronous execution.", UWSException.ACCEPTED_BUT_NOT_COMPLETE); + } + // CASE: ERRORS + else if (!thread.isSuccess()){ + // INTERRUPTION: + if (error instanceof InterruptedException){ + // log the unexpected interruption (unexpected because not caused by a timeout): + service.getLogger().logTAP(LogLevel.ERROR, this, "END", "The execution of the synchronous job " + ID + " has been unexpectedly interrupted!", error); + // report the unexpected interruption to the user: + throw new TAPException("The execution of this synchronous job " + ID + " has been unexpectedly aborted!", UWSException.ACCEPTED_BUT_NOT_COMPLETE); + } + // REQUEST ABORTION: + else if (error instanceof IOException){ + // log the unexpected interruption (unexpected because not caused by a timeout): + service.getLogger().logTAP(LogLevel.INFO, this, "END", "Abortion of the synchronous job " + ID + "! 
Cause: connection with the HTTP client unexpectedly closed.", null); + // throw the error until the TAP instance to notify it about the abortion: + throw (IOException)error; + } + // TAP EXCEPTION: + else if (error instanceof TAPException){ + // log the error: + service.getLogger().logTAP(LogLevel.ERROR, this, "END", "The following error interrupted the execution of the synchronous job " + ID + ".", error); + // report the error to the user: + throw (TAPException)error; + } + // ANY OTHER EXCEPTION: else{ - Throwable t = thread.getError(); - if (t instanceof InterruptedException) - throw new TAPException("The execution of this synchronous TAP query has been unexpectedly aborted !"); - else if (t instanceof UWSException) - throw (UWSException)t; + // log the error: + service.getLogger().logTAP(LogLevel.FATAL, this, "END", "The following GRAVE error interrupted the execution of the synchronous job " + ID + ".", error); + // report the error to the user: + if (error instanceof Error) + throw (Error)error; else - throw new TAPException(t); + throw new TAPException(error); } - } + }else + service.getLogger().logTAP(LogLevel.INFO, this, "END", "Success of the synchronous job " + ID + ".", null); - return thread.isInterrupted(); + return thread.isSuccess(); } - public class SyncThread extends Thread { + /** + *

    Thread which will process the job execution.

    + * + *

    + * Actually, it will basically just call {@link ADQLExecutor#start(Thread, String, TAPParameters, HttpServletResponse)} + * with the given {@link ADQLExecutor} and TAP parameters (containing the ADQL query to execute). + *

    + * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (04/2015) + */ + protected class SyncThread extends Thread { - private final String taskDescription; - public final ADQLExecutor executor; + /** Object knowing how to execute an ADQL query and which will execute it by calling {@link ADQLExecutor#start(Thread, String, TAPParameters, HttpServletResponse)}. */ + protected final ADQLExecutor executor; + /** Response in which the query result must be written. No error should be written in it directly at this level ; + * the error must be propagated and it will be written in this HTTP response later on a top level. */ protected final HttpServletResponse response; + /** ID of this thread. It is also the ID of the synchronous job owning this thread. */ protected final String ID; + /** Parameters containing the ADQL query to execute and other execution parameters/options. */ protected final TAPParameters tapParams; + + /** Exception that occurs while executing this thread. NULL if the execution was a success. */ protected Throwable exception = null; + /** Query execution report. NULL if the execution has not yet started. */ protected TAPExecutionReport report = null; - public SyncThread(final ADQLExecutor executor, final String ID, final TAPParameters tapParams, final HttpServletResponse response){ + /** + * Create a thread that will run the given executor with the given parameters. + * + * @param executor Object to execute and which knows how to execute an ADQL query. + * @param ID ID of the synchronous job owning this thread. + * @param tapParams TAP parameters to use to get the query to execute and the execution parameters. + * @param response HTTP response in which the ADQL query result must be written. 
+ */ + public SyncThread(final ADQLExecutor executor, final String ID, final TAPParameters tapParams, final HttpServletResponse response){ super(JobThread.tg, ID); - taskDescription = "Executing the synchronous TAP query " + ID; this.executor = executor; this.ID = ID; this.tapParams = tapParams; this.response = response; } + /** + * Tell whether the execution has ended with success. + * + * @return true if the query has been successfully executed, + * false otherwise (or if this thread is still executed). + */ public final boolean isSuccess(){ return !isAlive() && report != null && exception == null; } + /** + * Get the error that has interrupted/stopped this thread. + * This function returns NULL if the query has been successfully executed. + * + * @return Error that occurs while executing the query + * or NULL if the execution was a success. + */ public final Throwable getError(){ return exception; } + /** + * Get the report of the query execution. + * + * @return Query execution report. 
+ */ public final TAPExecutionReport getExecutionReport(){ return report; } @@ -174,17 +336,30 @@ public class TAPSyncJob { @Override public void run(){ // Log the start of this thread: - executor.getLogger().threadStarted(this, taskDescription); + executor.getLogger().logThread(LogLevel.INFO, thread, "START", "Synchronous thread \"" + ID + "\" started.", null); try{ + // Execute the ADQL query: report = executor.start(this, ID, tapParams, response); - executor.getLogger().threadFinished(this, taskDescription); + + // Log the successful end of this thread: + executor.getLogger().logThread(LogLevel.INFO, thread, "END", "Synchronous thread \"" + ID + "\" successfully ended.", null); + }catch(Throwable e){ + + // Save the exception for later reporting: exception = e; - if (e instanceof InterruptedException){ - // Log the abortion: - executor.getLogger().threadInterrupted(this, taskDescription, e); - } + + // Log the end of the job: + if (e instanceof InterruptedException || e instanceof IOException) + // Abortion: + executor.getLogger().logThread(LogLevel.INFO, this, "END", "Synchronous thread \"" + ID + "\" cancelled.", null); + else if (e instanceof TAPException) + // Error: + executor.getLogger().logThread(LogLevel.ERROR, this, "END", "Synchronous thread \"" + ID + "\" ended with an error.", null); + else + // GRAVE error: + executor.getLogger().logThread(LogLevel.FATAL, this, "END", "Synchronous thread \"" + ID + "\" ended with a FATAL error.", null); } } diff --git a/src/tap/backup/DefaultTAPBackupManager.java b/src/tap/backup/DefaultTAPBackupManager.java index 4e4a86f48d84062a03a162b2a9fd8e1bc00c1366..5e740ddd33e13f8630969c81fd05ae4ec7ba05b1 100644 --- a/src/tap/backup/DefaultTAPBackupManager.java +++ b/src/tap/backup/DefaultTAPBackupManager.java @@ -16,105 +16,288 @@ package tap.backup; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . 
* - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ +import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; + +import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; +import org.json.Json4Uws; import tap.ExecutionProgression; import tap.TAPExecutionReport; import tap.TAPJob; +import tap.parameters.DALIUpload; import uws.UWSException; import uws.job.UWSJob; import uws.service.UWS; import uws.service.backup.DefaultUWSBackupManager; +import uws.service.log.UWSLog.LogLevel; +import uws.service.request.UploadFile; +/** + *

    Let backup all TAP asynchronous jobs.

    + * + *

    note: Basically the saved data are the same, but in addition some execution statistics are also added.

    + * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (12/2014) + * + * @see DefaultUWSBackupManager + */ public class DefaultTAPBackupManager extends DefaultUWSBackupManager { + /** + * Build a default TAP jobs backup manager. + * + * @param uws The UWS containing all the jobs to backup. + * + * @see DefaultUWSBackupManager#DefaultUWSBackupManager(UWS) + */ public DefaultTAPBackupManager(UWS uws){ super(uws); } + /** + * Build a default TAP jobs backup manager. + * + * @param uws The UWS containing all the jobs to backup. + * @param frequency The backup frequency (in ms ; MUST BE positive and different from 0. + * If negative or 0, the frequency will be automatically set to DEFAULT_FREQUENCY). + * + * @see DefaultUWSBackupManager#DefaultUWSBackupManager(UWS, long) + */ public DefaultTAPBackupManager(UWS uws, long frequency){ super(uws, frequency); } + /** + * Build a default TAP jobs backup manager. + * + * @param uws The UWS containing all the jobs to backup. + * @param byUser Backup mode. + * + * @see DefaultUWSBackupManager#DefaultUWSBackupManager(UWS, boolean) + */ public DefaultTAPBackupManager(UWS uws, boolean byUser) throws UWSException{ super(uws, byUser); } + /** + * Build a default TAP jobs backup manager. + * + * @param uws The UWS containing all the jobs to backup. + * @param byUser Backup mode. + * @param frequency The backup frequency (in ms ; MUST BE positive and different from 0. + * If negative or 0, the frequency will be automatically set to DEFAULT_FREQUENCY). 
+ * + * @see DefaultUWSBackupManager#DefaultUWSBackupManager(UWS, boolean, long) + */ public DefaultTAPBackupManager(UWS uws, boolean byUser, long frequency) throws UWSException{ super(uws, byUser, frequency); } @Override protected JSONObject getJSONJob(UWSJob job, String jlName) throws UWSException, JSONException{ - JSONObject json = super.getJSONJob(job, jlName); + JSONObject jsonJob = Json4Uws.getJson(job); + + // Re-Build the parameters map, by separating the uploads and the "normal" parameters: + JSONArray uploads = new JSONArray(); + JSONObject params = new JSONObject(); + Object val; + for(String name : job.getAdditionalParameters()){ + // get the raw value: + val = job.getAdditionalParameterValue(name); + // if no value, skip this item: + if (val == null) + continue; + // if an array, build a JSON array of strings: + else if (val.getClass().isArray()){ + JSONArray array = new JSONArray(); + for(Object o : (Object[])val){ + if (o != null && o instanceof DALIUpload) + array.put(getDALIUploadJson((DALIUpload)o)); + else if (o != null) + array.put(o.toString()); + } + params.put(name, array); + } + // if upload file: + else if (val instanceof UploadFile) + uploads.put(getUploadJson((UploadFile)val)); + // if DALIUpload: + else if (val instanceof DALIUpload) + params.put(name, getDALIUploadJson((DALIUpload)val)); + // otherwise, just put the value: + else + params.put(name, val); + } + // Deal with the execution report of the job: if (job instanceof TAPJob && ((TAPJob)job).getExecReport() != null){ TAPExecutionReport execReport = ((TAPJob)job).getExecReport(); + // Build the JSON representation of the execution report of this job: JSONObject jsonExecReport = new JSONObject(); jsonExecReport.put("success", execReport.success); - jsonExecReport.put("sql", execReport.sqlTranslation); jsonExecReport.put("uploadduration", execReport.getUploadDuration()); jsonExecReport.put("parsingduration", execReport.getParsingDuration()); - 
jsonExecReport.put("translationduration", execReport.getTranslationDuration()); jsonExecReport.put("executionduration", execReport.getExecutionDuration()); jsonExecReport.put("formattingduration", execReport.getFormattingDuration()); jsonExecReport.put("totalduration", execReport.getTotalDuration()); - JSONObject params = json.getJSONObject(UWSJob.PARAM_PARAMETERS); - if (params == null) - params = new JSONObject(); + // Add the execution report into the parameters list: params.put("tapexecreport", jsonExecReport); - - json.put(UWSJob.PARAM_PARAMETERS, params); } - return json; + // Add the parameters and the uploads inside the JSON representation of the job: + jsonJob.put(UWSJob.PARAM_PARAMETERS, params); + jsonJob.put("uwsUploads", uploads); + + // Add the job owner: + jsonJob.put(UWSJob.PARAM_OWNER, (job != null && job.getOwner() != null) ? job.getOwner().getID() : null); + + // Add the name of the job list owning the given job: + jsonJob.put("jobListName", jlName); + + return jsonJob; + } + + /** + * Get the JSON representation of the given {@link DALIUpload}. + * + * @param upl The DALI upload specification to serialize in JSON. + * + * @return Its JSON representation. + * + * @throws JSONException If there is an error while building the JSON object. + * + * @since 2.0 + */ + protected JSONObject getDALIUploadJson(final DALIUpload upl) throws JSONException{ + if (upl == null) + return null; + JSONObject o = new JSONObject(); + o.put("label", upl.label); + o.put("uri", upl.uri); + o.put("file", (upl.file == null ? 
null : upl.file.paramName)); + return o; } @Override protected void restoreOtherJobParams(JSONObject json, UWSJob job) throws UWSException{ - if (job != null && json != null && job instanceof TAPJob){ - TAPJob tapJob = (TAPJob)job; - Object obj = job.getAdditionalParameterValue("tapexecreport"); - if (obj != null){ - if (obj instanceof JSONObject){ - JSONObject jsonExecReport = (JSONObject)obj; - TAPExecutionReport execReport = new TAPExecutionReport(job.getJobId(), false, tapJob.getTapParams()); - String[] keys = JSONObject.getNames(jsonExecReport); - for(String key : keys){ + // 0. Nothing to do in this function if the job is missing OR if it is not an instance of TAPJob: + if (job == null || !(job instanceof TAPJob)) + return; + + // 1. Build correctly the TAP UPLOAD parameter (the value of this parameter should be an array of DALIUpload): + if (json != null && json.has(TAPJob.PARAM_PARAMETERS)){ + try{ + // Retrieve the whole list of parameters: + JSONObject params = json.getJSONObject(TAPJob.PARAM_PARAMETERS); + // If there is an UPLOAD parameter, convert the JSON array into a DALIUpload[] and add it to the job: + if (params.has(TAPJob.PARAM_UPLOAD)){ + // retrieve the JSON array: + JSONArray uploads = params.getJSONArray(TAPJob.PARAM_UPLOAD); + // for each item of this array, build the corresponding DALIUpload and add it into an ArrayList: + DALIUpload upl; + ArrayList lstTAPUploads = new ArrayList(); + for(int i = 0; i < uploads.length(); i++){ try{ - if (key.equalsIgnoreCase("success")) - execReport.success = jsonExecReport.getBoolean(key); - else if (key.equalsIgnoreCase("sql")) - execReport.sqlTranslation = jsonExecReport.getString(key); - else if (key.equalsIgnoreCase("uploadduration")) - execReport.setDuration(ExecutionProgression.UPLOADING, jsonExecReport.getLong(key)); - else if (key.equalsIgnoreCase("parsingduration")) - execReport.setDuration(ExecutionProgression.PARSING, jsonExecReport.getLong(key)); - else if 
(key.equalsIgnoreCase("translationduration")) - execReport.setDuration(ExecutionProgression.TRANSLATING, jsonExecReport.getLong(key)); - else if (key.equalsIgnoreCase("executionduration")) - execReport.setDuration(ExecutionProgression.EXECUTING_SQL, jsonExecReport.getLong(key)); - else if (key.equalsIgnoreCase("formattingduration")) - execReport.setDuration(ExecutionProgression.WRITING_RESULT, jsonExecReport.getLong(key)); - else if (key.equalsIgnoreCase("totalduration")) - execReport.setTotalDuration(jsonExecReport.getLong(key)); - else - getLogger().warning("The execution report attribute '" + key + "' of the job \"" + job.getJobId() + "\" has been ignored because unknown !"); + upl = getDALIUpload(uploads.getJSONObject(i), job); + if (upl != null) + lstTAPUploads.add(upl); }catch(JSONException je){ - getLogger().error("[restoration] Incorrect JSON format for the execution report serialization of the job \"" + job.getJobId() + "\" (attribute: \"" + key + "\") !", je); + getLogger().logUWS(LogLevel.ERROR, uploads.get(i), "RESTORATION", "Incorrect JSON format for a DALIUpload of the job \"" + job.getJobId() + "\": a JSONObject was expected!", null); } } - tapJob.setExecReport(execReport); - }else if (!(obj instanceof JSONObject)) - getLogger().warning("[restoration] Impossible to restore the execution report of the job \"" + job.getJobId() + "\" because the stored object is not a JSONObject !"); - } + // finally convert the ArrayList into a DALIUpload[] and add it inside the parameters list of the job: + job.addOrUpdateParameter(TAPJob.PARAM_UPLOAD, lstTAPUploads.toArray(new DALIUpload[lstTAPUploads.size()])); + } + }catch(JSONException ex){} + } + + // 2. 
Get the execution report and add it into the given job: + TAPJob tapJob = (TAPJob)job; + Object obj = job.getAdditionalParameterValue("tapexecreport"); + if (obj != null){ + if (obj instanceof JSONObject){ + JSONObject jsonExecReport = (JSONObject)obj; + TAPExecutionReport execReport = new TAPExecutionReport(job.getJobId(), false, tapJob.getTapParams()); + String[] keys = JSONObject.getNames(jsonExecReport); + for(String key : keys){ + try{ + if (key.equalsIgnoreCase("success")) + execReport.success = jsonExecReport.getBoolean(key); + else if (key.equalsIgnoreCase("uploadduration")) + execReport.setDuration(ExecutionProgression.UPLOADING, jsonExecReport.getLong(key)); + else if (key.equalsIgnoreCase("parsingduration")) + execReport.setDuration(ExecutionProgression.PARSING, jsonExecReport.getLong(key)); + else if (key.equalsIgnoreCase("executionduration")) + execReport.setDuration(ExecutionProgression.EXECUTING_ADQL, jsonExecReport.getLong(key)); + else if (key.equalsIgnoreCase("formattingduration")) + execReport.setDuration(ExecutionProgression.WRITING_RESULT, jsonExecReport.getLong(key)); + else if (key.equalsIgnoreCase("totalduration")) + execReport.setTotalDuration(jsonExecReport.getLong(key)); + else + getLogger().logUWS(LogLevel.WARNING, obj, "RESTORATION", "The execution report attribute '" + key + "' of the job \"" + job.getJobId() + "\" has been ignored because unknown!", null); + }catch(JSONException je){ + getLogger().logUWS(LogLevel.ERROR, obj, "RESTORATION", "Incorrect JSON format for the execution report serialization of the job \"" + job.getJobId() + "\" (attribute: \"" + key + "\")!", je); + } + } + tapJob.setExecReport(execReport); + }else if (!(obj instanceof JSONObject)) + getLogger().logUWS(LogLevel.WARNING, obj, "RESTORATION", "Impossible to restore the execution report of the job \"" + job.getJobId() + "\" because the stored object is not a JSONObject!", null); } } + /** + * Restore a {@link DALIUpload} from its JSON representation. 
+ * + * @param item {@link JSONObject} representing the {@link DALIUpload} to restore. + * @param job The job which owns this upload. + * + * @return The corresponding {@link DALIUpload} or NULL, if an error occurs while converting the JSON. + * + * @since 2.0 + */ + private DALIUpload getDALIUpload(final JSONObject item, final UWSJob job){ + try{ + + // Get its label: + String label = item.getString("label"); + + // Build the DALIUpload object: + /* If the upload spec. IS A FILE, the attribute 'file' should point toward a job parameter + * being an UploadFile. If so, get it and use it to build the DALIUpload: */ + if (item.has("file")){ + Object f = job.getAdditionalParameterValue(item.getString("file")); + if (f == null || !(f instanceof UploadFile)) + getLogger().logUWS(LogLevel.ERROR, item, "RESTORATION", "Incorrect JSON format for the DALIUpload labelled \"" + label + "\" of the job \"" + job.getJobId() + "\": \"" + item.getString("file") + "\" is not pointing a job parameter representing a file!", null); + return new DALIUpload(label, (UploadFile)f); + } + /* If the upload spec. IS A URI, the attribute 'uri' should contain it + * and should be used to build the DALIUpload: */ + else if (item.has("uri")){ + try{ + return new DALIUpload(label, new URI(item.getString("uri")), uws.getFileManager()); + }catch(URISyntaxException e){ + getLogger().logUWS(LogLevel.ERROR, item, "RESTORATION", "Incorrect URI for the DALIUpload labelled \"" + label + "\" of the job \"" + job.getJobId() + "\": \"" + item.getString("uri") + "\"!", null); + } + } + /* If none of this both attribute is provided, it is an error and it is not possible to build the DALIUpload. 
*/ + else + getLogger().logUWS(LogLevel.ERROR, item, "RESTORATION", "Incorrect JSON format for the DALIUpload labelled \"" + label + "\" of the job \"" + job.getJobId() + "\": missing attribute 'file' or 'uri'!", null); + + }catch(JSONException je){ + getLogger().logUWS(LogLevel.ERROR, item, "RESTORATION", "Incorrect JSON format for a DALIUpload of the job \"" + job.getJobId() + "\": missing attribute 'label'!", null); + } + + return null; + } } diff --git a/src/tap/config/ConfigurableServiceConnection.java b/src/tap/config/ConfigurableServiceConnection.java new file mode 100644 index 0000000000000000000000000000000000000000..058740a8575ff94bf8941482da4a2360e9c5e565 --- /dev/null +++ b/src/tap/config/ConfigurableServiceConnection.java @@ -0,0 +1,1594 @@ +package tap.config; + +/* + * This file is part of TAPLibrary. + * + * TAPLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * TAPLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with TAPLibrary. If not, see . 
+ * + * Copyright 2015 - Astronomisches Rechen Institut (ARI) + */ + +import static tap.config.TAPConfiguration.DEFAULT_ASYNC_FETCH_SIZE; +import static tap.config.TAPConfiguration.DEFAULT_DIRECTORY_PER_USER; +import static tap.config.TAPConfiguration.DEFAULT_EXECUTION_DURATION; +import static tap.config.TAPConfiguration.DEFAULT_GROUP_USER_DIRECTORIES; +import static tap.config.TAPConfiguration.DEFAULT_MAX_ASYNC_JOBS; +import static tap.config.TAPConfiguration.DEFAULT_RETENTION_PERIOD; +import static tap.config.TAPConfiguration.DEFAULT_SYNC_FETCH_SIZE; +import static tap.config.TAPConfiguration.DEFAULT_UPLOAD_MAX_FILE_SIZE; +import static tap.config.TAPConfiguration.KEY_ASYNC_FETCH_SIZE; +import static tap.config.TAPConfiguration.KEY_COORD_SYS; +import static tap.config.TAPConfiguration.KEY_DEFAULT_EXECUTION_DURATION; +import static tap.config.TAPConfiguration.KEY_DEFAULT_OUTPUT_LIMIT; +import static tap.config.TAPConfiguration.KEY_DEFAULT_RETENTION_PERIOD; +import static tap.config.TAPConfiguration.KEY_DEFAULT_UPLOAD_LIMIT; +import static tap.config.TAPConfiguration.KEY_DIRECTORY_PER_USER; +import static tap.config.TAPConfiguration.KEY_FILE_MANAGER; +import static tap.config.TAPConfiguration.KEY_FILE_ROOT_PATH; +import static tap.config.TAPConfiguration.KEY_GEOMETRIES; +import static tap.config.TAPConfiguration.KEY_GROUP_USER_DIRECTORIES; +import static tap.config.TAPConfiguration.KEY_LOG_ROTATION; +import static tap.config.TAPConfiguration.KEY_MAX_ASYNC_JOBS; +import static tap.config.TAPConfiguration.KEY_MAX_EXECUTION_DURATION; +import static tap.config.TAPConfiguration.KEY_MAX_OUTPUT_LIMIT; +import static tap.config.TAPConfiguration.KEY_MAX_RETENTION_PERIOD; +import static tap.config.TAPConfiguration.KEY_MAX_UPLOAD_LIMIT; +import static tap.config.TAPConfiguration.KEY_METADATA; +import static tap.config.TAPConfiguration.KEY_METADATA_FILE; +import static tap.config.TAPConfiguration.KEY_MIN_LOG_LEVEL; +import static tap.config.TAPConfiguration.KEY_OUTPUT_FORMATS; 
+import static tap.config.TAPConfiguration.KEY_PROVIDER_NAME; +import static tap.config.TAPConfiguration.KEY_SERVICE_DESCRIPTION; +import static tap.config.TAPConfiguration.KEY_SYNC_FETCH_SIZE; +import static tap.config.TAPConfiguration.KEY_TAP_FACTORY; +import static tap.config.TAPConfiguration.KEY_UDFS; +import static tap.config.TAPConfiguration.KEY_UPLOAD_ENABLED; +import static tap.config.TAPConfiguration.KEY_UPLOAD_MAX_FILE_SIZE; +import static tap.config.TAPConfiguration.KEY_USER_IDENTIFIER; +import static tap.config.TAPConfiguration.VALUE_ALL; +import static tap.config.TAPConfiguration.VALUE_ANY; +import static tap.config.TAPConfiguration.VALUE_CSV; +import static tap.config.TAPConfiguration.VALUE_DB; +import static tap.config.TAPConfiguration.VALUE_FITS; +import static tap.config.TAPConfiguration.VALUE_HTML; +import static tap.config.TAPConfiguration.VALUE_JSON; +import static tap.config.TAPConfiguration.VALUE_LOCAL; +import static tap.config.TAPConfiguration.VALUE_NONE; +import static tap.config.TAPConfiguration.VALUE_SV; +import static tap.config.TAPConfiguration.VALUE_TEXT; +import static tap.config.TAPConfiguration.VALUE_TSV; +import static tap.config.TAPConfiguration.VALUE_VOT; +import static tap.config.TAPConfiguration.VALUE_VOTABLE; +import static tap.config.TAPConfiguration.VALUE_XML; +import static tap.config.TAPConfiguration.fetchClass; +import static tap.config.TAPConfiguration.getProperty; +import static tap.config.TAPConfiguration.isClassName; +import static tap.config.TAPConfiguration.newInstance; +import static tap.config.TAPConfiguration.parseLimit; + +import java.io.File; +import java.io.IOException; +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import java.util.Properties; + +import tap.ServiceConnection; +import tap.TAPException; +import 
tap.TAPFactory; +import tap.db.DBConnection; +import tap.formatter.FITSFormat; +import tap.formatter.HTMLFormat; +import tap.formatter.JSONFormat; +import tap.formatter.OutputFormat; +import tap.formatter.SVFormat; +import tap.formatter.TextFormat; +import tap.formatter.VOTableFormat; +import tap.log.DefaultTAPLog; +import tap.log.TAPLog; +import tap.metadata.TAPMetadata; +import tap.metadata.TableSetParser; +import uk.ac.starlink.votable.DataFormat; +import uk.ac.starlink.votable.VOTableVersion; +import uws.UWSException; +import uws.service.UserIdentifier; +import uws.service.file.LocalUWSFileManager; +import uws.service.file.UWSFileManager; +import uws.service.log.UWSLog.LogLevel; +import adql.db.FunctionDef; +import adql.db.STCS; +import adql.parser.ParseException; +import adql.query.operand.function.UserDefinedFunction; + +/** + *

    Concrete implementation of {@link ServiceConnection}, fully parameterized with a TAP configuration file.

    + * + *

    + * Every aspects of the TAP service are configured here. This instance is also creating the {@link TAPFactory} using the + * TAP configuration file thanks to the implementation {@link ConfigurableTAPFactory}. + *

    + * + * @author Grégory Mantelet (ARI) + * @version 2.0 (04/2015) + * @since 2.0 + */ +public final class ConfigurableServiceConnection implements ServiceConnection { + + /** File manager to use in the TAP service. */ + private UWSFileManager fileManager; + + /** Object to use in the TAP service in order to log different types of messages (e.g. DEBUG, INFO, WARNING, ERROR, FATAL). */ + private TAPLog logger; + + /** Factory which can create different types of objects for the TAP service (e.g. database connection). */ + private TAPFactory tapFactory; + + /** Object gathering all metadata of this TAP service. */ + private final TAPMetadata metadata; + + /** Name of the organization/person providing the TAP service. */ + private final String providerName; + /** Description of the TAP service. */ + private final String serviceDescription; + + /** Indicate whether the TAP service is available or not. */ + private boolean isAvailable = false; // the TAP service must be disabled until the end of its connection initialization + /** Description of the available or unavailable state of the TAP service. */ + private String availability = "TAP service not yet initialized."; + + /** Maximum number of asynchronous jobs that can run simultaneously. */ + private int maxAsyncJobs = DEFAULT_MAX_ASYNC_JOBS; + + /** Array of 2 integers: resp. default and maximum execution duration. + * Both duration are expressed in milliseconds. */ + private int[] executionDuration = new int[2]; + /** Array of 2 integers: resp. default and maximum retention period. + * Both period are expressed in seconds. */ + private int[] retentionPeriod = new int[2]; + + /** List of all available output formatters. */ + private final ArrayList outputFormats; + + /** Array of 2 integers: resp. default and maximum output limit. + * Each limit is expressed in a unit specified in the array {@link #outputLimitTypes}. */ + private int[] outputLimits = new int[]{-1,-1}; + /** Array of 2 limit units: resp. 
unit of the default output limit and unit of the maximum output limit. */ + private LimitUnit[] outputLimitTypes = new LimitUnit[2]; + + /** Indicate whether the UPLOAD feature is enabled or not. */ + private boolean isUploadEnabled = false; + /** Array of 2 integers: resp. default and maximum upload limit. + * Each limit is expressed in a unit specified in the array {@link #uploadLimitTypes}. */ + private int[] uploadLimits = new int[]{-1,-1}; + /** Array of 2 limit units: resp. unit of the default upload limit and unit of the maximum upload limit. */ + private LimitUnit[] uploadLimitTypes = new LimitUnit[2]; + /** The maximum size of a set of uploaded files. + * This size is expressed in bytes. */ + private int maxUploadSize = DEFAULT_UPLOAD_MAX_FILE_SIZE; + + /** Array of 2 integers: resp. default and maximum fetch size. + * Both sizes are expressed in number of rows. */ + private int[] fetchSize = new int[]{DEFAULT_ASYNC_FETCH_SIZE,DEFAULT_SYNC_FETCH_SIZE}; + + /** The method to use in order to identify a TAP user. */ + private UserIdentifier userIdentifier = null; + + /** List of all allowed coordinate systems. + * If NULL, all coord. sys. are allowed. If empty list, none is allowed. */ + private ArrayList lstCoordSys = null; + + /** List of all allowed ADQL geometrical functions. + * If NULL, all geometries are allowed. If empty list, none is allowed. */ + private ArrayList geometries = null; + private final String GEOMETRY_REGEXP = "(AREA|BOX|CENTROID|CIRCLE|CONTAINS|DISTANCE|COORD1|COORD2|COORDSYS|INTERSECTS|POINT|POLYGON|REGION)"; + + /** List of all known and allowed User Defined Functions. + * If NULL, any unknown function is allowed. If empty list, none is allowed. */ + private Collection udfs = new ArrayList(0); + + /** + * Create a TAP service description thanks to the given TAP configuration file. + * + * @param tapConfig The content of the TAP configuration file. + * + * @throws NullPointerException If the given properties set is NULL. 
+ * @throws TAPException If a property is wrong or missing. + */ + public ConfigurableServiceConnection(final Properties tapConfig) throws NullPointerException, TAPException{ + this(tapConfig, null); + } + + /** + * Create a TAP service description thanks to the given TAP configuration file. + * + * @param tapConfig The content of the TAP configuration file. + * @param webAppRootDir The directory of the Web Application running this TAP service. + * In this directory another directory may be created in order to store all TAP service files + * if none is specified in the given TAP configuration file. + * + * @throws NullPointerException If the given properties set is NULL. + * @throws TAPException If a property is wrong or missing. + */ + public ConfigurableServiceConnection(final Properties tapConfig, final String webAppRootDir) throws NullPointerException, TAPException{ + if (tapConfig == null) + throw new NullPointerException("Missing TAP properties! "); + + // 1. INITIALIZE THE FILE MANAGER: + initFileManager(tapConfig, webAppRootDir); + + // 2. CREATE THE LOGGER: + initLogger(tapConfig); + + // 3. BUILD THE TAP FACTORY: + initFactory(tapConfig); + + // 4. GET THE METADATA: + metadata = initMetadata(tapConfig, webAppRootDir); + + // 5. SET ALL GENERAL SERVICE CONNECTION INFORMATION: + providerName = getProperty(tapConfig, KEY_PROVIDER_NAME); + serviceDescription = getProperty(tapConfig, KEY_SERVICE_DESCRIPTION); + initMaxAsyncJobs(tapConfig); + initRetentionPeriod(tapConfig); + initExecutionDuration(tapConfig); + + // 6. CONFIGURE OUTPUT: + // default output format = VOTable: + outputFormats = new ArrayList(1); + // set output formats: + addOutputFormats(tapConfig); + // set output limits: + initOutputLimits(tapConfig); + // set fetch size: + initFetchSize(tapConfig); + + // 7. CONFIGURE THE UPLOAD: + // is upload enabled ? 
+ isUploadEnabled = Boolean.parseBoolean(getProperty(tapConfig, KEY_UPLOAD_ENABLED)); + // set upload limits: + initUploadLimits(tapConfig); + // set the maximum upload file size: + initMaxUploadSize(tapConfig); + + // 8. SET A USER IDENTIFIER: + initUserIdentifier(tapConfig); + + // 9. CONFIGURE ADQL: + initCoordSys(tapConfig); + initADQLGeometries(tapConfig); + initUDFs(tapConfig); + } + + /** + * Initialize the management of TAP service files using the given TAP configuration file. + * + * @param tapConfig The content of the TAP configuration file. + * @param webAppRootDir The directory of the Web Application running this TAP service. + * This directory may be used only to search the root TAP directory + * if specified with a relative path in the TAP configuration file. + * + * @throws TAPException If a property is wrong or missing, or if an error occurs while creating the file manager. + */ + private void initFileManager(final Properties tapConfig, final String webAppRootDir) throws TAPException{ + // Read the desired file manager: + String fileManagerType = getProperty(tapConfig, KEY_FILE_MANAGER); + if (fileManagerType == null) + throw new TAPException("The property \"" + KEY_FILE_MANAGER + "\" is missing! It is required to create a TAP Service. Two possible values: " + VALUE_LOCAL + " or a class name between {...}."); + else + fileManagerType = fileManagerType.trim(); + + // LOCAL file manager: + if (fileManagerType.equalsIgnoreCase(VALUE_LOCAL)){ + // Read the desired root path: + String rootPath = getProperty(tapConfig, KEY_FILE_ROOT_PATH); + if (rootPath == null) + throw new TAPException("The property \"" + KEY_FILE_ROOT_PATH + "\" is missing! It is required to create a TAP Service. 
Please provide a path toward a directory which will contain all files related to the service."); + File rootFile = getFile(rootPath, webAppRootDir, KEY_FILE_ROOT_PATH); + + // Determine whether there should be one directory for each user: + String propValue = getProperty(tapConfig, KEY_DIRECTORY_PER_USER); + boolean oneDirectoryPerUser = (propValue == null) ? DEFAULT_DIRECTORY_PER_USER : Boolean.parseBoolean(propValue); + + // Determine whether there should be one directory for each user: + propValue = getProperty(tapConfig, KEY_GROUP_USER_DIRECTORIES); + boolean groupUserDirectories = (propValue == null) ? DEFAULT_GROUP_USER_DIRECTORIES : Boolean.parseBoolean(propValue); + + // Build the Local TAP File Manager: + try{ + fileManager = new LocalUWSFileManager(rootFile, oneDirectoryPerUser, groupUserDirectories); + }catch(UWSException e){ + throw new TAPException("The property \"" + KEY_FILE_ROOT_PATH + "\" (" + rootPath + ") is incorrect: " + e.getMessage()); + } + } + // CUSTOM file manager: + else + fileManager = newInstance(fileManagerType, KEY_FILE_MANAGER, UWSFileManager.class, new Class[]{Properties.class}, new Object[]{tapConfig}); + } + + /** + *

    Resolve the given file name/path.

    + * + *

    Only the URI protocol "file:" is allowed. If the protocol is different a {@link TAPException} is thrown.

    + * + *

    + * If not an absolute URI, the given path may be either relative or absolute. A relative path is always considered + * as relative from the Web Application directory (supposed to be given in 2nd parameter). + *

    + * + * @param filePath URI/Path/Name of the file to get. + * @param webAppRootPath Web Application directory local path. + * @param propertyName Name of the property which gives the given file path. + * + * @return The specified File instance. + * + * @throws TAPException If the given URI is malformed or if the used URI scheme is different from "file:". + */ + protected static final File getFile(final String filePath, final String webAppRootPath, final String propertyName) throws TAPException{ + if (filePath == null) + return null; + + try{ + URI uri = new URI(filePath); + if (uri.isAbsolute()){ + if (uri.getScheme().equalsIgnoreCase("file")) + return new File(uri); + else + throw new TAPException("Incorrect file URI for the property \"" + propertyName + "\": \"" + filePath + "\"! Only URI with the protocol \"file:\" are allowed."); + }else{ + File f = new File(filePath); + if (f.isAbsolute()) + return f; + else + return new File(webAppRootPath, filePath); + } + }catch(URISyntaxException use){ + throw new TAPException("Incorrect file URI for the property \"" + propertyName + "\": \"" + filePath + "\"! Bad syntax for the given file URI.", use); + } + } + + /** + * Initialize the TAP logger with the given TAP configuration file. + * + * @param tapConfig The content of the TAP configuration file. 
+ */ + private void initLogger(final Properties tapConfig){ + // Create the logger: + logger = new DefaultTAPLog(fileManager); + + StringBuffer buf = new StringBuffer("Logger initialized"); + + // Set the minimum log level: + String propValue = getProperty(tapConfig, KEY_MIN_LOG_LEVEL); + if (propValue != null){ + try{ + ((DefaultTAPLog)logger).setMinLogLevel(LogLevel.valueOf(propValue.toUpperCase())); + }catch(IllegalArgumentException iae){} + } + buf.append(" (minimum log level: ").append(((DefaultTAPLog)logger).getMinLogLevel()); + + // Set the log rotation period, if any: + if (fileManager instanceof LocalUWSFileManager){ + propValue = getProperty(tapConfig, KEY_LOG_ROTATION); + if (propValue != null) + ((LocalUWSFileManager)fileManager).setLogRotationFreq(propValue); + buf.append(", log rotation: ").append(((LocalUWSFileManager)fileManager).getLogRotationFreq()); + } + + // Log the successful initialization with set parameters: + buf.append(")."); + logger.info(buf.toString()); + } + + /** + *

    Initialize the {@link TAPFactory} to use.

    + * + *

    + * The built factory is either a {@link ConfigurableTAPFactory} instance (by default) or + * an instance of the class specified in the TAP configuration file. + *

    + * + * @param tapConfig The content of the TAP configuration file. + * + * @throws TAPException If an error occurs while building the specified {@link TAPFactory}. + * + * @see ConfigurableTAPFactory + */ + private void initFactory(final Properties tapConfig) throws TAPException{ + String propValue = getProperty(tapConfig, KEY_TAP_FACTORY); + if (propValue == null) + tapFactory = new ConfigurableTAPFactory(this, tapConfig); + else + tapFactory = newInstance(propValue, KEY_TAP_FACTORY, TAPFactory.class, new Class[]{ServiceConnection.class}, new Object[]{this}); + } + + /** + * Initialize the TAP metadata (i.e. database schemas, tables and columns and their attached metadata). + * + * @param tapConfig The content of the TAP configuration file. + * @param webAppRootDir Web Application directory local path. + * This directory may be used if a relative path is given for an XML metadata file. + * + * @return The extracted TAP metadata. + * + * @throws TAPException If some TAP configuration file properties are wrong or missing, + * or if an error has occurred while extracting the metadata from the database or the XML file. + * + * @see DBConnection#getTAPSchema() + * @see TableSetParser + */ + private TAPMetadata initMetadata(final Properties tapConfig, final String webAppRootDir) throws TAPException{ + // Get the fetching method to use: + String metaFetchType = getProperty(tapConfig, KEY_METADATA); + if (metaFetchType == null) + throw new TAPException("The property \"" + KEY_METADATA + "\" is missing! It is required to create a TAP Service. 
Three possible values: " + VALUE_XML + " (to get metadata from a TableSet XML document), " + VALUE_DB + " (to fetch metadata from the database schema TAP_SCHEMA) or the name (between {}) of a class extending TAPMetadata."); + + TAPMetadata metadata = null; + + // GET METADATA FROM XML & UPDATE THE DATABASE (schema TAP_SCHEMA only): + if (metaFetchType.equalsIgnoreCase(VALUE_XML)){ + // Get the XML file path: + String xmlFilePath = getProperty(tapConfig, KEY_METADATA_FILE); + if (xmlFilePath == null) + throw new TAPException("The property \"" + KEY_METADATA_FILE + "\" is missing! According to the property \"" + KEY_METADATA + "\", metadata must be fetched from an XML document. The local file path of it MUST be provided using the property \"" + KEY_METADATA_FILE + "\"."); + + // Parse the XML document and build the corresponding metadata: + try{ + metadata = (new TableSetParser()).parse(getFile(xmlFilePath, webAppRootDir, KEY_METADATA_FILE)); + }catch(IOException ioe){ + throw new TAPException("A grave error occurred while reading/parsing the TableSet XML document: \"" + xmlFilePath + "\"!", ioe); + } + + // Update the database: + DBConnection conn = null; + try{ + conn = tapFactory.getConnection("SET_TAP_SCHEMA"); + conn.setTAPSchema(metadata); + }finally{ + if (conn != null) + tapFactory.freeConnection(conn); + } + } + // GET METADATA FROM DATABASE (schema TAP_SCHEMA): + else if (metaFetchType.equalsIgnoreCase(VALUE_DB)){ + DBConnection conn = null; + try{ + conn = tapFactory.getConnection("GET_TAP_SCHEMA"); + metadata = conn.getTAPSchema(); + }finally{ + if (conn != null) + tapFactory.freeConnection(conn); + } + } + // MANUAL ~ TAPMETADATA CLASS + else if (isClassName(metaFetchType)){ + /* 1. Get the metadata */ + // get the class: + Class metaClass = fetchClass(metaFetchType, KEY_METADATA, TAPMetadata.class); + if (metaClass == TAPMetadata.class) + throw new TAPException("Wrong class for the property \"" + KEY_METADATA + "\": \"" + metaClass.getName() + "\"! 
The class provided in this property MUST EXTEND tap.metadata.TAPMetadata."); + try{ + // get one of the expected constructors: + try{ + // (UWSFileManager, TAPFactory, TAPLog): + Constructor constructor = metaClass.getConstructor(UWSFileManager.class, TAPFactory.class, TAPLog.class); + // create the TAP metadata: + metadata = constructor.newInstance(fileManager, tapFactory, logger); + }catch(NoSuchMethodException nsme){ + // () (empty constructor): + Constructor constructor = metaClass.getConstructor(); + // create the TAP metadata: + metadata = constructor.newInstance(); + } + }catch(NoSuchMethodException nsme){ + throw new TAPException("Missing constructor tap.metadata.TAPMetadata() or tap.metadata.TAPMetadata(uws.service.file.UWSFileManager, tap.TAPFactory, tap.log.TAPLog)! See the value \"" + metaFetchType + "\" of the property \"" + KEY_METADATA + "\"."); + }catch(InstantiationException ie){ + throw new TAPException("Impossible to create an instance of an abstract class: \"" + metaClass.getName() + "\"! See the value \"" + metaFetchType + "\" of the property \"" + KEY_METADATA + "\"."); + }catch(InvocationTargetException ite){ + if (ite.getCause() != null){ + if (ite.getCause() instanceof TAPException) + throw (TAPException)ite.getCause(); + else + throw new TAPException(ite.getCause()); + }else + throw new TAPException(ite); + }catch(Exception ex){ + throw new TAPException("Impossible to create an instance of tap.metadata.TAPMetadata as specified in the property \"" + KEY_METADATA + "\": \"" + metaFetchType + "\"!", ex); + } + + /* 2. Update the database */ + DBConnection conn = null; + try{ + conn = tapFactory.getConnection("SET_TAP_SCHEMA"); + conn.setTAPSchema(metadata); + }finally{ + if (conn != null) + tapFactory.freeConnection(conn); + } + } + // INCORRECT VALUE => ERROR! + else + throw new TAPException("Unsupported value for the property \"" + KEY_METADATA + "\": \"" + metaFetchType + "\"! 
Only two values are allowed: " + VALUE_XML + " (to get metadata from a TableSet XML document) or " + VALUE_DB + " (to fetch metadata from the database schema TAP_SCHEMA)."); + + return metadata; + } + + /** + * Initialize the maximum number of asynchronous jobs. + * + * @param tapConfig The content of the TAP configuration file. + * + * @throws TAPException If the corresponding TAP configuration property is wrong. + */ + private void initMaxAsyncJobs(final Properties tapConfig) throws TAPException{ + // Get the property value: + String propValue = getProperty(tapConfig, KEY_MAX_ASYNC_JOBS); + try{ + // If a value is provided, cast it into an integer and set the attribute: + maxAsyncJobs = (propValue == null) ? DEFAULT_MAX_ASYNC_JOBS : Integer.parseInt(propValue); + }catch(NumberFormatException nfe){ + throw new TAPException("Integer expected for the property \"" + KEY_MAX_ASYNC_JOBS + "\", instead of: \"" + propValue + "\"!"); + } + } + + /** + * Initialize the default and maximum retention period. + * + * @param tapConfig The content of the TAP configuration file. + * + * @throws TAPException If the corresponding TAP configuration properties are wrong. + */ + private void initRetentionPeriod(final Properties tapConfig) throws TAPException{ + retentionPeriod = new int[2]; + + // Set the default period: + String propValue = getProperty(tapConfig, KEY_DEFAULT_RETENTION_PERIOD); + try{ + retentionPeriod[0] = (propValue == null) ? DEFAULT_RETENTION_PERIOD : Integer.parseInt(propValue); + }catch(NumberFormatException nfe){ + throw new TAPException("Integer expected for the property \"" + KEY_DEFAULT_RETENTION_PERIOD + "\", instead of: \"" + propValue + "\"!"); + } + + // Set the maximum period: + propValue = getProperty(tapConfig, KEY_MAX_RETENTION_PERIOD); + try{ + retentionPeriod[1] = (propValue == null) ? 
DEFAULT_RETENTION_PERIOD : Integer.parseInt(propValue); + }catch(NumberFormatException nfe){ + throw new TAPException("Integer expected for the property \"" + KEY_MAX_RETENTION_PERIOD + "\", instead of: \"" + propValue + "\"!"); + } + + // The maximum period MUST be greater or equals than the default period. + // If not, the default period is set (so decreased) to the maximum period. + if (retentionPeriod[1] > 0 && retentionPeriod[1] < retentionPeriod[0]) + retentionPeriod[0] = retentionPeriod[1]; + } + + /** + * Initialize the default and maximum execution duration. + * + * @param tapConfig The content of the TAP configuration file. + * + * @throws TAPException If the corresponding TAP configuration properties are wrong. + */ + private void initExecutionDuration(final Properties tapConfig) throws TAPException{ + executionDuration = new int[2]; + + // Set the default duration: + String propValue = getProperty(tapConfig, KEY_DEFAULT_EXECUTION_DURATION); + try{ + executionDuration[0] = (propValue == null) ? DEFAULT_EXECUTION_DURATION : Integer.parseInt(propValue); + }catch(NumberFormatException nfe){ + throw new TAPException("Integer expected for the property \"" + KEY_DEFAULT_EXECUTION_DURATION + "\", instead of: \"" + propValue + "\"!"); + } + + // Set the maximum duration: + propValue = getProperty(tapConfig, KEY_MAX_EXECUTION_DURATION); + try{ + executionDuration[1] = (propValue == null) ? DEFAULT_EXECUTION_DURATION : Integer.parseInt(propValue); + }catch(NumberFormatException nfe){ + throw new TAPException("Integer expected for the property \"" + KEY_MAX_EXECUTION_DURATION + "\", instead of: \"" + propValue + "\"!"); + } + + // The maximum duration MUST be greater or equals than the default duration. + // If not, the default duration is set (so decreased) to the maximum duration. + if (executionDuration[1] > 0 && executionDuration[1] < executionDuration[0]) + executionDuration[0] = executionDuration[1]; + } + + /** + *

    Initialize the list of all output format that the TAP service must support.

    + * + *

    + * This function ensures that at least one VOTable format is part of the returned list, + * even if none has been specified in the TAP configuration file. Indeed, the VOTable format is the only + * format required for a TAP service. + *

    + * + * @param tapConfig The content of the TAP configuration file. + * + * @throws TAPException If the corresponding TAP configuration properties are wrong. + */ + private void addOutputFormats(final Properties tapConfig) throws TAPException{ + // Fetch the value of the property for additional output formats: + String formats = getProperty(tapConfig, KEY_OUTPUT_FORMATS); + + // SPECIAL VALUE "ALL": + if (formats == null || formats.equalsIgnoreCase(VALUE_ALL)){ + outputFormats.add(new VOTableFormat(this, DataFormat.BINARY)); + outputFormats.add(new VOTableFormat(this, DataFormat.BINARY2)); + outputFormats.add(new VOTableFormat(this, DataFormat.TABLEDATA)); + outputFormats.add(new VOTableFormat(this, DataFormat.FITS)); + outputFormats.add(new FITSFormat(this)); + outputFormats.add(new JSONFormat(this)); + outputFormats.add(new SVFormat(this, ",", true)); + outputFormats.add(new SVFormat(this, "\t", true)); + outputFormats.add(new TextFormat(this)); + outputFormats.add(new HTMLFormat(this)); + return; + } + + // LIST OF FORMATS: + // Since it is a comma separated list of output formats, a loop will parse this list comma by comma: + String f; + int indexSep, indexLPar, indexRPar; + boolean hasVotableFormat = false; + while(formats != null && formats.length() > 0){ + // Get a format item from the list: + indexSep = formats.indexOf(','); + // if a comma is after a left parenthesis + indexLPar = formats.indexOf('('); + if (indexSep > 0 && indexLPar > 0 && indexSep > indexLPar){ + indexRPar = formats.indexOf(')', indexLPar); + if (indexRPar > 0) + indexSep = formats.indexOf(',', indexRPar); + else + throw new TAPException("Missing right parenthesis in: \"" + formats + "\"!"); + } + // no comma => only one format + if (indexSep < 0){ + f = formats; + formats = null; + } + // comma at the first position => empty list item => go to the next item + else if (indexSep == 0){ + formats = formats.substring(1).trim(); + continue; + } + // else => get the first format item, and 
then remove it from the list for the next iteration + else{ + f = formats.substring(0, indexSep).trim(); + formats = formats.substring(indexSep + 1).trim(); + } + + // Identify the format and append it to the output format list of the service: + // FITS + if (f.equalsIgnoreCase(VALUE_FITS)) + outputFormats.add(new FITSFormat(this)); + // JSON + else if (f.equalsIgnoreCase(VALUE_JSON)) + outputFormats.add(new JSONFormat(this)); + // HTML + else if (f.equalsIgnoreCase(VALUE_HTML)) + outputFormats.add(new HTMLFormat(this)); + // TEXT + else if (f.equalsIgnoreCase(VALUE_TEXT)) + outputFormats.add(new TextFormat(this)); + // CSV + else if (f.equalsIgnoreCase(VALUE_CSV)) + outputFormats.add(new SVFormat(this, ",", true)); + // TSV + else if (f.equalsIgnoreCase(VALUE_TSV)) + outputFormats.add(new SVFormat(this, "\t", true)); + // any SV (separated value) format + else if (f.toLowerCase().startsWith(VALUE_SV)){ + // get the separator: + int endSep = f.indexOf(')'); + if (VALUE_SV.length() < f.length() && f.charAt(VALUE_SV.length()) == '(' && endSep > VALUE_SV.length() + 1){ + String separator = f.substring(VALUE_SV.length() + 1, f.length() - 1); + // get the MIME type and its alias, if any of them is provided: + String mimeType = null, shortMimeType = null; + if (endSep + 1 < f.length() && f.charAt(endSep + 1) == ':'){ + int endMime = f.indexOf(':', endSep + 2); + if (endMime < 0) + mimeType = f.substring(endSep + 2, f.length()); + else if (endMime > 0){ + mimeType = f.substring(endSep + 2, endMime); + shortMimeType = f.substring(endMime + 1); + } + } + // add the defined SV(...) 
format: + outputFormats.add(new SVFormat(this, separator, true, mimeType, shortMimeType)); + }else + throw new TAPException("Missing separator char/string for the SV output format: \"" + f + "\"!"); + } + // VOTABLE + else if (f.toLowerCase().startsWith(VALUE_VOTABLE) || f.toLowerCase().startsWith(VALUE_VOT)){ + // Parse the format: + VOTableFormat votFormat = parseVOTableFormat(f); + + // Add the VOTable format: + outputFormats.add(votFormat); + + // Determine whether the MIME type is the VOTable expected one: + if (votFormat.getShortMimeType().equals("votable") || votFormat.getMimeType().equals("votable")) + hasVotableFormat = true; + } + // custom OutputFormat + else if (isClassName(f)) + outputFormats.add(TAPConfiguration.newInstance(f, KEY_OUTPUT_FORMATS, OutputFormat.class, new Class[]{ServiceConnection.class}, new Object[]{this})); + // unknown format + else + throw new TAPException("Unknown output format: " + f); + } + + // Add by default VOTable format if none is specified: + if (!hasVotableFormat) + outputFormats.add(new VOTableFormat(this)); + } + + /** + *

    Parse the given VOTable format specification.

    + * + *

    This specification is expected to be an item of the property {@link TAPConfiguration#KEY_OUTPUT_FORMATS}.

    + * + * @param propValue A single VOTable format specification. + * + * @return The corresponding configured {@link VOTableFormat} instance. + * + * @throws TAPException If the syntax of the given specification is incorrect, + * or if the specified VOTable version or serialization does not exist. + */ + private VOTableFormat parseVOTableFormat(final String propValue) throws TAPException{ + DataFormat serialization = null; + VOTableVersion votVersion = null; + String mimeType = null, shortMimeType = null; + + // Get the parameters, if any: + int beginSep = propValue.indexOf('('); + if (beginSep > 0){ + int endSep = propValue.indexOf(')'); + if (endSep <= beginSep) + throw new TAPException("Wrong output format specification syntax in: \"" + propValue + "\"! A VOTable parameters list must end with ')'."); + // split the parameters: + String[] params = propValue.substring(beginSep + 1, endSep).split(","); + if (params.length > 2) + throw new TAPException("Wrong number of parameters for the output format VOTable: \"" + propValue + "\"! Only two parameters may be provided: serialization and version."); + else if (params.length >= 1){ + // resolve the serialization format: + params[0] = params[0].trim().toLowerCase(); + if (params[0].length() == 0 || params[0].equals("b") || params[0].equals("binary")) + serialization = DataFormat.BINARY; + else if (params[0].equals("b2") || params[0].equals("binary2")) + serialization = DataFormat.BINARY2; + else if (params[0].equals("td") || params[0].equals("tabledata")) + serialization = DataFormat.TABLEDATA; + else if (params[0].equals("fits")) + serialization = DataFormat.FITS; + else + throw new TAPException("Unsupported VOTable serialization: \"" + params[0] + "\"! 
Accepted values: 'binary' (or 'b'), 'binary2' (or 'b2'), 'tabledata' (or 'td') and 'fits'."); + // resolve the version: + if (params.length == 2){ + params[1] = params[1].trim(); + if (params[1].equals("1.0") || params[1].equalsIgnoreCase("v1.0")) + votVersion = VOTableVersion.V10; + else if (params[1].equals("1.1") || params[1].equalsIgnoreCase("v1.1")) + votVersion = VOTableVersion.V11; + else if (params[1].equals("1.2") || params[1].equalsIgnoreCase("v1.2")) + votVersion = VOTableVersion.V12; + else if (params[1].equals("1.3") || params[1].equalsIgnoreCase("v1.3")) + votVersion = VOTableVersion.V13; + else + throw new TAPException("Unsupported VOTable version: \"" + params[1] + "\"! Accepted values: '1.0' (or 'v1.0'), '1.1' (or 'v1.1'), '1.2' (or 'v1.2') and '1.3' (or 'v1.3')."); + } + } + } + + // Get the MIME type and its alias, if any: + beginSep = propValue.indexOf(':'); + if (beginSep > 0){ + int endSep = propValue.indexOf(':', beginSep + 1); + if (endSep < 0) + endSep = propValue.length(); + // extract the MIME type, if any: + mimeType = propValue.substring(beginSep + 1, endSep).trim(); + if (mimeType.length() == 0) + mimeType = null; + // extract the short MIME type, if any: + if (endSep < propValue.length()){ + beginSep = endSep; + endSep = propValue.indexOf(':', beginSep + 1); + if (endSep >= 0) + throw new TAPException("Wrong output format specification syntax in: \"" + propValue + "\"! After a MIME type and a short MIME type, no more information is expected."); + else + endSep = propValue.length(); + shortMimeType = propValue.substring(beginSep + 1, endSep).trim(); + if (shortMimeType.length() == 0) + shortMimeType = null; + } + } + + // Create the VOTable format: + VOTableFormat votFormat = new VOTableFormat(this, serialization, votVersion); + votFormat.setMimeType(mimeType, shortMimeType); + + return votFormat; + } + + /** + * Initialize the default and maximum output limits. + * + * @param tapConfig The content of the TAP configuration file. 
+ * + * @throws TAPException If the corresponding TAP configuration properties are wrong. + */ + private void initOutputLimits(final Properties tapConfig) throws TAPException{ + Object[] limit = parseLimit(getProperty(tapConfig, KEY_DEFAULT_OUTPUT_LIMIT), KEY_DEFAULT_OUTPUT_LIMIT, false); + outputLimitTypes[0] = (LimitUnit)limit[1]; // it should be "rows" since the parameter areBytesAllowed of parseLimit =false + setDefaultOutputLimit((Integer)limit[0]); + + limit = parseLimit(getProperty(tapConfig, KEY_MAX_OUTPUT_LIMIT), KEY_DEFAULT_OUTPUT_LIMIT, false); + outputLimitTypes[1] = (LimitUnit)limit[1]; // it should be "rows" since the parameter areBytesAllowed of parseLimit =false + setMaxOutputLimit((Integer)limit[0]); + } + + /** + * Initialize the fetch size for the synchronous and for the asynchronous resources. + * + * @param tapConfig The content of the TAP configuration file. + * + * @throws TAPException If the corresponding TAP configuration properties are wrong. + */ + private void initFetchSize(final Properties tapConfig) throws TAPException{ + fetchSize = new int[2]; + + // Set the fetch size for asynchronous queries: + String propVal = getProperty(tapConfig, KEY_ASYNC_FETCH_SIZE); + if (propVal == null) + fetchSize[0] = DEFAULT_ASYNC_FETCH_SIZE; + else{ + try{ + fetchSize[0] = Integer.parseInt(propVal); + if (fetchSize[0] < 0) + fetchSize[0] = 0; + }catch(NumberFormatException nfe){ + throw new TAPException("Integer expected for the property " + KEY_ASYNC_FETCH_SIZE + ": \"" + propVal + "\"!"); + } + } + + // Set the fetch size for synchronous queries: + propVal = getProperty(tapConfig, KEY_SYNC_FETCH_SIZE); + if (propVal == null) + fetchSize[1] = DEFAULT_SYNC_FETCH_SIZE; + else{ + try{ + fetchSize[1] = Integer.parseInt(propVal); + if (fetchSize[1] < 0) + fetchSize[1] = 0; + }catch(NumberFormatException nfe){ + throw new TAPException("Integer expected for the property " + KEY_SYNC_FETCH_SIZE + ": \"" + propVal + "\"!"); + } + } + } + + /** + * Initialize 
the default and maximum upload limits. + * + * @param tapConfig The content of the TAP configuration file. + * + * @throws TAPException If the corresponding TAP configuration properties are wrong. + */ + private void initUploadLimits(final Properties tapConfig) throws TAPException{ + Object[] limit = parseLimit(getProperty(tapConfig, KEY_DEFAULT_UPLOAD_LIMIT), KEY_DEFAULT_UPLOAD_LIMIT, true); + uploadLimitTypes[0] = (LimitUnit)limit[1]; + setDefaultUploadLimit((Integer)limit[0]); + + limit = parseLimit(getProperty(tapConfig, KEY_MAX_UPLOAD_LIMIT), KEY_MAX_UPLOAD_LIMIT, true); + if (!((LimitUnit)limit[1]).isCompatibleWith(uploadLimitTypes[0])) + throw new TAPException("The default upload limit (in " + uploadLimitTypes[0] + ") and the maximum upload limit (in " + limit[1] + ") MUST be expressed in the same unit!"); + else + uploadLimitTypes[1] = (LimitUnit)limit[1]; + setMaxUploadLimit((Integer)limit[0]); + } + + /** + * Initialize the maximum size (in bytes) of a VOTable files set upload. + * + * @param tapConfig The content of the TAP configuration file. + * + * @throws TAPException If the corresponding TAP configuration property is wrong. + */ + private void initMaxUploadSize(final Properties tapConfig) throws TAPException{ + String propValue = getProperty(tapConfig, KEY_UPLOAD_MAX_FILE_SIZE); + // If a value is specified... 
+ if (propValue != null){ + // ...parse the value: + Object[] limit = parseLimit(propValue, KEY_UPLOAD_MAX_FILE_SIZE, true); + if (((Integer)limit[0]).intValue() <= 0) + limit[0] = new Integer(TAPConfiguration.DEFAULT_UPLOAD_MAX_FILE_SIZE); + // ...check that the unit is correct (bytes): + if (!LimitUnit.bytes.isCompatibleWith((LimitUnit)limit[1])) + throw new TAPException("The maximum upload file size " + KEY_UPLOAD_MAX_FILE_SIZE + " (here: " + propValue + ") can not be expressed in a unit different from bytes (B, kB, MB, GB)!"); + // ...set the max file size: + int value = (int)((Integer)limit[0] * ((LimitUnit)limit[1]).bytesFactor()); + setMaxUploadSize(value); + } + } + + /** + * Initialize the TAP user identification method. + * + * @param tapConfig The content of the TAP configuration file. + * + * @throws TAPException If the corresponding TAP configuration property is wrong. + */ + private void initUserIdentifier(final Properties tapConfig) throws TAPException{ + // Get the property value: + String propValue = getProperty(tapConfig, KEY_USER_IDENTIFIER); + if (propValue != null) + userIdentifier = newInstance(propValue, KEY_USER_IDENTIFIER, UserIdentifier.class); + } + + /** + * Initialize the list of all allowed coordinate systems. + * + * @param tapConfig The content of the TAP configuration file. + * + * @throws TAPException If the corresponding TAP configuration properties are wrong. + */ + private void initCoordSys(final Properties tapConfig) throws TAPException{ + // Get the property value: + String propValue = getProperty(tapConfig, KEY_COORD_SYS); + + // NO VALUE => ALL COORD SYS ALLOWED! + if (propValue == null) + lstCoordSys = null; + + // "NONE" => ALL COORD SYS FORBIDDEN (= no coordinate system expression is allowed)! + else if (propValue.equalsIgnoreCase(VALUE_NONE)) + lstCoordSys = new ArrayList(0); + + // "ANY" => ALL COORD SYS ALLOWED (= any coordinate system is allowed)! 
+ else if (propValue.equalsIgnoreCase(VALUE_ANY)) + lstCoordSys = null; + + // OTHERWISE, JUST THE ALLOWED ONE ARE LISTED: + else{ + // split all the list items: + String[] items = propValue.split(","); + if (items.length > 0){ + lstCoordSys = new ArrayList(items.length); + for(String item : items){ + item = item.trim(); + // empty item => ignored + if (item.length() <= 0) + continue; + // "NONE" is not allowed inside a list => error! + else if (item.toUpperCase().equals(VALUE_NONE)) + throw new TAPException("The special value \"" + VALUE_NONE + "\" can not be used inside a list! It MUST be used in replacement of a whole list to specify that no value is allowed."); + // "ANY" is not allowed inside a list => error! + else if (item.toUpperCase().equals(VALUE_ANY)) + throw new TAPException("The special value \"" + VALUE_ANY + "\" can not be used inside a list! It MUST be used in replacement of a whole list to specify that any value is allowed."); + // parse the coordinate system regular expression in order to check it: + else{ + try{ + STCS.buildCoordSysRegExp(new String[]{item}); + lstCoordSys.add(item); + }catch(ParseException pe){ + throw new TAPException("Incorrect coordinate system regular expression (\"" + item + "\"): " + pe.getMessage(), pe); + } + } + } + // if finally no item has been specified, consider it as "any coordinate system allowed": + if (lstCoordSys.size() == 0) + lstCoordSys = null; + }else + lstCoordSys = null; + } + } + + /** + * Initialize the list of all allowed ADQL geometrical functions. + * + * @param tapConfig The content of the TAP configuration file. + * + * @throws TAPException If the corresponding TAP configuration properties are wrong. + */ + private void initADQLGeometries(final Properties tapConfig) throws TAPException{ + // Get the property value: + String propValue = getProperty(tapConfig, KEY_GEOMETRIES); + + // NO VALUE => ALL FCT ALLOWED! 
+ if (propValue == null) + geometries = null; + + // "NONE" => ALL FCT FORBIDDEN (= none of these functions are allowed)! + else if (propValue.equalsIgnoreCase(VALUE_NONE)) + geometries = new ArrayList(0); + + // "ANY" => ALL FCT ALLOWED (= all of these functions are allowed)! + else if (propValue.equalsIgnoreCase(VALUE_ANY)) + geometries = null; + + // OTHERWISE, JUST THE ALLOWED ONE ARE LISTED: + else{ + // split all the list items: + String[] items = propValue.split(","); + if (items.length > 0){ + geometries = new ArrayList(items.length); + for(String item : items){ + item = item.trim(); + // empty item => ignored + if (item.length() <= 0) + continue; + // if it is a name of known ADQL geometrical function, add it to the list: + else if (item.toUpperCase().matches(GEOMETRY_REGEXP)) + geometries.add(item.toUpperCase()); + // "NONE" is not allowed inside a list => error! + else if (item.toUpperCase().equals(VALUE_NONE)) + throw new TAPException("The special value \"" + VALUE_NONE + "\" can not be used inside a list! It MUST be used in replacement of a whole list to specify that no value is allowed."); + // "ANY" is not allowed inside a list => error! + else if (item.toUpperCase().equals(VALUE_ANY)) + throw new TAPException("The special value \"" + VALUE_ANY + "\" can not be used inside a list! It MUST be used in replacement of a whole list to specify that any value is allowed."); + // unknown value => error! + else + throw new TAPException("Unknown ADQL geometrical function: \"" + item + "\"!"); + } + // if finally no item has been specified, consider it as "all functions allowed": + if (geometries.size() == 0) + geometries = null; + }else + geometries = null; + } + } + + /** + * Initialize the list of all known and allowed User Defined Functions. + * + * @param tapConfig The content of the TAP configuration file. + * + * @throws TAPException If the corresponding TAP configuration properties are wrong. 
+ */ + private void initUDFs(final Properties tapConfig) throws TAPException{ + // Get the property value: + String propValue = getProperty(tapConfig, KEY_UDFS); + + // NO VALUE => NO UNKNOWN FCT ALLOWED! + if (propValue == null) + udfs = new ArrayList(0); + + // "NONE" => NO UNKNOWN FCT ALLOWED (= none of the unknown functions are allowed)! + else if (propValue.equalsIgnoreCase(VALUE_NONE)) + udfs = new ArrayList(0); + + // "ANY" => ALL UNKNOWN FCT ALLOWED (= all of the unknown functions are allowed)! + else if (propValue.equalsIgnoreCase(VALUE_ANY)) + udfs = null; + + // OTHERWISE, JUST THE ALLOWED ONE ARE LISTED: + else{ + + char c; + int ind = 0; + short nbComma = 0; + boolean within_item = false, within_params = false, within_classpath = false; + StringBuffer buf = new StringBuffer(); + String signature, classpath; + int[] posSignature = new int[]{-1,-1}, posClassPath = new int[]{-1,-1}; + + signature = null; + classpath = null; + buf.delete(0, buf.length()); + + while(ind < propValue.length()){ + // Get the character: + c = propValue.charAt(ind++); + // If space => ignore + if (!within_params && Character.isWhitespace(c)) + continue; + // If inside a parameters list, keep all characters until the list end (')'): + if (within_params){ + if (c == ')') + within_params = false; + buf.append(c); + } + // If inside a classpath, keep all characters until the classpath end ('}'): + else if (within_classpath){ + if (c == '}') + within_classpath = false; + buf.append(c); + } + // If inside an UDF declaration: + else if (within_item){ + switch(c){ + case '(': /* start of a parameters list */ + within_params = true; + buf.append(c); + break; + case '{': /* start of a class name */ + within_classpath = true; + buf.append(c); + break; + case ',': /* separation between the signature and the class name */ + // count commas within this item: + if (++nbComma > 1) + // if more than 1, throw an error: + throw new TAPException("Wrong UDF declaration syntax: only two items 
(signature and class name) can be given within brackets. (position in the property " + KEY_UDFS + ": " + ind + ")"); + else{ + // end of the signature and start of the class name: + signature = buf.toString(); + buf.delete(0, buf.length()); + posSignature[1] = ind; + posClassPath[0] = ind + 1; + } + break; + case ']': /* end of a UDF declaration */ + within_item = false; + if (nbComma == 0){ + signature = buf.toString(); + posSignature[1] = ind; + }else{ + classpath = (buf.length() == 0 ? null : buf.toString()); + if (classpath != null) + posClassPath[1] = ind; + } + buf.delete(0, buf.length()); + + // no signature... + if (signature == null || signature.length() == 0){ + // ...BUT a class name => error + if (classpath != null) + throw new TAPException("Missing UDF declaration! (position in the property " + KEY_UDFS + ": " + posSignature[0] + "-" + posSignature[1] + ")"); + // ... => ignore this item + else + continue; + } + + // add the new UDF in the list: + try{ + // resolve the function signature: + FunctionDef def = FunctionDef.parse(signature); + // resolve the class name: + if (classpath != null){ + if (isClassName(classpath)){ + Class fctClass = null; + try{ + // fetch the class: + fctClass = fetchClass(classpath, KEY_UDFS, UserDefinedFunction.class); + // set the class inside the UDF definition: + def.setUDFClass(fctClass); + }catch(TAPException te){ + throw new TAPException("Invalid class name for the UDF definition \"" + def + "\": " + te.getMessage() + " (position in the property " + KEY_UDFS + ": " + posClassPath[0] + "-" + posClassPath[1] + ")", te); + }catch(IllegalArgumentException iae){ + throw new TAPException("Invalid class name for the UDF definition \"" + def + "\": missing a constructor with a single parameter of type ADQLOperand[] " + (fctClass != null ? "in the class \"" + fctClass.getName() + "\"" : "") + "! 
(position in the property " + KEY_UDFS + ": " + posClassPath[0] + "-" + posClassPath[1] + ")"); + } + }else + throw new TAPException("Invalid class name for the UDF definition \"" + def + "\": \"" + classpath + "\" is not a class name (or is not surrounding by {} as expected in this property file)! (position in the property " + KEY_UDFS + ": " + posClassPath[0] + "-" + posClassPath[1] + ")"); + } + // add the UDF: + udfs.add(def); + }catch(ParseException pe){ + throw new TAPException("Wrong UDF declaration syntax: " + pe.getMessage() + " (position in the property " + KEY_UDFS + ": " + posSignature[0] + "-" + posSignature[1] + ")", pe); + } + + // reset some variables: + nbComma = 0; + signature = null; + classpath = null; + break; + default: /* keep all other characters */ + buf.append(c); + break; + } + } + // If outside of everything, just starting a UDF declaration or separate each declaration is allowed: + else{ + switch(c){ + case '[': + within_item = true; + posSignature[0] = ind + 1; + break; + case ',': + break; + default: + throw new TAPException("Wrong UDF declaration syntax: unexpected character at position " + ind + " in the property " + KEY_UDFS + ": \"" + c + "\"! 
A UDF declaration must have one of the following syntaxes: \"[signature]\" or \"[signature,{className}]\"."); + } + } + } + + // If the parsing is not finished, throw an error: + if (within_item) + throw new TAPException("Wrong UDF declaration syntax: missing closing bracket at position " + propValue.length() + "!"); + } + } + + @Override + public String getProviderName(){ + return providerName; + } + + @Override + public String getProviderDescription(){ + return serviceDescription; + } + + @Override + public boolean isAvailable(){ + return isAvailable; + } + + @Override + public String getAvailability(){ + return availability; + } + + @Override + public void setAvailable(boolean isAvailable, String message){ + this.isAvailable = isAvailable; + availability = message; + } + + @Override + public int[] getRetentionPeriod(){ + return retentionPeriod; + } + + /** + *

    Set the default retention period.

    + * + *

    This period is set by default if the user did not specify one before the execution of his query.

    + * + *

    Important note: + * This function will apply the given retention period only if legal compared to the currently set maximum value. + * In other words, if the given value is less or equals to the current maximum retention period. + *

    + * + * @param period New default retention period (in seconds). + * + * @return true if the given retention period has been successfully set, false otherwise. + */ + public boolean setDefaultRetentionPeriod(final int period){ + if ((retentionPeriod[1] <= 0) || (period > 0 && period <= retentionPeriod[1])){ + retentionPeriod[0] = period; + return true; + }else + return false; + } + + /** + *

    Set the maximum retention period.

    + * + *

    This period limits the default retention period and the retention period specified by a user.

    + * + *

    Important note: + * This function may reduce the default retention period if the current default retention period is bigger + * to the new maximum retention period. In a such case, the default retention period is set to the + * new maximum retention period. + *

    + * + * @param period New maximum retention period (in seconds). + */ + public void setMaxRetentionPeriod(final int period){ + // Decrease the default retention period if it will be bigger than the new maximum retention period: + if (period > 0 && (retentionPeriod[0] <= 0 || period < retentionPeriod[0])) + retentionPeriod[0] = period; + // Set the new maximum retention period: + retentionPeriod[1] = period; + } + + @Override + public int[] getExecutionDuration(){ + return executionDuration; + } + + /** + *

    Set the default execution duration.

    + * + *

    This duration is set by default if the user did not specify one before the execution of his query.

    + * + *

    Important note: + * This function will apply the given execution duration only if legal compared to the currently set maximum value. + * In other words, if the given value is less or equals to the current maximum execution duration. + *

    + * + * @param duration New default execution duration (in milliseconds). + * + * @return true if the given execution duration has been successfully set, false otherwise. + */ + public boolean setDefaultExecutionDuration(final int duration){ + if ((executionDuration[1] <= 0) || (duration > 0 && duration <= executionDuration[1])){ + executionDuration[0] = duration; + return true; + }else + return false; + } + + /** + *

    Set the maximum execution duration.

    + * + *

    This duration limits the default execution duration and the execution duration specified by a user.

    + * + *

    Important note: + * This function may reduce the default execution duration if the current default execution duration is bigger + * to the new maximum execution duration. In a such case, the default execution duration is set to the + * new maximum execution duration. + *

    + * + * @param duration New maximum execution duration (in milliseconds). + */ + public void setMaxExecutionDuration(final int duration){ + // Decrease the default execution duration if it will be bigger than the new maximum execution duration: + if (duration > 0 && (executionDuration[0] <= 0 || duration < executionDuration[0])) + executionDuration[0] = duration; + // Set the new maximum execution duration: + executionDuration[1] = duration; + } + + @Override + public Iterator getOutputFormats(){ + return outputFormats.iterator(); + } + + @Override + public OutputFormat getOutputFormat(final String mimeOrAlias){ + if (mimeOrAlias == null || mimeOrAlias.trim().isEmpty()) + return null; + + for(OutputFormat f : outputFormats){ + if ((f.getMimeType() != null && f.getMimeType().equalsIgnoreCase(mimeOrAlias)) || (f.getShortMimeType() != null && f.getShortMimeType().equalsIgnoreCase(mimeOrAlias))) + return f; + } + return null; + } + + /** + *

    Add the given {@link OutputFormat} in the list of output formats supported by the TAP service.

    + * + *

    Warning: + * No verification is done in order to avoid duplicated output formats in the list. + * NULL objects are merely ignored silently. + *

    + * + * @param newOutputFormat New output format. + */ + public void addOutputFormat(final OutputFormat newOutputFormat){ + if (newOutputFormat != null) + outputFormats.add(newOutputFormat); + } + + /** + * Remove the specified output format. + * + * @param mimeOrAlias Full or short MIME type of the output format to remove. + * + * @return true if the specified format has been found and successfully removed from the list, + * false otherwise. + */ + public boolean removeOutputFormat(final String mimeOrAlias){ + OutputFormat of = getOutputFormat(mimeOrAlias); + if (of != null) + return outputFormats.remove(of); + else + return false; + } + + @Override + public int[] getOutputLimit(){ + return outputLimits; + } + + /** + *

    Set the default output limit.

    + * + *

    This limit is set by default if the user did not specify one before the execution of his query.

    + * + *

    Important note: + * This function will apply the given output limit only if legal compared to the currently set maximum value. + * In other words, if the given value is less or equals to the current maximum output limit. + *

    + * + * @param limit New default output limit (in number of rows). + * + * @return true if the given output limit has been successfully set, false otherwise. + */ + public boolean setDefaultOutputLimit(final int limit){ + if ((outputLimits[1] <= 0) || (limit > 0 && limit <= outputLimits[1])){ + outputLimits[0] = limit; + return true; + }else + return false; + } + + /** + *

    Set the maximum output limit.

    + * + *

    This output limit limits the default output limit and the output limit specified by a user.

    + * + *

    Important note: + * This function may reduce the default output limit if the current default output limit is bigger + * to the new maximum output limit. In a such case, the default output limit is set to the + * new maximum output limit. + *

    + * + * @param limit New maximum output limit (in number of rows). + */ + public void setMaxOutputLimit(final int limit){ + // Decrease the default output limit if it will be bigger than the new maximum output limit: + if (limit > 0 && (outputLimits[0] <= 0 || limit < outputLimits[0])) + outputLimits[0] = limit; + // Set the new maximum output limit: + outputLimits[1] = limit; + } + + @Override + public final LimitUnit[] getOutputLimitType(){ + return new LimitUnit[]{LimitUnit.rows,LimitUnit.rows}; + } + + @Override + public Collection getCoordinateSystems(){ + return lstCoordSys; + } + + @Override + public TAPLog getLogger(){ + return logger; + } + + @Override + public TAPFactory getFactory(){ + return tapFactory; + } + + @Override + public UWSFileManager getFileManager(){ + return fileManager; + } + + @Override + public boolean uploadEnabled(){ + return isUploadEnabled; + } + + public void setUploadEnabled(final boolean enabled){ + isUploadEnabled = enabled; + } + + @Override + public int[] getUploadLimit(){ + return uploadLimits; + } + + @Override + public LimitUnit[] getUploadLimitType(){ + return uploadLimitTypes; + } + + /** + * Set the unit of the upload limit. + * + * @param type Unit of upload limit (rows or bytes). + */ + public void setUploadLimitType(final LimitUnit type){ + if (type != null) + uploadLimitTypes = new LimitUnit[]{type,type}; + } + + /** + *

    Set the default upload limit.

    + * + *

    Important note: + * This function will apply the given upload limit only if legal compared to the currently set maximum value. + * In other words, if the given value is less or equals to the current maximum upload limit. + *

    + * + * @param limit New default upload limit. + * + * @return true if the given upload limit has been successfully set, false otherwise. + */ + public boolean setDefaultUploadLimit(final int limit){ + try{ + if ((uploadLimits[1] <= 0) || (limit > 0 && LimitUnit.compare(limit, uploadLimitTypes[0], uploadLimits[1], uploadLimitTypes[1]) <= 0)){ + uploadLimits[0] = limit; + return true; + } + }catch(TAPException e){} + return false; + } + + /** + *

    Set the maximum upload limit.

    + * + *

    This upload limit limits the default upload limit.

    + * + *

    Important note: + * This function may reduce the default upload limit if the current default upload limit is bigger + * to the new maximum upload limit. In a such case, the default upload limit is set to the + * new maximum upload limit. + *

    + * + * @param limit New maximum upload limit. + */ + public void setMaxUploadLimit(final int limit){ + try{ + // Decrease the default output limit if it will be bigger than the new maximum output limit: + if (limit > 0 && (uploadLimits[0] <= 0 || LimitUnit.compare(limit, uploadLimitTypes[1], uploadLimits[0], uploadLimitTypes[0]) < 0)) + uploadLimits[0] = limit; + // Set the new maximum output limit: + uploadLimits[1] = limit; + }catch(TAPException e){} + } + + @Override + public int getMaxUploadSize(){ + return maxUploadSize; + } + + /** + *

    Set the maximum size of a VOTable files set that can be uploaded in once.

    + * + *

    Warning: + * This size can not be negative or 0. If the given value is in this case, nothing will be done + * and false will be returned. + * On the contrary to the other limits, no "unlimited" limit is possible here ; only the + * maximum value can be set (i.e. maximum positive integer value). + *

    + * + * @param maxSize New maximum size (in bytes). + * + * @return true if the size has been successfully set, false otherwise. + */ + public boolean setMaxUploadSize(final int maxSize){ + // No "unlimited" value possible there: + if (maxSize <= 0) + return false; + + // Otherwise, set the maximum upload file size: + maxUploadSize = maxSize; + return true; + } + + @Override + public int getNbMaxAsyncJobs(){ + return maxAsyncJobs; + } + + @Override + public UserIdentifier getUserIdentifier(){ + return userIdentifier; + } + + @Override + public TAPMetadata getTAPMetadata(){ + return metadata; + } + + @Override + public Collection getGeometries(){ + return geometries; + } + + @Override + public Collection getUDFs(){ + return udfs; + } + + @Override + public int[] getFetchSize(){ + return fetchSize; + } + +} diff --git a/src/tap/config/ConfigurableTAPFactory.java b/src/tap/config/ConfigurableTAPFactory.java new file mode 100644 index 0000000000000000000000000000000000000000..02432f78d919c12b7588d28d34f82c946a1394bd --- /dev/null +++ b/src/tap/config/ConfigurableTAPFactory.java @@ -0,0 +1,334 @@ +package tap.config; + +/* + * This file is part of TAPLibrary. + * + * TAPLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * TAPLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with TAPLibrary. If not, see . 
+ * + * Copyright 2015 - Astronomisches Rechen Institut (ARI) + */ + +import static tap.config.TAPConfiguration.DEFAULT_BACKUP_BY_USER; +import static tap.config.TAPConfiguration.DEFAULT_BACKUP_FREQUENCY; +import static tap.config.TAPConfiguration.KEY_BACKUP_BY_USER; +import static tap.config.TAPConfiguration.KEY_BACKUP_FREQUENCY; +import static tap.config.TAPConfiguration.KEY_DATABASE_ACCESS; +import static tap.config.TAPConfiguration.KEY_DATASOURCE_JNDI_NAME; +import static tap.config.TAPConfiguration.KEY_DB_PASSWORD; +import static tap.config.TAPConfiguration.KEY_DB_USERNAME; +import static tap.config.TAPConfiguration.KEY_JDBC_DRIVER; +import static tap.config.TAPConfiguration.KEY_JDBC_URL; +import static tap.config.TAPConfiguration.KEY_SQL_TRANSLATOR; +import static tap.config.TAPConfiguration.VALUE_JDBC; +import static tap.config.TAPConfiguration.VALUE_JDBC_DRIVERS; +import static tap.config.TAPConfiguration.VALUE_JNDI; +import static tap.config.TAPConfiguration.VALUE_NEVER; +import static tap.config.TAPConfiguration.VALUE_PGSPHERE; +import static tap.config.TAPConfiguration.VALUE_POSTGRESQL; +import static tap.config.TAPConfiguration.VALUE_USER_ACTION; +import static tap.config.TAPConfiguration.getProperty; + +import java.sql.Driver; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.util.Enumeration; +import java.util.Properties; + +import javax.naming.InitialContext; +import javax.naming.NamingException; +import javax.sql.DataSource; + +import tap.AbstractTAPFactory; +import tap.ServiceConnection; +import tap.TAPException; +import tap.TAPFactory; +import tap.backup.DefaultTAPBackupManager; +import tap.db.DBConnection; +import tap.db.JDBCConnection; +import uws.UWSException; +import uws.service.UWSService; +import uws.service.backup.UWSBackupManager; +import uws.service.log.UWSLog.LogLevel; +import adql.translator.JDBCTranslator; +import adql.translator.PgSphereTranslator; +import adql.translator.PostgreSQLTranslator; + +/** + *

    Concrete implementation of a {@link TAPFactory} which is parameterized by a TAP configuration file.

    + * + *

    + * All abstract or NULL-implemented methods/functions left by {@link AbstractTAPFactory} are implemented using values + * of a TAP configuration file. The concerned methods are: {@link #getConnection(String)}, {@link #freeConnection(DBConnection)}, + * {@link #destroy()}, {@link #createADQLTranslator()} and {@link #createUWSBackupManager(UWSService)}. + *

    + * + * @author Grégory Mantelet (ARI) + * @version 2.0 (04/2015) + * @since 2.0 + */ +public final class ConfigurableTAPFactory extends AbstractTAPFactory { + + /* ADQL to SQL translation: */ + /** The {@link JDBCTranslator} to use when a ADQL query must be executed in the database. + * This translator is also used to convert ADQL types into database types. */ + private Class translator; + + /* JNDI DB access: */ + /** The {@link DataSource} to use in order to access the database. + * This attribute is actually used only if the chosen database access method is JNDI. */ + private final DataSource datasource; + + /* Simple JDBC access: */ + /** Classpath of the JDBC driver to use in order to access the database. + * This attribute is actually used only if the chosen database access method is JDBC. */ + private final String driverPath; + /** JDBC URL of the database to access. + * This attribute is actually used only if the chosen database access method is JDBC. */ + private final String dbUrl; + /** Name of the database user to use in order to access the database. + * This attribute is actually used only if the chosen database access method is JDBC. */ + private final String dbUser; + /** Password of the database user to use in order to access the database. + * This attribute is actually used only if the chosen database access method is JDBC. */ + private final String dbPassword; + + /* UWS's jobs backup: */ + /** Indicate whether the jobs must be backuped gathered by user or just all mixed together. */ + private boolean backupByUser; + /** Frequency at which the jobs must be backuped. */ + private long backupFrequency; + + /** + * Build a {@link TAPFactory} using the given TAP service description and TAP configuration file. + * + * @param service The TAP service description. + * @param tapConfig The TAP configuration file containing particularly information about the database access. + * + * @throws NullPointerException If one of the parameter is NULL. 
+ * @throws TAPException If some properties of the TAP configuration file are wrong. + */ + public ConfigurableTAPFactory(ServiceConnection service, final Properties tapConfig) throws NullPointerException, TAPException{ + super(service); + + if (tapConfig == null) + throw new NullPointerException("Missing TAP properties! "); + + /* 1. Configure the database access */ + final String dbAccessMethod = getProperty(tapConfig, KEY_DATABASE_ACCESS); + + // Case a: Missing access method => error! + if (dbAccessMethod == null) + throw new TAPException("The property \"" + KEY_DATABASE_ACCESS + "\" is missing! It is required to connect to the database. Two possible values: \"" + VALUE_JDBC + "\" and \"" + VALUE_JNDI + "\"."); + + // Case b: JDBC ACCESS + else if (dbAccessMethod.equalsIgnoreCase(VALUE_JDBC)){ + // Extract the DB type and deduce the JDBC Driver path: + String jdbcDriver = getProperty(tapConfig, KEY_JDBC_DRIVER); + String dbUrl = getProperty(tapConfig, KEY_JDBC_URL); + if (jdbcDriver == null){ + if (dbUrl == null) + throw new TAPException("The property \"" + KEY_JDBC_URL + "\" is missing! Since the choosen database access method is \"" + VALUE_JDBC + "\", this property is required."); + else if (!dbUrl.startsWith(JDBCConnection.JDBC_PREFIX + ":")) + throw new TAPException("JDBC URL format incorrect! It MUST begins with " + JDBCConnection.JDBC_PREFIX + ":"); + else{ + String dbType = dbUrl.substring(JDBCConnection.JDBC_PREFIX.length() + 1); + if (dbType.indexOf(':') <= 0) + throw new TAPException("JDBC URL format incorrect! 
Database type name is missing."); + dbType = dbType.substring(0, dbType.indexOf(':')); + + jdbcDriver = VALUE_JDBC_DRIVERS.get(dbType); + if (jdbcDriver == null) + throw new TAPException("No JDBC driver known for the DBMS \"" + dbType + "\"!"); + } + } + // Set the DB connection parameters: + this.driverPath = jdbcDriver; + this.dbUrl = dbUrl; + this.dbUser = getProperty(tapConfig, KEY_DB_USERNAME); + this.dbPassword = getProperty(tapConfig, KEY_DB_PASSWORD); + // Set the other DB connection parameters: + this.datasource = null; + } + // Case c: JNDI ACCESS + else if (dbAccessMethod.equalsIgnoreCase(VALUE_JNDI)){ + // Get the datasource JDNI name: + String dsName = getProperty(tapConfig, KEY_DATASOURCE_JNDI_NAME); + if (dsName == null) + throw new TAPException("The property \"" + KEY_DATASOURCE_JNDI_NAME + "\" is missing! Since the choosen database access method is \"" + VALUE_JNDI + "\", this property is required."); + try{ + // Load the JNDI context: + InitialContext cxt = new InitialContext(); + // Look for the specified datasource: + datasource = (DataSource)cxt.lookup(dsName); + if (datasource == null) + throw new TAPException("No datasource found with the JNDI name \"" + dsName + "\"!"); + // Set the other DB connection parameters: + this.driverPath = null; + this.dbUrl = null; + this.dbUser = null; + this.dbPassword = null; + }catch(NamingException ne){ + throw new TAPException("No datasource found with the JNDI name \"" + dsName + "\"!"); + } + } + // Case d: unsupported value + else + throw new TAPException("Unsupported value for the property " + KEY_DATABASE_ACCESS + ": \"" + dbAccessMethod + "\"! Allowed values: \"" + VALUE_JNDI + "\" or \"" + VALUE_JDBC + "\"."); + + /* 2. 
Set the ADQLTranslator to use in function of the sql_translator property */ + String sqlTranslator = getProperty(tapConfig, KEY_SQL_TRANSLATOR); + // case a: no translator specified + if (sqlTranslator == null) + throw new TAPException("The property \"" + KEY_SQL_TRANSLATOR + "\" is missing! ADQL queries can not be translated without it. Allowed values: \"" + VALUE_POSTGRESQL + "\", \"" + VALUE_PGSPHERE + "\" or a class path of a class implementing SQLTranslator."); + + // case b: PostgreSQL translator + else if (sqlTranslator.equalsIgnoreCase(VALUE_POSTGRESQL)) + translator = PostgreSQLTranslator.class; + + // case c: PgSphere translator + else if (sqlTranslator.equalsIgnoreCase(VALUE_PGSPHERE)) + translator = PgSphereTranslator.class; + + // case d: a client defined ADQLTranslator (with the provided class name) + else if (TAPConfiguration.isClassName(sqlTranslator)) + translator = TAPConfiguration.fetchClass(sqlTranslator, KEY_SQL_TRANSLATOR, JDBCTranslator.class); + + // case e: unsupported value + else + throw new TAPException("Unsupported value for the property " + KEY_SQL_TRANSLATOR + ": \"" + sqlTranslator + "\" !"); + + /* 3. Test the construction of the ADQLTranslator */ + createADQLTranslator(); + + /* 4. Test the DB connection (note: a translator is needed to create a connection) */ + DBConnection dbConn = getConnection("0"); + freeConnection(dbConn); + + /* 5. 
Set the UWS Backup Parameter */ + // Set the backup frequency: + String propValue = getProperty(tapConfig, KEY_BACKUP_FREQUENCY); + // determine whether the value is a time period ; if yes, set the frequency: + if (propValue != null){ + try{ + backupFrequency = Long.parseLong(propValue); + if (backupFrequency <= 0) + backupFrequency = DEFAULT_BACKUP_FREQUENCY; + }catch(NumberFormatException nfe){ + // if the value was not a valid numeric time period, try to identify the different textual options: + if (propValue.equalsIgnoreCase(VALUE_NEVER)) + backupFrequency = DefaultTAPBackupManager.MANUAL; + else if (propValue.equalsIgnoreCase(VALUE_USER_ACTION)) + backupFrequency = DefaultTAPBackupManager.AT_USER_ACTION; + else + throw new TAPException("Long expected for the property \"" + KEY_BACKUP_FREQUENCY + "\", instead of: \"" + propValue + "\"!"); + } + }else + backupFrequency = DEFAULT_BACKUP_FREQUENCY; + // Specify whether the backup must be organized by user or not: + propValue = getProperty(tapConfig, KEY_BACKUP_BY_USER); + backupByUser = (propValue == null) ? DEFAULT_BACKUP_BY_USER : Boolean.parseBoolean(propValue); + } + + /** + * Build a {@link JDBCTranslator} instance with the given class ({@link #translator} ; + * specified by the property sql_translator). If the instance can not be build, + * whatever is the reason, a TAPException MUST be thrown. + * + * Note: This function is called at the initialization of {@link ConfigurableTAPFactory} + * in order to check that a translator can be created. 
+ */ + protected JDBCTranslator createADQLTranslator() throws TAPException{ + try{ + return translator.getConstructor().newInstance(); + }catch(Exception ex){ + if (ex instanceof TAPException) + throw (TAPException)ex; + else + throw new TAPException("Impossible to create a JDBCTranslator instance with the empty constructor of \"" + translator.getName() + "\" (see the property " + KEY_SQL_TRANSLATOR + ") for the following reason: " + ex.getMessage()); + } + } + + /** + * Build a {@link JDBCConnection} thanks to the database parameters specified + * in the TAP configuration file (the properties: jdbc_driver_path, db_url, db_user, db_password). + * + * @see JDBCConnection#JDBCConnection(java.sql.Connection, JDBCTranslator, String, tap.log.TAPLog) + * @see JDBCConnection#JDBCConnection(String, String, String, String, JDBCTranslator, String, tap.log.TAPLog) + */ + @Override + public DBConnection getConnection(String jobID) throws TAPException{ + if (datasource != null){ + try{ + return new JDBCConnection(datasource.getConnection(), createADQLTranslator(), jobID, this.service.getLogger()); + }catch(SQLException se){ + throw new TAPException("Impossible to establish a connection to the database using the set up datasource!", se); + } + }else + return new JDBCConnection(driverPath, dbUrl, dbUser, dbPassword, createADQLTranslator(), jobID, this.service.getLogger()); + } + + @Override + public void freeConnection(DBConnection conn){ + try{ + ((JDBCConnection)conn).getInnerConnection().close(); + }catch(SQLException se){ + service.getLogger().error("Can not close properly the connection \"" + conn.getID() + "\"!", se); + } + } + + @Override + public void destroy(){ + // Unregister the JDBC driver, only if registered by the library (i.e. 
database_access=jdbc): + if (dbUrl != null){ + // Now deregister JDBC drivers in this context's ClassLoader: + // Get the webapp's ClassLoader + ClassLoader cl = Thread.currentThread().getContextClassLoader(); + // Loop through all drivers + Enumeration drivers = DriverManager.getDrivers(); + while(drivers.hasMoreElements()){ + Driver driver = drivers.nextElement(); + if (driver.getClass().getClassLoader() == cl){ + // This driver was registered by the webapp's ClassLoader, so deregister it: + try{ + DriverManager.deregisterDriver(driver); + service.getLogger().logTAP(LogLevel.INFO, null, "STOP", "JDBC driver " + driver.getClass().getName() + " successfully deregistered!", null); + }catch(SQLException ex){ + service.getLogger().logTAP(LogLevel.FATAL, null, "STOP", "Error deregistering JDBC driver " + driver.getClass().getName() + "!", ex); + } + } + } + } + } + + /** + * Build an {@link DefaultTAPBackupManager} thanks to the backup manager parameters specified + * in the TAP configuration file (the properties: backup_frequency, backup_by_user). + * + * Note: If the specified backup_frequency is negative, no backup manager is returned. + * + * @return null if the specified backup frequency is negative, or an instance of {@link DefaultTAPBackupManager} otherwise. + * + * @see tap.AbstractTAPFactory#createUWSBackupManager(uws.service.UWSService) + * @see DefaultTAPBackupManager + */ + @Override + public UWSBackupManager createUWSBackupManager(UWSService uws) throws TAPException{ + try{ + return (backupFrequency < 0) ? 
null : new DefaultTAPBackupManager(uws, backupByUser, backupFrequency); + }catch(UWSException ex){ + throw new TAPException("Impossible to create a backup manager, because: " + ex.getMessage(), ex); + } + } + +} diff --git a/src/tap/config/ConfigurableTAPServlet.java b/src/tap/config/ConfigurableTAPServlet.java new file mode 100644 index 0000000000000000000000000000000000000000..83f217da7aac5a22a38080f71abd1435453a369f --- /dev/null +++ b/src/tap/config/ConfigurableTAPServlet.java @@ -0,0 +1,234 @@ +package tap.config; + +/* + * This file is part of TAPLibrary. + * + * TAPLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * TAPLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with TAPLibrary. If not, see . 
+ * + * Copyright 2015 - Astronomisches Rechen Institut (ARI) + */ + +import static tap.config.TAPConfiguration.DEFAULT_TAP_CONF_FILE; +import static tap.config.TAPConfiguration.KEY_ADD_TAP_RESOURCES; +import static tap.config.TAPConfiguration.KEY_HOME_PAGE; +import static tap.config.TAPConfiguration.KEY_HOME_PAGE_MIME_TYPE; +import static tap.config.TAPConfiguration.TAP_CONF_PARAMETER; +import static tap.config.TAPConfiguration.getProperty; +import static tap.config.TAPConfiguration.isClassName; +import static tap.config.TAPConfiguration.newInstance; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.Properties; + +import javax.servlet.ServletConfig; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServlet; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import tap.ServiceConnection; +import tap.TAPException; +import tap.resource.HomePage; +import tap.resource.TAP; +import tap.resource.TAPResource; + +/** + *

    HTTP servlet fully configured with a TAP configuration file.

    + * + *

    + * This configuration file may be specified in the initial parameter named {@link TAPConfiguration#TAP_CONF_PARAMETER} + * of this servlet inside the WEB-INF/web.xml file. If none is specified, the file {@link TAPConfiguration#DEFAULT_TAP_CONF_FILE} + * will be searched inside the directories of the classpath, and inside WEB-INF and META-INF. + *

    + * + * @author Grégory Mantelet (ARI) + * @version 2.0 (04/2015) + * @since 2.0 + */ +public class ConfigurableTAPServlet extends HttpServlet { + private static final long serialVersionUID = 1L; + + /** TAP object representing the TAP service. */ + private TAP tap = null; + + @Override + public void init(final ServletConfig config) throws ServletException{ + // Nothing to do, if TAP is already initialized: + if (tap != null) + return; + + /* 1. GET THE FILE PATH OF THE TAP CONFIGURATION FILE */ + String tapConfPath = config.getInitParameter(TAP_CONF_PARAMETER); + if (tapConfPath == null || tapConfPath.trim().length() == 0) + tapConfPath = null; + //throw new ServletException("Configuration file path missing! You must set a servlet init parameter whose the name is \"" + TAP_CONF_PARAMETER + "\"."); + + /* 2. OPEN THE CONFIGURATION FILE */ + InputStream input = null; + // CASE: No file specified => search in the classpath for a file having the default name "tap.properties". + if (tapConfPath == null) + input = searchFile(DEFAULT_TAP_CONF_FILE, config); + else{ + File f = new File(tapConfPath); + // CASE: The given path matches to an existing local file. + if (f.exists()){ + try{ + input = new FileInputStream(f); + }catch(IOException ioe){ + throw new ServletException("Impossible to read the TAP configuration file (" + tapConfPath + ")!", ioe); + } + } + // CASE: The given path seems to be relative to the servlet root directory. + else + input = searchFile(tapConfPath, config); + } + // If no file has been found, cancel the servlet loading: + if (input == null) + throw new ServletException("Configuration file not found with the path: \"" + ((tapConfPath == null) ? DEFAULT_TAP_CONF_FILE : tapConfPath) + "\"! Please provide a correct file path in servlet init parameter (\"" + TAP_CONF_PARAMETER + "\") or put your configuration file named \"" + DEFAULT_TAP_CONF_FILE + "\" in a directory of the classpath or in WEB-INF or META-INF."); + + /* 3. 
PARSE IT INTO A PROPERTIES SET */ + Properties tapConf = new Properties(); + try{ + tapConf.load(input); + }catch(IOException ioe){ + throw new ServletException("Impossible to read the TAP configuration file (" + tapConfPath + ")!", ioe); + }finally{ + try{ + input.close(); + }catch(IOException ioe2){} + } + + /* 4. CREATE THE TAP SERVICE */ + ServiceConnection serviceConn = null; + try{ + // Create the service connection: + serviceConn = new ConfigurableServiceConnection(tapConf, config.getServletContext().getRealPath("")); + // Create all the TAP resources: + tap = new TAP(serviceConn); + }catch(Exception ex){ + tap = null; + if (ex instanceof TAPException) + throw new ServletException(ex.getMessage(), ex.getCause()); + else + throw new ServletException("Impossible to initialize the TAP service!", ex); + } + + /* 4Bis. SET THE HOME PAGE */ + String propValue = getProperty(tapConf, KEY_HOME_PAGE); + if (propValue != null){ + // If it is a class path, replace the current home page by an instance of this class: + if (isClassName(propValue)){ + try{ + tap.setHomePage(newInstance(propValue, KEY_HOME_PAGE, HomePage.class, new Class[]{TAP.class}, new Object[]{tap})); + }catch(TAPException te){ + throw new ServletException(te.getMessage(), te.getCause()); + } + } + // If it is a file URI (null, file inside WebContent, file://..., http://..., etc...): + else{ + // ...set the given URI: + tap.setHomePageURI(propValue); + // ...and its MIME type (if any): + propValue = getProperty(tapConf, KEY_HOME_PAGE_MIME_TYPE); + if (propValue != null) + tap.setHomePageMimeType(propValue); + } + } + + /* 5. 
SET ADDITIONAL TAP RESOURCES */ + propValue = getProperty(tapConf, KEY_ADD_TAP_RESOURCES); + if (propValue != null){ + // split all list items: + String[] lstResources = propValue.split(","); + for(String addRes : lstResources){ + addRes = addRes.trim(); + // ignore empty items: + if (addRes.length() > 0){ + try{ + // create an instance of the resource: + TAPResource newRes = newInstance(addRes, KEY_ADD_TAP_RESOURCES, TAPResource.class, new Class[]{TAP.class}, new Object[]{tap}); + if (newRes.getName() == null || newRes.getName().trim().length() == 0) + throw new TAPException("TAP resource name missing for the new resource \"" + addRes + "\"! The function getName() of the new TAPResource must return a non-empty and not NULL name. See the property \"" + KEY_ADD_TAP_RESOURCES + "\"."); + // add it into TAP: + tap.addResource(newRes); + }catch(TAPException te){ + throw new ServletException(te.getMessage(), te.getCause()); + } + } + } + } + + /* 6. DEFAULT SERVLET INITIALIZATION */ + super.init(config); + + /* 7. FINALLY MAKE THE SERVICE AVAILABLE */ + serviceConn.setAvailable(true, "TAP service available."); + } + + /** + * Search the given file name/path in the directories of the classpath, then inside WEB-INF and finally inside META-INF. + * + * @param filePath A file name/path. + * @param config Servlet configuration (containing also the context class loader - link with the servlet classpath). + * + * @return The input stream toward the specified file, or NULL if no file can be found. 
+ * + * @since 2.0 + */ + protected final InputStream searchFile(String filePath, final ServletConfig config){ + InputStream input = null; + + // Try to search in the classpath (with just a file name or a relative path): + input = Thread.currentThread().getContextClassLoader().getResourceAsStream(filePath); + + // If not found, try searching in WEB-INF and META-INF (as this fileName is a file path relative to one of these directories): + if (input == null){ + if (filePath.startsWith("/")) + filePath = filePath.substring(1); + // ...try at the root of WEB-INF: + input = config.getServletContext().getResourceAsStream("/WEB-INF/" + filePath); + // ...and at the root of META-INF: + if (input == null) + input = config.getServletContext().getResourceAsStream("/META-INF/" + filePath); + } + + return input; + } + + @Override + public void destroy(){ + // Free all resources used by TAP: + if (tap != null){ + tap.destroy(); + tap = null; + } + super.destroy(); + } + + @Override + protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException{ + if (tap != null){ + try{ + tap.executeRequest(req, resp); + }catch(Throwable t){ + resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, t.getMessage()); + } + }else + resp.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, "TAP service not yet initialized!"); + } + +} diff --git a/src/tap/config/TAPConfiguration.java b/src/tap/config/TAPConfiguration.java new file mode 100644 index 0000000000000000000000000000000000000000..11934ed55bd9b8760ae462c9ca3083bd190c348f --- /dev/null +++ b/src/tap/config/TAPConfiguration.java @@ -0,0 +1,539 @@ +package tap.config; + +/* + * This file is part of TAPLibrary. + * + * TAPLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
+ * + * TAPLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with TAPLibrary. If not, see . + * + * Copyright 2015 - Astronomisches Rechen Institut (ARI) + */ + +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.util.HashMap; +import java.util.Properties; + +import tap.ServiceConnection.LimitUnit; +import tap.TAPException; +import tap.TAPFactory; +import tap.backup.DefaultTAPBackupManager; + +/** + *

    Utility class gathering tool functions and properties' names useful to deal with a TAP configuration file.

    + * + *

This class implements the "Utility" design pattern: no instance of this class can be created, it cannot be extended, + * and it must be used only through its static functions and attributes.

    + * + * @author Grégory Mantelet (ARI) + * @version 2.0 (04/2015) + * @since 2.0 + */ +public final class TAPConfiguration { + + /** Name of the initial parameter to set in the WEB-INF/web.xml file + * in order to specify the location and the name of the TAP configuration file to load. */ + public final static String TAP_CONF_PARAMETER = "tapconf"; + /** Default TAP configuration file. This file is research automatically + * if none is specified in the WEB-INF/web.xml initial parameter {@value #TAP_CONF_PARAMETER}. */ + public final static String DEFAULT_TAP_CONF_FILE = "tap.properties"; + + /* FILE MANAGER KEYS */ + /** Name/Key of the property setting the file manager to use in the TAP service. */ + public final static String KEY_FILE_MANAGER = "file_manager"; + /** Value of the property {@link #KEY_FILE_MANAGER} specifying a local file manager. */ + public final static String VALUE_LOCAL = "local"; + /** Default value of the property {@link #KEY_FILE_MANAGER}: {@value #DEFAULT_FILE_MANAGER}. */ + public final static String DEFAULT_FILE_MANAGER = VALUE_LOCAL; + /** Name/Key of the property setting the local root directory where all TAP files must be stored. + * This property is used only if {@link #KEY_FILE_MANAGER} is set to {@link #VALUE_LOCAL}. */ + public final static String KEY_FILE_ROOT_PATH = "file_root_path"; + /** Name/Key of the property indicating whether the jobs must be saved by user or not. + * If yes, there will be one directory per user. Otherwise, all jobs are backuped in the same directory + * (generally {@link #KEY_FILE_ROOT_PATH}). */ + public final static String KEY_DIRECTORY_PER_USER = "directory_per_user"; + /** Default value of the property {@link #KEY_DIRECTORY_PER_USER}: {@value #DEFAULT_DIRECTORY_PER_USER}. */ + public final static boolean DEFAULT_DIRECTORY_PER_USER = false; + /** Name/Key of the property indicating whether the user directories (in which jobs of the user are backuped) + * must be gathered in less directories. 
If yes, the groups are generally made using the alphabetic order. + * The idea is to reduce the number of apparent directories and to easier the research of a user directory. */ + public final static String KEY_GROUP_USER_DIRECTORIES = "group_user_directories"; + /** Default value of the property {@link #KEY_GROUP_USER_DIRECTORIES}: {@value #DEFAULT_GROUP_USER_DIRECTORIES}. */ + public final static boolean DEFAULT_GROUP_USER_DIRECTORIES = false; + /** Name/Key of the property specifying the default period (in seconds) while a job must remain on the server. + * This value is set automatically to any job whose the retention period has never been specified by the user. */ + public final static String KEY_DEFAULT_RETENTION_PERIOD = "default_retention_period"; + /** Name/Key of the property specifying the maximum period (in seconds) while a job can remain on the server. */ + public final static String KEY_MAX_RETENTION_PERIOD = "max_retention_period"; + /** Default value of the properties {@link #KEY_DEFAULT_RETENTION_PERIOD} and {@link #KEY_MAX_RETENTION_PERIOD}: + * {@value #DEFAULT_RETENTION_PERIOD}. */ + public final static int DEFAULT_RETENTION_PERIOD = 0; + + /* LOG KEYS */ + /** Name/Key of the property specifying the minimum type of messages (i.e. DEBUG, INFO, WARNING, ERROR, FATAL) + * that must be logged. By default all messages are logged...which is equivalent to set this property to "DEBUG". */ + public final static String KEY_MIN_LOG_LEVEL = "min_log_level"; + /** Name/Key of the property specifying the frequency of the log file rotation. + * By default the log rotation occurs every day at midnight. */ + public final static String KEY_LOG_ROTATION = "log_rotation"; + + /* UWS BACKUP */ + /** Name/Key of the property specifying the frequency (in milliseconds) of jobs backup. 
+ * This property accepts three types of value: "never" (default), "user_action" (the backup of a job is done when + * it is modified), or a numeric positive value (expressed in milliseconds). */ + public final static String KEY_BACKUP_FREQUENCY = "backup_frequency"; + /** Value of the property {@link #KEY_BACKUP_FREQUENCY} indicating that jobs should never be backuped. */ + public final static String VALUE_NEVER = "never"; + /** Value of the property {@link #KEY_BACKUP_FREQUENCY} indicating that job backup should occur only when the user + * creates or modifies one of his jobs. This value can be used ONLY IF {@link #KEY_BACKUP_BY_USER} is "true". */ + public final static String VALUE_USER_ACTION = "user_action"; + /** Default value of the property {@link #KEY_BACKUP_FREQUENCY}: {@link #DEFAULT_BACKUP_FREQUENCY}. */ + public final static long DEFAULT_BACKUP_FREQUENCY = DefaultTAPBackupManager.MANUAL; // = "never" => no UWS backup manager + /** Name/Key of the property indicating whether there should be one backup file per user or one file for all. */ + public final static String KEY_BACKUP_BY_USER = "backup_by_user"; + /** Default value of the property {@link #KEY_BACKUP_BY_USER}: {@value #DEFAULT_BACKUP_BY_USER}. + * This property can be enabled only if a user identification method is provided. */ + public final static boolean DEFAULT_BACKUP_BY_USER = false; + + /* ASYNCHRONOUS JOBS */ + /** Name/Key of the property specifying the maximum number of asynchronous jobs that can run simultaneously. + * A negative or null value means "no limit". */ + public final static String KEY_MAX_ASYNC_JOBS = "max_async_jobs"; + /** Default value of the property {@link #KEY_MAX_ASYNC_JOBS}: {@value #DEFAULT_MAX_ASYNC_JOBS}. */ + public final static int DEFAULT_MAX_ASYNC_JOBS = 0; + + /* EXECUTION DURATION */ + /** Name/Key of the property specifying the default execution duration (in milliseconds) set automatically to a job + * if none has been specified by the user. 
*/ + public final static String KEY_DEFAULT_EXECUTION_DURATION = "default_execution_duration"; + /** Name/Key of the property specifying the maximum execution duration (in milliseconds) that can be set on a job. */ + public final static String KEY_MAX_EXECUTION_DURATION = "max_execution_duration"; + /** Default value of the property {@link #KEY_DEFAULT_EXECUTION_DURATION} and {@link #KEY_MAX_EXECUTION_DURATION}: {@value #DEFAULT_EXECUTION_DURATION}. */ + public final static int DEFAULT_EXECUTION_DURATION = 0; + + /* DATABASE KEYS */ + /** Name/Key of the property specifying the database access method to use. */ + public final static String KEY_DATABASE_ACCESS = "database_access"; + /** Value of the property {@link #KEY_DATABASE_ACCESS} to select the simple JDBC method. */ + public final static String VALUE_JDBC = "jdbc"; + /** Value of the property {@link #KEY_DATABASE_ACCESS} to access the database using a DataSource stored in JNDI. */ + public final static String VALUE_JNDI = "jndi"; + /** Name/Key of the property specifying the ADQL to SQL translator to use. */ + public final static String KEY_SQL_TRANSLATOR = "sql_translator"; + /** Value of the property {@link #KEY_SQL_TRANSLATOR} to select a PostgreSQL translator (no support for geometrical functions). */ + public final static String VALUE_POSTGRESQL = "postgres"; + /** Value of the property {@link #KEY_SQL_TRANSLATOR} to select a PgSphere translator. */ + public final static String VALUE_PGSPHERE = "pgsphere"; + /** Name/Key of the property specifying by how many rows the library should fetch a query result from the database. + * This is the fetch size for to apply for synchronous queries. */ + public final static String KEY_SYNC_FETCH_SIZE = "sync_fetch_size"; + /** Default value of the property {@link #KEY_SYNC_FETCH_SIZE}: {@value #DEFAULT_SYNC_FETCH_SIZE}. 
*/ + public final static int DEFAULT_SYNC_FETCH_SIZE = 10000; + /** Name/Key of the property specifying by how many rows the library should fetch a query result from the database. + * This is the fetch size for to apply for asynchronous queries. */ + public final static String KEY_ASYNC_FETCH_SIZE = "async_fetch_size"; + /** Default value of the property {@link #KEY_ASYNC_FETCH_SIZE}: {@value #DEFAULT_ASYNC_FETCH_SIZE}. */ + public final static int DEFAULT_ASYNC_FETCH_SIZE = 100000; + /** Name/Key of the property specifying the name of the DataSource into the JDNI. */ + public final static String KEY_DATASOURCE_JNDI_NAME = "datasource_jndi_name"; + /** Name/Key of the property specifying the full class name of the JDBC driver. + * Alternatively, a shortcut the most known JDBC drivers can be used. The list of these drivers is stored + * in {@link #VALUE_JDBC_DRIVERS}. */ + public final static String KEY_JDBC_DRIVER = "jdbc_driver"; + /** List of the most known JDBC drivers. For the moment this list contains 4 drivers: + * oracle ("oracle.jdbc.OracleDriver"), postgresql ("org.postgresql.Driver"), mysql ("com.mysql.jdbc.Driver") + * and sqlite ("org.sqlite.JDBC"). */ + public final static HashMap VALUE_JDBC_DRIVERS = new HashMap(4); + static{ + VALUE_JDBC_DRIVERS.put("oracle", "oracle.jdbc.OracleDriver"); + VALUE_JDBC_DRIVERS.put("postgresql", "org.postgresql.Driver"); + VALUE_JDBC_DRIVERS.put("mysql", "com.mysql.jdbc.Driver"); + VALUE_JDBC_DRIVERS.put("sqlite", "org.sqlite.JDBC"); + } + /** Name/Key of the property specifying the JDBC URL of the database to access. */ + public final static String KEY_JDBC_URL = "jdbc_url"; + /** Name/Key of the property specifying the database user name to use to access the database. */ + public final static String KEY_DB_USERNAME = "db_username"; + /** Name/Key of the property specifying the password of the database user. 
*/ + public final static String KEY_DB_PASSWORD = "db_password"; + + /* METADATA KEYS */ + /** Name/Key of the property specifying where the list of schemas, tables and columns and their respective metadata + * is provided. */ + public final static String KEY_METADATA = "metadata"; + /** Value of the property {@link #KEY_METADATA} which indicates that metadata are provided in an XML file, whose the + * local path is given by the property {@link #KEY_METADATA_FILE}. */ + public final static String VALUE_XML = "xml"; + /** Value of the property {@link #KEY_METADATA} which indicates that metadata are already in the TAP_SCHEMA of the database. */ + public final static String VALUE_DB = "db"; + /** Name/Key of the property specifying the local file path of the XML file containing the TAP metadata to load. */ + public final static String KEY_METADATA_FILE = "metadata_file"; + + /* HOME PAGE KEY */ + /** Name/Key of the property specifying the TAP home page to use. + * It can be a file, a URL or a class. If null, the default TAP home page of the library is used. + * By default the default library home page is used. */ + public final static String KEY_HOME_PAGE = "home_page"; + /** Name/Key of the property specifying the MIME type of the set home page. + * By default, "text/html" is set. */ + public final static String KEY_HOME_PAGE_MIME_TYPE = "home_page_mime_type"; + + /* PROVIDER KEYS */ + /** Name/Key of the property specifying the name of the organization/person providing the TAP service. */ + public final static String KEY_PROVIDER_NAME = "provider_name"; + /** Name/Key of the property specifying the description of the TAP service. */ + public final static String KEY_SERVICE_DESCRIPTION = "service_description"; + + /* UPLOAD KEYS */ + /** Name/Key of the property indicating whether the UPLOAD feature must be enabled or not. + * By default, this feature is disabled. 
*/ + public final static String KEY_UPLOAD_ENABLED = "upload_enabled"; + /** Name/Key of the property specifying the default limit (in rows or bytes) on the uploaded VOTable(s). */ + public final static String KEY_DEFAULT_UPLOAD_LIMIT = "upload_default_db_limit"; + /** Name/Key of the property specifying the maximum limit (in rows or bytes) on the uploaded VOTable(s). */ + public final static String KEY_MAX_UPLOAD_LIMIT = "upload_max_db_limit"; + /** Name/Key of the property specifying the maximum size of all VOTable(s) uploaded in a query. */ + public final static String KEY_UPLOAD_MAX_FILE_SIZE = "upload_max_file_size"; + /** Default value of the property {@link #KEY_UPLOAD_MAX_FILE_SIZE}: {@value #DEFAULT_UPLOAD_MAX_FILE_SIZE}. */ + public final static int DEFAULT_UPLOAD_MAX_FILE_SIZE = Integer.MAX_VALUE; + + /* OUTPUT KEYS */ + /** Name/Key of the property specifying the list of all result output formats to support. + * By default all formats provided by the library are allowed. */ + public final static String KEY_OUTPUT_FORMATS = "output_formats"; + /** Value of the property {@link #KEY_OUTPUT_FORMATS} which select all formats that the library can provide. */ + public final static String VALUE_ALL = "ALL"; + /** Value of the property {@link #KEY_OUTPUT_FORMATS} which select a VOTable format. + * The format can be parameterized with the VOTable version and serialization. */ + public final static String VALUE_VOTABLE = "votable"; + /** Value of the property {@link #KEY_OUTPUT_FORMATS} which select a VOTable format. + * The format can be parameterized with the VOTable version and serialization. + * This value is just an alias of {@link #VALUE_VOTABLE}. */ + public final static String VALUE_VOT = "vot"; + /** Value of the property {@link #KEY_OUTPUT_FORMATS} which select a FITS format. */ + public final static String VALUE_FITS = "fits"; + /** Value of the property {@link #KEY_OUTPUT_FORMATS} which select a JSON format. 
*/ + public final static String VALUE_JSON = "json"; + /** Value of the property {@link #KEY_OUTPUT_FORMATS} which select an HTML format. */ + public final static String VALUE_HTML = "html"; + /** Value of the property {@link #KEY_OUTPUT_FORMATS} which select a human-readable table. */ + public final static String VALUE_TEXT = "text"; + /** Value of the property {@link #KEY_OUTPUT_FORMATS} which select a CSV format. */ + public final static String VALUE_CSV = "csv"; + /** Value of the property {@link #KEY_OUTPUT_FORMATS} which select a TSV format. */ + public final static String VALUE_TSV = "tsv"; + /** Value of the property {@link #KEY_OUTPUT_FORMATS} which select a Separated-Value format. + * This value must be parameterized with the separator to use. */ + public final static String VALUE_SV = "sv"; + /** Name/Key of the property specifying the number of result rows that should be returned if none is specified by the user. */ + public final static String KEY_DEFAULT_OUTPUT_LIMIT = "output_default_limit"; + /** Name/Key of the property specifying the maximum number of result rows that can be returned by the TAP service. */ + public final static String KEY_MAX_OUTPUT_LIMIT = "output_max_limit"; + + /* USER IDENTIFICATION */ + /** Name/Key of the property specifying the user identification method to use. + * None is implemented by the library, so a class must be provided as value of this property. */ + public final static String KEY_USER_IDENTIFIER = "user_identifier"; + + /* ADQL RESTRICTIONS */ + /** Name/Key of the property specifying the list of all allowed coordinate systems that can be used in ADQL queries. + * By default, all are allowed, but no conversion is done by the library. */ + public final static String KEY_COORD_SYS = "coordinate_systems"; + /** Name/Key of the property specifying the list of all ADQL geometrical function that can be used in ADQL queries. + * By default, all are allowed. 
*/ + public final static String KEY_GEOMETRIES = "geometries"; + /** Value of {@link #KEY_COORD_SYS} and {@link #KEY_GEOMETRIES} that forbid all possible values. */ + public final static String VALUE_NONE = "NONE"; + /** Name/Key of the property that lets declare all User Defined Functions that must be allowed in ADQL queries. + * By default, all unknown functions are rejected. This default behavior can be totally reversed by using the + * value {@link #VALUE_ANY} */ + public final static String KEY_UDFS = "udfs"; + /** Value of {@link #KEY_UDFS} allowing any unknown function in ADQL queries. Those functions will be considered as UDFs + * and will be translated into SQL exactly as they are written in ADQL. */ + public final static String VALUE_ANY = "ANY"; + + /* ADDITIONAL TAP RESOURCES */ + /** Name/Key of the property specifying a list of resources to add to the TAP service (e.g. a ADQL query validator). + * By default, this list if empty ; only the default TAP resources exist. */ + public final static String KEY_ADD_TAP_RESOURCES = "additional_resources"; + + /* CUSTOM FACTORY */ + /** Name/Key of the property specifying the {@link TAPFactory} class to use instead of the default {@link ConfigurableTAPFactory}. + * Setting a value to this property could disable several properties of the TAP configuration file. */ + public final static String KEY_TAP_FACTORY = "tap_factory"; + + /** No instance of this class should be created. */ + private TAPConfiguration(){} + + /** + *

    Read the asked property from the given Properties object.

    + *
      + *
    • The returned property value is trimmed (no space at the beginning and at the end of the string).
    • + *
    • If the value is empty (length=0), NULL is returned.
    • + *
    + * + * @param prop List of property + * @param key Property whose the value is requested. + * + * @return Return property value. + */ + public final static String getProperty(final Properties prop, final String key){ + if (prop == null) + return null; + + String value = prop.getProperty(key); + if (value != null){ + value = value.trim(); + return (value.length() == 0) ? null : value; + } + + return value; + } + + /** + * Test whether a property value is a class name. + * Expected syntax: a non-empty string surrounded by brackets ('{' and '}'). + * + * Note: The class name itself is not checked! + * + * @param value Property value. + * + * @return true if the given value is formatted as a class name, false otherwise. + */ + public final static boolean isClassName(final String value){ + return (value != null && value.length() > 2 && value.charAt(0) == '{' && value.charAt(value.length() - 1) == '}'); + } + + /** + * Fetch the class object corresponding to the class name provided between brackets in the given value. + * + * @param value Value which is supposed to contain the class name between brackets (see {@link #isClassName(String)} for more details) + * @param propertyName Name of the property associated with the parameter "value". + * @param expectedType Type of the class expected to be returned ; it is also the type which parameterizes this function: C. + * + * @return The corresponding Class object. + * + * @throws TAPException If the class name is incorrect or if its type is not compatible with the parameterized type C (represented by the parameter "expectedType"). 
+ * + * @see #isClassName(String) + */ + @SuppressWarnings("unchecked") + public final static < C > Class fetchClass(final String value, final String propertyName, final Class expectedType) throws TAPException{ + if (!isClassName(value)) + return null; + + String classPath = value.substring(1, value.length() - 1).trim(); + if (classPath.isEmpty()) + return null; + + try{ + Class classObject = (Class)Class.forName(classPath); + if (!expectedType.isAssignableFrom(classObject)) + throw new TAPException("The class specified by the property \"" + propertyName + "\" (" + value + ") is not implementing " + expectedType.getName() + "."); + else + return classObject; + }catch(ClassNotFoundException cnfe){ + throw new TAPException("The class specified by the property \"" + propertyName + "\" (" + value + ") can not be found."); + }catch(ClassCastException cce){ + throw new TAPException("The class specified by the property \"" + propertyName + "\" (" + value + ") is not implementing " + expectedType.getName() + "."); + } + } + + /** + *

    Create an instance of the specified class. The class name is expected to be surrounded by {} in the given value.

    + * + *

    The instance is created using the empty constructor of the specified class.

    + * + * @param propValue Value which is supposed to contain the class name between brackets (see {@link #isClassName(String)} for more details) + * @param propName Name of the property associated with the parameter "value". + * @param expectedType Type of the class expected to be returned ; it is also the type which parameterizes this function: C. + * + * @return The corresponding instance. + * + * @throws TAPException If the class name is incorrect + * or if its type is not compatible with the parameterized type C (represented by the parameter "expectedType") + * or if the specified class has no empty constructor + * or if an error occurred while calling this constructor. + * + * @see #isClassName(String) + * @see #fetchClass(String, String, Class) + */ + public final static < C > C newInstance(final String propValue, final String propName, final Class expectedType) throws TAPException{ + return newInstance(propValue, propName, expectedType, null, null); + } + + /** + *

    Create an instance of the specified class. The class name is expected to be surrounded by {} in the given value.

    + * + *

    IMPORTANT: + * The instance is created using the constructor whose the declaration matches exactly with the given list of parameter types. + * The number and types of given parameters MUST match exactly to the list of parameter types. + *

    + * + * @param propValue Value which is supposed to contain the class name between brackets (see {@link #isClassName(String)} for more details) + * @param propName Name of the property associated with the parameter "value". + * @param expectedType Type of the class expected to be returned ; it is also the type which parameterizes this function: C. + * @param pTypes List of each constructor parameter type. Each type MUST be exactly the type declared in the class constructor to select. NULL or empty array if no parameter. + * @param parameters List of all constructor parameters. The number of object MUST match exactly the number of classes provided in the parameter pTypes. NULL or empty array if no parameter. + * + * @return The corresponding instance. + * + * @throws TAPException If the class name is incorrect + * or if its type is not compatible with the parameterized type C (represented by the parameter "expectedType") + * or if the constructor with the specified parameters can not be found + * or if an error occurred while calling this constructor. + * + * @see #isClassName(String) + * @see #fetchClass(String, String, Class) + */ + public final static < C > C newInstance(final String propValue, final String propName, final Class expectedType, final Class[] pTypes, final Object[] parameters) throws TAPException{ + // Ensure the given name is a class name specification: + if (!isClassName(propValue)) + throw new TAPException("Class name expected for the property \"" + propName + "\" instead of: \"" + propValue + "\"! The specified class must extend/implement " + expectedType.getName() + "."); + + Class classObj = null; + try{ + + // Fetch the class object: + classObj = fetchClass(propValue, propName, expectedType); + + // Get a constructor matching the given parameters list: + Constructor constructor = classObj.getConstructor((pTypes == null) ? new Class[0] : pTypes); + + // Finally create a new instance: + return constructor.newInstance((parameters == null) ? 
new Object[0] : parameters); + + }catch(NoSuchMethodException e){ + // List parameters' type: + StringBuffer pTypesStr = new StringBuffer(); + for(int i = 0; i < pTypes.length; i++){ + if (pTypesStr.length() > 0) + pTypesStr.append(", "); + if (pTypes[i] == null) + pTypesStr.append("NULL"); + pTypesStr.append(pTypes[i].getName()); + } + // Throw the error: + throw new TAPException("Missing constructor " + classObj.getName() + "(" + pTypesStr.toString() + ")! See the value \"" + propValue + "\" of the property \"" + propName + "\"."); + }catch(InstantiationException ie){ + throw new TAPException("Impossible to create an instance of an abstract class: \"" + classObj.getName() + "\"! See the value \"" + propValue + "\" of the property \"" + propName + "\"."); + }catch(InvocationTargetException ite){ + if (ite.getCause() != null){ + if (ite.getCause() instanceof TAPException) + throw (TAPException)ite.getCause(); + else + throw new TAPException(ite.getCause()); + }else + throw new TAPException(ite); + }catch(TAPException te){ + throw te; + }catch(Exception ex){ + throw new TAPException("Impossible to create an instance of " + expectedType.getName() + " as specified in the property \"" + propName + "\": \"" + propValue + "\"!", ex); + } + } + + /** + *

    Lets parsing a limit (for output, upload, ...) with its numeric value and its unit.

    + *

    + * Here is the expected syntax: num_val[unit]. + * Where unit is optional and should be one of the following values: r or R, B, kB, MB, GB. + * If the unit is not specified, it is set by default to ROWS. + *

    + *

    Note: If the value is strictly less than 0 (whatever is the unit), the returned value will be -1.

    + * + * @param value Property value (must follow the limit syntax: num_val[unit] ; ex: 20kB or 2000 (for 2000 rows)). + * @param propertyName Name of the property which specify the limit. + * @param areBytesAllowed Tells whether the unit BYTES is allowed. If not and a BYTES unit is encountered, then an exception is thrown. + * + * @return An array with always 2 items: [0]=numeric value (of type Integer), [1]=unit (of type {@link LimitUnit}). + * + * @throws TAPException If the syntax is incorrect or if a not allowed unit has been used. + */ + public final static Object[] parseLimit(String value, final String propertyName, final boolean areBytesAllowed) throws TAPException{ + // Remove any whitespace inside or outside the numeric value and its unit: + if (value != null) + value = value.replaceAll("\\s", ""); + + // If empty value, return an infinite limit: + if (value == null || value.length() == 0) + return new Object[]{-1,LimitUnit.rows}; + + // A. Parse the string from the end in order to extract the unit part. + // The final step of the loop is the extraction of the numeric value, when the first digit is encountered. + int numValue = -1; + LimitUnit unit; + StringBuffer buf = new StringBuffer(); + for(int i = value.length() - 1; i >= 0; i--){ + // if a digit, extract the numeric value: + if (value.charAt(i) >= '0' && value.charAt(i) <= '9'){ + try{ + numValue = Integer.parseInt(value.substring(0, i + 1)); + break; + }catch(NumberFormatException nfe){ + throw new TAPException("Integer expected for the property " + propertyName + " for the substring \"" + value.substring(0, i + 1) + "\" of the whole value: \"" + value + "\"!"); + } + } + // if a character, store it for later processing: + else + buf.append(value.charAt(i)); + + } + + // B. Parse the unit. 
+ // if no unit, set ROWS by default: + if (buf.length() == 0) + unit = LimitUnit.rows; + // if the unit is too long, throw an exception: + else if (buf.length() > 2) + throw new TAPException("Unknown limit unit (" + buf.reverse().toString() + ") for the property " + propertyName + ": \"" + value + "\"!"); + // try to identify the unit: + else{ + // the base unit: bytes or rows + switch(buf.charAt(0)){ + case 'B': + if (!areBytesAllowed) + throw new TAPException("BYTES unit is not allowed for the property " + propertyName + " (" + value + ")!"); + unit = LimitUnit.bytes; + break; + case 'r': + case 'R': + unit = LimitUnit.rows; + break; + default: + throw new TAPException("Unknown limit unit (" + buf.reverse().toString() + ") for the property " + propertyName + ": \"" + value + "\"!"); + } + // the 10-power of the base unit, if any: + if (buf.length() > 1){ + if (unit == LimitUnit.bytes){ + switch(buf.charAt(1)){ + case 'k': + unit = LimitUnit.kilobytes; + break; + case 'M': + unit = LimitUnit.megabytes; + break; + case 'G': + unit = LimitUnit.gigabytes; + break; + default: + throw new TAPException("Unknown limit unit (" + buf.reverse().toString() + ") for the property " + propertyName + ": \"" + value + "\"!"); + } + }else + throw new TAPException("Unknown limit unit (" + buf.reverse().toString() + ") for the property " + propertyName + ": \"" + value + "\"!"); + } + } + + return new Object[]{((numValue < 0) ? 
-1 : numValue),unit}; + } + +} diff --git a/src/tap/config/gums_table.txt b/src/tap/config/gums_table.txt new file mode 100644 index 0000000000000000000000000000000000000000..9ec49684286687515fa350dd5fc6ecf65b301c53 --- /dev/null +++ b/src/tap/config/gums_table.txt @@ -0,0 +1,49 @@ +Name|DBType|JDBCType|TAPType|VOTableType +id|character varying(19)|varchar(19)|| +ra2|numeric(14,10)|numeric(14,10)|| +dec2|numeric(14,10)|numeric(14,10)|| +vmag|real|float4|| +gmag|real|float4|| +gbmag|real|float4|| +grmag|real|float4|| +gsmag|real|float4|| +ra|numeric(14,10)|numeric(14,10)|| +deg|numeric(14,10)|numeric(14,10)|| +r|double precision|float8|| +pmra|double precision|float8|| +pmde|double precision|float8|| +rv|double precision|float8|| +v_i|real|float4|| +av|real|float4|| +age|real|float4|| +alphafe|real|float4|| +balb|real|float4|| +e|real|float4|| +feh|real|float4|| +fi|smallint|int2|| +galb|real|float4|| +fm|smallint|int2|| +host|smallint|int2|| +i|real|float4|| +logg|real|float4|| +Omega|real|float4|| +mass|double precision|float8|| +mbol|real|float4|| +nc|smallint|int2|| +nt|smallint|int2|| +p|double precision|float8|| +omega|real|float4|| +t0|double precision|float8|| +phase|real|float4|| +pop|smallint|int2|| +beenv|double precision|float8|| +radius|double precision|float8|| +a|double precision|float8|| +teff|integer|int4|| +vamp|real|float4|| +vper|double precision|float8|| +vphase|real|float4|| +vtype|character varying(4)|varchar(4)|| +vsini|real|float4|| +recno|integer|int4|| +coord|spoint|spoint|| diff --git a/src/tap/config/tap_configuration_file.html b/src/tap/config/tap_configuration_file.html new file mode 100644 index 0000000000000000000000000000000000000000..0bebdc978d0b190839d13f9c9b1ae186a4c58cae --- /dev/null +++ b/src/tap/config/tap_configuration_file.html @@ -0,0 +1,720 @@ + + + + + TAP configuration file + + + + +

    TAP Configuration File

    +

+ All properties listed in the below table are all the possible TAP configuration properties. + Some of them are mandatory. If one of these properties is missing, the TAP Service will not be able to start: + an error will be displayed immediately in the application server log and an HTTP 503 error will be sent when accessing the TAP URL.

    +

    Besides, you should know that any property key not listed in this table will be ignored without error or warning message.

    +

+ However, any disallowed property value will generate a warning message in the application server log and the default value will be kept. + Thus, the TAP Service will be started and available but the desired configuration value will not be set. So, you should take a look + at the application server log every time you start the TAP Service!

    + +

    Here is an empty minimum TAP configuration file: tap_min.properties and a complete one: tap_full.properties.

    + + +

Important note: Any limit value is an integer and so can be at most: 2^31-1 bytes/rows = 2147483647B/R (or also for the byte unit: = 2147483kB = 2147MB = 2GB). + Otherwise, you should use the null value 0 to lift the limit constraint.

    + +

    Legend: M means that the property is mandatory. If nothing is written for the second column, the property is optional.

    + + / mandatory properties +

    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    PropertyTypeDescriptionExample
    General
    home_pagetext +

    This property lets set a custom home page. 4 different kinds of value are accepted:

    +
      +
    • nothing (default): the default home page provided by the library (just a simple HTML page displaying a list of all available TAP resources).
    • +
    • name or relative path of a file: this method MUST be chosen if the new home page is a JSP file. This file MUST be inside the directory WebContent of your web application.
    • +
    • a URI starting with file://: in this method the local file pointed by the URI will be merely returned when the home page will be requested.
    • +
    • a URL: here, a redirection toward this URL will be made at each request on the home page
    • +
    • a class name: the class name of an extension of tap.resource.HomePage which must replace the default home page resource. This class MUST have at least one constructor with exactly one parameter not NULL of type tap.resource.TAP.
    • +
    +

    By default, the default home page provided by the library is used.

    +
    • my_tap_homepage.jsp
    • jsp/my_tap_homepage.jsp
    • file:///home/foo/customHomePage.html
    • http://...
    • {aPackage.NewHomePage}
    home_page_mime_typetext +

    MIME type of the service home page.

    +

    This property is used only if the specified "home_page" is a local file path (i.e. if "home_page=file://...").

    +

    If no value is provided "text/html" will be set by default.

    +

    Default: text/html

    +
    • text/html (default)
    • text/plain
    • application/xml
    Provider
    provider_nametextName of the provider of the TAP Service.
    service_descriptiontextDescription of the TAP Service.
    Database (only if tap_factory = ø)
    database_accesstext +

    Method to use in order to create database connections.

    +

    Only two values are supported:

    +
      +
    • jndi: database connections will be supplied by a Datasource whose the JNDI name must be given. This method may propose connection pooling in function of the datasource configuration.
    • +
    • jdbc: the library will create itself connections when they will be needed thanks to the below JDBC parameters. This method does not propose any connection pooling.
    • +
    +
    • jdbc
    • jndi
    sql_translatortext +

    The translator to use in order to translate ADQL to a SQL compatible with the used DBMS and its spatial extension.

    +

    The TAP library supports only Postgresql (without spatial extension) and PgSphere for the moment. But you can provide your own SQL translator + (even if it does not have spatial features), by providing the name of a class (within brackets: {...}) that implements ADQLTranslator and which have at least an empty constructor.

    +
    • postgres
    • pgsphere
    • {apackage.MyADQLTranslator}
    sync_fetch_sizeinteger +

    Size of result blocks to fetch from the database when a ADQL query is executed in Synchronous mode.

    +

    Rather than fetching a query result in a whole, it may be possible to specify to the database + that results may be retrieved by blocks whose the size can be specified with this property. + If supported by the DBMS and the JDBC driver, this feature may help sparing memory and avoid + too much waiting time from the TAP /sync users (and thus, avoiding some HTTP client timeouts).

    +

    A negative or null value means that the default value of the JDBC driver will be used. Generally, + it means that the database must wait to have collected all data before sending them to the library.

    +

    Default: sync_fetch_size=10000

    +
    • 10000 (default)
• 0 (wait for the whole result)
    • 100000
    async_fetch_sizeinteger +

    Size of result blocks to fetch from the database when an ADQL query is executed in Asynchronous mode.

    +

    Rather than fetching a query result in a whole, it may be possible to specify to the database + that results may be retrieved by blocks whose the size can be specified with this property. + If supported by the DBMS and the JDBC driver, this feature may help sparing memory.

    +

    A negative or null value means that the default value of the JDBC driver will be used. Generally, + it means that the database must wait to have collected all data before sending them to the library.

    +

    Default: async_fetch_size=100000

    +
    • 100000 (default)
• 0 (wait for the whole result)
    • 1000000
    ⤷ JNDI datasource (only if database_access=jndi)
    datasource_jndi_nametext +

    JNDI name of the datasource. It should be defined in the web application (e.g. in the META-INF/context.xml file in tomcat).

    +
    • jdbc/postgres
    • jdbc/mydatasource
    • mydatasource
    ⤷ JDBC parameters (only if database_access=jdbc)
    jdbc_drivertext +

    JDBC driver path. By default, it is guessed in function of the database name provided + in the jdbc_url property. It MUST be provided if another DBMS is used or if the JDBC driver path + does not match the following ones:

    +
      +
    • Oracle : oracle.jdbc.OracleDriver
    • +
    • PostgreSQL: org.postgresql.Driver
    • +
    • MySQL : com.mysql.jdbc.Driver
    • +
    • SQLite : org.sqlite.JDBC
    • +
    +
    oracle.jdbc.driver.OracleDriver
    jdbc_urltext +

    It must be a JDBC driver URL.

    +

    Note: The username, password or other parameters may be included in it, but in this case, the corresponding properties + should leave empty or not provided at all.

    +
    • jdbc:postgresql:mydb
    • jdbc:postgresql://myserver:1234/mydb
    • jdbc:sqlite:Database.db
    db_usernametext +

    Mandatory if the username is not already provided in jdbc_url

    +

    Username used to access to the database.

    +
    db_passwordtext +

    Mandatory if the password is not already provided in jdbc_url

    +

    Password used by db_username to access to the database.

    +

    Warning: No password encryption can be done in this configuration file for the moment.

    +
    Metadata
    metadatatext +

    Define the way the library must get the list of all schemas, tables and columns to publish and all their metadata (e.g. utype, description, type, ...)

    +

    In its current state, the library proposes three methods:

    +
      +
    1. Parse a TableSet XML document and load its content into the database schema TAP_SCHEMA (note: this schema is first erased and rebuilt by the library).
    2. +
    3. Get all metadata from the database schema TAP_SCHEMA.
    4. +
    5. Build yourself the metadata of your service by creating an extension of tap.metadata.TAPMetadata. This extension must have either an empty constructor + or a constructor with exactly 3 parameters of type UWSFileManager, TAPFactory and TAPLog ; if both constructor are provided, only the one with parameters will be used.
    6. +
    +
    • xml
    • db
    • {apackage.MyTAPMetadata}
    +
    metadata_filetext +

    Mandatory if the value of "metadata" is "xml".

    +

    Local file path to the TableSet XML document.

    +

    The XML document must implement the schema TableSet defined by VODataService.

    +

    The file path must be either an absolute local file path or a file path relative to WebContent + (i.e. the web application directory in which there are WEB-INF and META-INF).

    +
    • /home/foo/my_metadata.xml
    • my_metadata.xml
    • WEB-INF/my_metadata.xml
    Files
    file_managertext +

    Type of the file manager.

    +

    Accepted values are: local (to manage files on the local system). + You can also add another way to manage files by providing the name (within brackets: {...}) of a class implementing TAPFileManager and having at least one constructor with only a java.util.Properties parameter.

    +
    • local
    • {apackage.MyTAPFileManager}
    file_root_pathtext +

    Local file path of the directory in which all TAP files (logs, errors, job results, backup, ...) must be.

    +

    The file path must be either an absolute local directory path or a directory path relative to WebContent + (i.e. the web application directory in which there are WEB-INF and META-INF).

    +
    • /home/my_home_dir/tapFiles
    • tapFiles
    • WEB-INF/tapFiles
    directory_per_userboolean +

    Tells whether a directory should be created for each user. If yes, the user directory will be named with the user ID. In this directory, there will be error files, job results + and it may be the backup file of the user.

    +

    Default: true

    +
    • true (default)
    • false
    group_user_directoriesboolean +

    Tells whether user directories must be grouped. If yes, directories are grouped by the first letter found in the user ID.

    +

    Default: false

    +
    • true
    • false (default)
    default_retention_periodinteger +

    The default period (in seconds) to keep query results. The prefix "default" means here that this value is put by default by the TAP Service + if the client does not provide a value for it.

    +

    The default period MUST be less or equals to the maximum retention period. If this rule is not respected, the default retention period is set immediately + to the maximum retention period.

    +

    A negative or null value means there is no restriction on the default retention period: job results will be kept forever. Float values are not allowed.

    +

    By default query results are kept forever: default_retention_period=0.

    86400 (1 day)
    max_retention_periodinteger +

    The maximum period (in seconds) to keep query results. The prefix "max" means here that the client can not set a retention period greater than this one.

    +

    The maximum period MUST be greater than or equal to the default retention period. If this rule is not respected, the default retention period is set immediately + to the maximum retention period.

    +

    A negative or null value means there is no restriction on the maximum retention period: the job results will be kept forever. Float values are not allowed.

    +

    Default: max_retention_period=0 (results kept for ever)

    604800 (1 week)
    Log files
    min_log_leveltext +

    Minimum level that a message must have in order to be logged.

    +

    5 possible values:

    +
      +
    • DEBUG: every message is logged.
    • +
    • INFO: all messages EXCEPT DEBUG are logged.
    • +
    • WARNING: all messages EXCEPT DEBUG and INFO are logged.
    • +
    • ERROR: only ERROR and FATAL messages are logged.
    • +
    • FATAL: only FATAL messages are logged.
    • +
    +

    Default: DEBUG (every message is logged)

    +
    • DEBUG
    • INFO
    • WARNING
    • ERROR
    • FATAL
    log_rotationtext +

    Frequency of the log file rotation. That's to say, logs will be written in a new file after this period. This avoids having log files that are too big. + Old log files are renamed so as to highlight their logging period.

    +

    The frequency string must respect the following syntax:

    +
      +
    • 'D' hh mm: daily schedule at hh:mm
    • +
    • 'W' dd hh mm: weekly schedule at the given day of the week (1:sunday, 2:monday, ..., 7:saturday) at hh:mm
    • +
    • 'M' dd hh mm: monthly schedule at the given day of the month at hh:mm
    • +
    • 'h' mm: hourly schedule at the given minute
    • +
    • 'm': scheduled every minute (for completeness :-))
    • +
    +

    Where: hh = integer between 0 and 23, mm = integer between 0 and 59, dd (for 'W') = integer between 1 and 7 (1:sunday, 2:monday, ..., 7:saturday), + dd (for 'M') = integer between 1 and 31.

    +

    Warning: + The frequency type is case sensitive! Then you should particularly pay attention to the case + when using the frequency types 'M' (monthly) and 'm' (every minute). +

    +

    Default: D 0 0 (daily at midnight)

    +
    • D 6 30
    • W 2 6 30
    • M 2 6 30
    • h 10
    • m
    UWS Backup (only if tap_factory = ø)
    backup_frequencytext or integer +

    Frequency at which the UWS service (that's to say, all its users and jobs) must be backuped.

    +

    Allowed values are: never (no backup will ever be done), user_action (each time a user does a writing action, like creating or executing a job), a time (must be positive and not null) in milliseconds.

    +

    The value user_action can be used ONLY IF backup_mode=true.

    +

    Default: backup_frequency=never (no backup)

    +
    • never (default)
    • user_action
    • 3600000 (1 hour)
    backup_by_usertext +

    Tells whether the backup must be a single file for all users (false), or one file per user (true). This second option should be chosen if your TAP Service is organizing its files by user directories ; see the property directory_per_user.

    +

    This option can be enabled ONLY IF a user identification method is provided ; see property user_identifier.

    +

    Default: false

    +
    • false (default)
    • true
    Asynchronous jobs management
    max_async_jobsinteger +

    Maximum number of asynchronous jobs that can run simultaneously.

    +

    A negative or null value means there is no restriction on the number of running asynchronous jobs.

    +

    Default: max_async_jobs=0 (no restriction)

    +
    • 0 (default)
    • 10
    Query Execution
    default_execution_durationinteger +

    Default time (in milliseconds) for query execution. The prefix "default" means here that the execution duration will be this one if the client does not set one.

    +

    The default duration MUST be less than or equal to the maximum execution duration. If this rule is not respected, the default execution duration is set immediately + to the maximum execution duration.

    +

    A negative or null value means there is no restriction on the default execution duration: the execution could never end. Float values are not allowed.

    +

    Default: default_execution_duration=0 (no restriction)

    +
    600000 (10 minutes)
    max_execution_durationinteger +

    Maximum time (in milliseconds) for query execution. The prefix "max" means here that the client can not set a time greater than this one.

    +

    The maximum duration MUST be greater than or equal to the default execution duration. If this rule is not respected, the default execution duration is set immediately + to the maximum execution duration.

    +

    A negative or null value means there is no restriction on the maximum execution duration: the execution could never end. Float values are not allowed.

    +

    Default: max_execution_duration=0 (no restriction)

    +
    3600000 (1 hour)
    Output
    output_formatstext +

    Comma separated list of output formats for query results.

    +

    Allowed values are: votable (or 'vot'), fits, text, csv, tsv, json and html.

    +

    The special value "ALL" will select all formats provided by the library.

    +

    The VOTable format may be more detailed with the following syntax: (serialization,version):mime_type:short_mime_type. + The MIME type part and the parameters part may be omitted (e.g. votable:application/xml:votable , votable(td,1.3)). + Empty string values are allowed for each value (e.g. votable():: , votable(td)::votable).

    +

    It is also possible to define a custom Separated Value format, different from CSV and TSV, thanks to the following syntax: sv(separator):mime_type:short_mime_type. + On the contrary to the VOTable syntax, the parameter (i.e. separator) MUST BE provided. The MIME type part may be omitted ; then the MIME type will be set by default to text/plain.

    +

    There is finally a last possible value: a class name of a class implementing OutputFormat and having at least one constructor with exactly one parameter of type tap.ServiceConnection.

    +

    Default: ALL

    +
    • votable
    • vot
    • vot(td,1.2)::votable
    • json,html ,csv, text
    • sv(|):text/psv:psv
    • sv([])
    • {apackage.FooOutputFormat}
    output_default_limittext +

    Default limit for the result output. The prefix "default" means here that this value will be set if the client does not provide one.

    +

    This limit can be expressed in only one unit: rows.

    +

    A negative value means there is no restriction on this limit. Float values are not allowed.

    +

    Obviously this limit MUST be less than or equal to output_max_limit.

    +

    Default: output_default_limit=-1 (no restriction)

    +
    • -1 (default)
    • 20
    • 20r
    • 20R
    output_max_limittext +

    Maximum limit for the result output. The prefix "max" means here that the client can not set a limit greater than this one.

    +

    This limit can be expressed in only one unit: rows.

    +

    A negative value means there is no restriction on this limit. Float values are not allowed.

    +

    Obviously this limit MUST be greater than or equal to output_default_limit.

    +

    Default: output_max_limit=-1 (no restriction)

    +
    • -1 (default)
    • 1000
    • 10000r
    • 10000R
    Upload
    upload_enabledboolean +

    Tells whether the Upload must be enabled. If enabled, files can be uploaded in the file_root_path, + the corresponding tables can be added inside the UPLOAD_SCHEMA of the database, queried and then deleted.

    +

    By default, the Upload is disabled: upload_enabled=false.

    +
    • false (default)
    • true
    upload_default_db_limittext +

    Default limit for the number of uploaded records that can be inserted inside the database. The prefix "default" means here that this value will be set if the client does not provide one.

    +

    This limit can be expressed with 2 types: rows or bytes. For rows, you just have to suffix the value by a "r" (upper- or lower-case) + or by nothing (by default, nothing will mean "rows"). For bytes, you have to suffix the numeric value by "B", "kB", "MB" or "GB". + Here, unit is case sensitive. No other storage unit is allowed.

    +

    A negative value means there is no restriction on this limit. Float values are not allowed.

    +

    Warning! Obviously this limit MUST be less than or equal to upload_max_db_limit, and MUST be of the same type as it. + If the chosen type is rows, this limit MUST also be strictly less than upload_max_file_size.

    +

    Default: upload_default_db_limit=-1 (no restriction)

    +
    • -1 (default)
    • 20
    • 20r
    • 20R
    • 200kB
    upload_max_db_limittext +

    Maximum limit for the number of uploaded records that can be inserted inside the database. The prefix "max" means here that the client can not set a limit greater than this one.

    +

    This limit can be expressed with 2 types: rows or bytes. For rows, you just have to suffix the value by a "r" (upper- or lower-case), + or with nothing (by default, nothing will mean "rows"). For bytes, you have to suffix the numeric value by "B", "kB", "MB" or "GB". + Here, unit is case sensitive. No other storage unit is allowed.

    +

    A negative value means there is no restriction on this limit. Float values are not allowed.

    +

    Warning! Obviously this limit MUST be greater than or equal to upload_default_db_limit, and MUST be of the same type as it. + If the chosen type is rows, this limit MUST also be strictly less than upload_max_file_size.

    +

    Default: upload_max_db_limit=-1 (no restriction)

    +
    • -1 (default)
    • 10000
    • 10000r
    • 10000R
    • 1MB
    upload_max_file_sizetext +

    Maximum allowed size for the uploaded file.

    +

    This limit MUST be expressed in bytes. Thus, you have to suffix the numeric value by "B", "kB", "MB" or "GB". + Here, unit is case sensitive. No other storage unit is allowed.

    +

    Warning! When the upload is enabled, there must be a maximum file size. Here, no "unlimited" value is possible ; 0 and any negative value are not allowed.

    +

    Warning! In function of the chosen upload_max_db_limit type, upload_max_file_size MUST be greater in order to figure out the file metadata part.

    +

    Default: upload_max_file_size=2147483647B (~2GB ; maximum possible value)

    +
    • 2147483647B (default)
    • 2MB
    User identification
    user_identifiertext +

    Class to use in order to identify a user of the TAP service. The same instance of this class will be used for every request sent to the service.

    +

    + The value of this property MUST be a class name (with brackets: {...}) of a class implementing the interface uws.service.UserIdentifier. + This class MUST have at least one constructor with no parameters. +

    +

    By default, no identification is performed ; all users are then anonymous and their jobs can be seen by everybody.

    +
    {apackage.FooUserIdentifier}
    ADQL restrictions
    coordinate_systemstext +

    Comma-separated list of all allowed coordinate systems.

    +

    + Each item of the list must be a kind of regular expression respecting the following syntax: + Frame RefPos Flavor. In other words, it must be a string of exactly + 3 parts. Each of these parts is a single value, a list of allowed values or a * meaning all + values. A list of values must be indicated between parentheses and values must be separated by a pipe. +

    +

    Allowed values for Frame are: ICRS, FK4, + FK5, ECLIPTIC, GALACTIC and UNKNOWNFRAME.

    +

    Allowed values for RefPos are: BARYCENTER, GEOCENTER, + HELIOCENTER, LSR, TOPOCENTER, RELOCATABLE + and UNKNOWNREFPOS.

    +

    Allowed values for Flavor are: CARTESIAN2, CARTESIAN3 and + SPHERICAL2.

    +

    + If the special value NONE is given instead of a list of allowed coordinate systems, + no coordinate system will be allowed. And if the list is empty, any coordinate system will be allowed. +

    +

    By default, any coordinate system is allowed.

    +
    • ø (default)
    • NONE
    • ICRS * *
    • ICRS * *, ECLIPTIC * (CARTESIAN2 | SPHERICAL2)
    geometriestext +

    Comma-separated list of all allowed geometries.

    +

    + Each item of the list must be the name (whatever is the case) of an ADQL geometrical function (e.g. INTERSECTS, COORDSYS, POINT) to allow. + If the list is empty (no item), all functions are allowed. And if the special value NONE is given, no ADQL function will be allowed. +

    +

    By default, all ADQL geometrical functions are allowed.

    +
    • ø (default)
    • NONE
    • CONTAINS, intersects, Point, Box, CIRCLE
    udfstext +

    Comma-separated list of all allowed UDFs (User Defined Functions).

    +

    + Each item of the list must have the following syntax: [fct_signature] or [fct_signature, className]. + fct_signature is the function signature. Its syntax is the same as in TAPRegExt. + className is the name of a class extending UserDefinedFunction. An instance of this class will replace any reference of a UDF + written in an ADQL function with the associated signature. A class name must be specified if the function to represent has a signature + (and more particularly a name) different in ADQL and in SQL. +

    +

    + If the list is empty (no item), all unknown functions are forbidden. And if the special value ANY is given, any unknown function is allowed ; + consequently the unknown ADQL functions will be translated into SQL as they are in ADQL. +

    +

    By default, no unknown function is allowed.

    +
    • ø (default)
    • ANY
    • [trim(txt String) -> String], [random() -> DOUBLE]
    • [newFct(x double)->double, {apackage.MyNewFunction}]
    Additional TAP Resources
    additional_resourcestext +

    Comma-separated list of additional TAP resources/end-point.

    +

    + By default, the following standard TAP resources are already existing: /sync, /async, /tables, /capabilities and /availability. + With this property, you can add a custom resource to your TAP service (e.g. /adqlValidator, /admin). +

    +

    + Each item of the list MUST be the name of a class implementing tap.resource.TAPResource. This class MUST have at least one constructor + with exactly one parameter of type tap.resource.TAP. +

    +

    + The string returned by tap.resource.TAPResource.getName() will be the resource name, following the root TAP service URL (e.g. if getName() + returns "foo", then its access URL will be "{tapRoot}/foo"). Then, it is possible to replace TAP resources already existing by using the same + name (e.g. if getName() returns "sync", the /sync resource will no longer be the default Sync resource of this library but your new resource). +

    +

    By default, this list is empty ; only the standard TAP resources exist.

    +
    {aPackage.QuickADQLValidator}
    Custom TAP Factory
    tap_factorytext +

    Class to use in replacement of the default TAPFactory.

    +

    + This property must be a class name (given between {...}). It must reference an implementation of TAPFactory. + This implementation must have at least one constructor with exactly one parameter of type ServiceConnection. +

    +

    + It is recommended to extend an existing implementation such as: + tap.AbstractTAPFactory or tap.config.ConfigurableTAPFactory. +

    +

    By default, the default TAPFactory (tap.config.ConfigurableTAPFactory) is used and may use all properties related to the backup management, + the database access and the ADQL translation.

    +
    {aPackage.MyTAPFactory}
    + + + \ No newline at end of file diff --git a/src/tap/config/tap_full.properties b/src/tap/config/tap_full.properties new file mode 100644 index 0000000000000000000000000000000000000000..b223d59420f1927188439eadc2b48ee90ca23340 --- /dev/null +++ b/src/tap/config/tap_full.properties @@ -0,0 +1,536 @@ +########################################################## +# FULL TAP CONFIGURATION FILE # +# # +# TAP Version: 2.0 # +# Date: 13 April 2015 # +# Author: Gregory Mantelet (ARI) # +# # +########################################################## + +########### +# GENERAL # +########### + +# [OPTIONAL] +# This property lets set a custom home page. +# +# 4 different kinds of value are accepted: +# * nothing (default): the default home page provided by the library (just a simple HTML page displaying a list of all available TAP resources). +# * name or relative path of a file: this method MUST be chosen if the new home page is a JSP file. This file MUST be inside the directory WebContent of your web application. +# * a URI starting with file://: in this method the local file pointed by the URI will be merely returned when the home page will be requested. +# * a URL: here, a redirection toward this URL will be made at each request on the home page +# * a class name: the class name of an extension of tap.resource.HomePage which must replace the default home page resource. +# This class MUST have at least one constructor with exactly one parameter not NULL of type tap.resource.TAP. +home_page = + +# [OPTIONAL] +# MIME type of the service home page. +# +# This property is used only if the specified "home_page" is a local file path (i.e. if "home_page=file://..."). +# +# If no value is provided "text/html" will be set by default. +# +# Default: text/html +home_page_mime_type = + +############ +# PROVIDER # +############ + +# [OPTIONAL] +# Name of the provider of the TAP Service. +provider_name = ARI + +# [OPTIONAL] +# Description of the TAP Service. 
+service_description = My TAP Service is so amazing! You should use it with your favorite TAP client. + +############ +# DATABASE # +############ + +# [MANDATORY] +# Method to use in order to create database connections. +# +# Only two values are supported: +# * jndi: database connections will be supplied by a Datasource whose the JNDI name must be given. This method may propose connection pooling in function of the datasource configuration. +# * jdbc: the library will create itself connections when they will be needed thanks to the below JDBC parameters. This method does not propose any connection pooling. +# +# Allowed values: jndi, jdbc. +database_access = + +# [MANDATORY] +# The translator to use in order to translate ADQL to a SQL compatible with the used DBMS and its spatial extension. +# +# The TAP library supports only Postgresql (without spatial extension) and PgSphere for the moment. But you can provide your own SQL translator +# (even if it does not have spatial features), by providing the name of a class (within brackets: {...}) that implements ADQLTranslator (for instance: {apackage.MyADQLTranslator}) +# and which have at least an empty constructor. +# +# Allowed values: postgres, pgsphere, a class name +sql_translator = postgres + +# [OPTIONAL] +# Size of result blocks to fetch from the database when a ADQL query is executed in Synchronous mode. +# +# Rather than fetching a query result in a whole, it may be possible to specify to the database that +# results may be retrieved by blocks whose the size can be specified with this property. If supported by +# the DBMS and the JDBC driver, this feature may help sparing memory and avoid too much waiting time from +# the TAP /sync users (and thus, avoiding some HTTP client timeouts). +# +# A negative or null value means that the default value of the JDBC driver will be used. Generally, it means +# that the database must wait to have collected all data before sending them to the library. 
+# +# Default: sync_fetch_size=10000 +sync_fetch_size = 10000 + +# [OPTIONAL] +# Size of result blocks to fetch from the database when an ADQL query is executed in Asynchronous mode. +# +# Rather than fetching a query result in a whole, it may be possible to specify to the database that +# results may be retrieved by blocks whose the size can be specified with this property. If supported by +# the DBMS and the JDBC driver, this feature may help sparing memory. +# +# A negative or null value means that the default value of the JDBC driver will be used. Generally, it means +# that the database must wait to have collected all data before sending them to the library. +# +# Default: async_fetch_size=100000 +async_fetch_size=100000 + +############################# +# IF DATABASE ACCESS = JNDI # +############################# + +# [MANDATORY] +# JNDI name of the datasource pointing toward the database to use. +# It should be defined in the web application (e.g. in the META-INF/context.xml file in tomcat). +datasource_jndi_name = + +############################# +# IF DATABASE ACCESS = JDBC # +############################# + +# [MANDATORY] +# JDBC driver URL pointing toward the database to use. +# +# Note: The username, password or other parameters may be included in it, but in this case, the corresponding properties should leave empty or not provided at all. +jdbc_url = + +# [OPTIONAL] +# JDBC driver path. +# +# By default, it is guessed in function of the database name provided in the jdbc_url property. It MUST be provided if another DBMS is used or if the JDBC driver path does not match the following ones: +# * Oracle : oracle.jdbc.OracleDriver +# * PostgreSQL: org.postgresql.Driver +# * MySQL : com.mysql.jdbc.Driver +# * SQLite : org.sqlite.JDBC +#jdbc_driver = + +# [MANDATORY] +# Mandatory if the username is not already provided in jdbc_url +# Username used to access to the database. 
+db_username = + +# [MANDATORY] +# Mandatory if the password is not already provided in jdbc_url +# Password used by db_username to access to the database. +# +# Note: No password encryption can be done in this configuration file for the moment. +db_password = + +############ +# METADATA # +############ + +# [MANDATORY] +# Metadata fetching method. +# +# The value of this key defines the way the library will get the list of all schemas, tables and columns to publish and all their metadata (e.g. utype, description, type, ...). +# +# In its current state, the library proposes three methods: +# 1/ Parse a TableSet XML document and load its content into the database schema TAP_SCHEMA (note: this schema is first erased and rebuilt by the library). +# 2/ Get all metadata from the database schema TAP_SCHEMA. +# 3/ Build yourself the metadata of your service by creating an extension of tap.metadata.TAPMetadata. This extension must have either an empty constructor +# or a constructor with exactly 3 parameters of type UWSFileManager, TAPFactory and TAPLog ; if both constructor are provided, only the one with parameters will be used. +# +# Allowed values: xml, db or a full class name (between {}). +metadata = + +# [MANDATORY] +# Mandatory if the value of "metadata" is "xml". +# Local file path to the TableSet XML document. +# The XML document must implement the schema TableSet defined by VODataService. +# The file path must be either an absolute local file path or a file path relative to WebContent (i.e. the web application directory in which there are WEB-INF and META-INF). +metadata_file = + +######### +# FILES # +######### + +# [MANDATORY] +# Type of the file manager. +# +# Accepted values are: local (to manage files on the local system). You can also add another way to manage files by providing +# the name (within brackets: {...}) of a class implementing TAPFileManager and having at least one constructor with only a +# java.util.Properties parameter. 
+# +# Allowed values: local, a class name. +file_manager = local + +# [MANDATORY] +# Local file path of the directory in which all TAP files (logs, errors, job results, backup, ...) must be. +# The file path must be either an absolute local directory path or a directory path relative to WebContent (i.e. the web application directory in which there are WEB-INF and META-INF). +file_root_path = + +# [OPTIONAL] +# Tells whether a directory should be created for each user. +# +# If yes, the user directory will be named with the user ID. In this directory, there will be error files, job results and it may be the backup file of the user. +# +# Allowed values: true (default), false. +directory_per_user = true + +# [OPTIONAL] +# Tells whether user directories must be grouped. +# +# If yes, directories are grouped by the first letter found in the user ID. +# +# Allowed values: true (default), false. +group_user_dir = true + +# [OPTIONAL] +# The default period (in seconds) to keep query results. +# +# The prefix "default" means here that this value is put by default by the TAP Service if the client does not provide a value for it. +# +# The default period MUST be less or equals to the maximum retention period. If this rule is not respected, the default retention period is set +# immediately to the maximum retention period. +# +# A negative or null value means there is no restriction on the default retention period: job results will be kept forever. Float values are not allowed. +# +# Default: 0 (results kept forever). +default_retention_period = 0 + +# [OPTIONAL] +# The maximum period (in seconds) to keep query results. +# +# The prefix "max" means here that the client can not set a retention period greater than this one. +# +# The maximum period MUST be greater or equals to the default retention period. If this rule is not respected, the default retention period is set +# immediately to the maximum retention period. 
+# +# A negative or null value means there is no restriction on the maximum retention period: the job results will be kept forever. Float values are not allowed. +# +# Default: 0 (results kept forever). +max_retention_period = 0 + +############# +# LOG FILES # +############# + +# [OPTIONAL] +# Minimum level that a message must have in order to be logged. +# +# 5 possible values: +# * DEBUG: every messages are logged. +# * INFO: every messages EXCEPT DEBUG are logged. +# * WARNING: every messages EXCEPT DEBUG and INFO are logged. +# * ERROR: only ERROR and FATAL messages are logged. +# * FATAL: only FATAL messages are logged. +# +# Default: DEBUG (every messages are logged) +min_log_level = + +# [OPTIONAL] +# Frequency of the log file rotation. That's to say, logs will be written in a new file after this period. This avoid having too big log files. +# Old log files are renamed so that highlighting its logging period. +# +# The frequency string must respect the following syntax: +# 'D' hh mm: daily schedule at hh:mm +# 'W' dd hh mm: weekly schedule at the given day of the week (1:sunday, 2:monday, ..., 7:saturday) at hh:mm +# 'M' dd hh mm: monthly schedule at the given day of the month at hh:mm +# 'h' mm: hourly schedule at the given minute +# 'm': scheduled every minute (for completness :-)) +# Where: hh = integer between 0 and 23, mm = integer between 0 and 59, dd (for 'W') = integer between 1 and 7 (1:sunday, 2:monday, ..., 7:saturday), dd (for 'M') = integer between 1 and 31. +# +# Warning: The frequency type is case sensitive! Then you should particularly pay attention at the case when using the frequency types 'M' (monthly) and 'm' (every minute). +# +# Note: this property is ignored if the file manager is not any more an extension of uws.service.file.LocalUWSFileManager. 
+# +# Default: D 0 0 (daily at midnight) +log_rotation = + +############## +# UWS_BACKUP # +############## + +# [OPTIONAL] +# Frequency at which the UWS service (that's to say, all its users and jobs) must be backuped. +# +# Allowed values: never (no backup will never be done ; default), user_action (each time a user does a writing action, like creating or execution a job), +# a time (must be positive and not null) in milliseconds. +# +# The value user_action can be used ONLY IF backup_mode=true. +# +# Default: never +backup_frequency = never + +# [OPTIONAL] +# Tells whether the backup must be one file for every user (false), or one file for each user (true). +# This second option should be chosen if your TAP Service is organizing its files by user directories ; +# see the property directory_per_user. +# +# This option can be enabled ONLY IF a user identification method is provided ; see property user_identifier. +# +# Default: false +backup_by_user = false + +##################### +# ASYNCHRONOUS JOBS # +##################### + +# [OPTIONAL] +# Maximum number of asynchronous jobs that can run simultaneously. +# +# A negative or null value means there is no restriction on the number of running asynchronous jobs. +# +# Default: there is no restriction => max_async_jobs=0. +max_async_jobs = 0 + +################### +# QUERY_EXECUTION # +################### + +# [OPTIONAL] +# Default time (in milliseconds) for query execution. +# +# The prefix "default" means here that the execution duration will be this one if the client does not set one. +# +# The default duration MUST be less or equals to the maximum execution duration. If this rule is not respected, the default execution duration is set +# immediately to the maximum execution duration. +# +# A negative or null value means there is no restriction on the default execution duration: the execution could never end. Float values are not allowed. +# +# Default: there is no restriction => default_execution_duration=0. 
+default_execution_duration = 0 + +# [OPTIONAL] +# Maximum time (in milliseconds) for query execution. +# +# The prefix "max" means here that the client can not set a time greater than this one. +# +# The maximum duration MUST be greater or equals to the default execution duration. If this rule is not respected, the default execution duration is set +# immediately to the maximum execution duration. +# +# A negative or null value means there is no restriction on the maximum execution duration: the execution could never end. Float values are not allowed. +# +# Default: there is no restriction => max_execution_duration=0. +max_execution_duration = 0 + +########## +# OUTPUT # +########## + +# [OPTIONAL] +# Comma separated list of output formats for query results. +# +# Allowed values are: votable (or 'vot'), fits, text, csv, tsv, json and html. +# +# The special value "ALL" will select all formats provided by the library. +# +# The VOTable format may be more detailed with the following syntax: (serialization,version):mime_type:short_mime_type. +# The MIME type part and the parameters part may be omitted (e.g. votable:application/xml:votable , votable(td,1.3)]). +# Empty string values are allowed for each values (e.g. votable():: , votable(td)::votable). +# +# It is also possible to define a custom Separated Value format, different from CSV and TSV, thanks to the following syntax: sv(separator):mime_type:short_mime_type. +# On the contrary to the VOTable syntax, the parameter (i.e. separator) MUST BE provided. +# The MIME type part may be omitted ; then the MIME type will be set by default to text/plain. +# +# There is finally a last possible value: a class name of a class implementing OutputFormat and having at least one constructor with exactly one parameter of type +# tap.ServiceConnection. +# +# Default: ALL +output_formats = ALL + +# [OPTIONAL] +# Default limit for the result output. 
+# +# The prefix "default" means here that this value will be set if the client does not provide one. +# +# This limit can be expressed in only one unit: rows. +# +# A negative value means there is no restriction on this limit. Float values are not allowed. +# +# Obviously this limit MUST be less or equal than output_max_limit. +# +# Default: there is no restriction: output_default_limit=-1 +output_default_limit = -1 + +# [OPTIONAL] +# Maximum limit for the result output. The prefix "max" means here that the client can not set a limit greater than this one. +# +# This limit can be expressed in only one unit: rows. +# +# A negative value means there is no restriction on this limit. Float values are not allowed. +# +# Obviously this limit MUST be greater or equal than output_default_limit. +# +# Default: there is no restriction => output_max_limit=-1 +output_max_limit = -1 + +########## +# UPLOAD # +########## + +# [OPTIONAL] +# Tells whether the Upload must be enabled. +# +# If enabled, files can be uploaded in the file_root_path, the corresponding tables can be added inside the UPLOAD_SCHEMA +# of the database, queried and then deleted. +# +# Allowed values: true, false (default). +upload_enabled = false + +# [OPTIONAL] +# Default limit for the number of uploaded records that can be inserted inside the database. +# +# The prefix "default" means here that this value will be set if the client does not provide one. +# +# This limit can be expressed with 2 types: rows or bytes. For rows, you just have to suffix the value by a "r" (upper- or lower-case), +# with nothing (by default, nothing will mean "rows"). For bytes, you have to suffix the numeric value by "b", "kb", "Mb" or "Gb". Here, +# unit is case sensitive (except for the last character: "b"). No other storage unit is allowed. +# +# A negative value means there is no restriction on this limit. Float values are not allowed. +# +# Obviously this limit MUST be less or equal than upload_max_db_limit. 
+# +# Default: there is no restriction: upload_default_db_limit=-1 +upload_default_db_limit = -1 + +# [OPTIONAL] +# Maximum limit for the number of uploaded records that can be inserted inside the database. +# +# The prefix "max" means here that the client can not set a limit greater than this one. +# +# This limit can be expressed with 2 types: rows or bytes. For rows, you just have to suffix the value by a "r" (upper- or lower-case), +# with nothing (by default, nothing will mean "rows"). For bytes, you have to suffix the numeric value by "b", "kb", "Mb" or "Gb". Here, +# unit is case sensitive (except for the last character: "b"). No other storage unit is allowed. +# +# A negative value means there is no restriction on this limit. Float values are not allowed. +# +# Obviously this limit MUST be greater or equal than upload_default_db_limit. +# +# Default: there is no restriction: upload_max_db_limit=-1 +upload_max_db_limit = -1 + +# [OPTIONAL] +# Maximum allowed size for the uploaded file. +# +# This limit MUST be expressed in bytes. Thus, you have to suffix the numeric value by "B", "kB", "MB" or "GB". Here, unit is case sensitive. No other storage unit is allowed. +# +# Warning! When the upload is enabled, there must be a maximum file size. Here, no "unlimited" value is possible ; 0 and any negative value are not allowed. +# +# Warning! In function of the chosen upload_max_db_limit type, upload_max_file_size MUST be greater in order to figure out the file metadata part. +# +# Default: upload_max_file_size=2147483647B (~2GB ; maximum possible value) +upload_max_file_size = 2147483647B + +####################### +# USER IDENTIFICATION # +####################### + +# [OPTIONAL] +# Class to use in order to identify a user of the TAP service. +# +# The same instance of this class will be used for every request sent to the service. 
+# +# The value of this property MUST be a class name (with brackets: {...}) of a class implementing the interface uws.service.UserIdentifier. +# This class MUST have one of its constructors with no parameter. +# +# Default: no identification is performed => all users are then anonymous and their jobs can be seen by everybody. +user_identifier = + +###################### +# COORDINATE SYSTEMS # +###################### + +# [OPTIONAL] +# Comma-separated list of all allowed coordinate systems. +# +# Each item of the list must be a kind of regular expression respecting the following syntax: Frame RefPos Flavor. In other words, it must be a string of exactly 3 parts. Each of these parts is a single value, a list of allowed values or a * meaning all values. A list of values must be indicated between parentheses and values must be separated by a pipe. +# +# Allowed values for Frame are: ICRS, FK4, FK5, ECLIPTIC, GALACTIC and UNKNOWNFRAME. +# Allowed values for RefPos are: BARYCENTER, GEOCENTER, HELIOCENTER, LSR, TOPOCENTER, RELOCATABLE and UNKNOWNREFPOS. +# Allowed values for Flavor are: CARTESIAN2, CARTESIAN3 and SPHERICAL2. +# +# If the special value NONE is given instead of a list of allowed coordinate systems, no coordinate system will be allowed. And if the list is empty, any coordinate system will be allowed. +# +# By default, any coordinate system is allowed. +coordinate_systems = + +############## +# GEOMETRIES # +############## + +# [OPTIONAL] +# Comma-separated list of all allowed geometries. +# +# Each item of the list must be the name (whatever is the case) of an ADQL geometrical function (e.g. INTERSECTS, COORDSYS, POINT) to allow. +# If the list is empty (no item), all functions are allowed. And if the special value NONE is given, no ADQL function will be allowed. +# +# Default: all ADQL geometrical functions are allowed. 
+geometries = + +################################# +# USER DEFINED FUNCTIONS (UDFs) # +################################# + +# [OPTIONAL] +# Comma-separated list of all allowed UDFs (User Defined Functions). +# +# Each item of the list must have the following syntax: [fct_signature] or [fct_signature, className]. fct_signature is the function signature. +# Its syntax is the same as in TAPRegExt. className is the name of a class extending UserDefinedFunction. An instance of this class will replace +# any reference of a UDF written in an ADQL function with the associated signature. A class name must be specified if the function to represent +# has a signature (and more particularly a name) different in ADQL and in SQL. +# +# If the list is empty (no item), all unknown functions are forbidden. And if the special value ANY is given, any unknown function is allowed ; +# consequently the unknown ADQL functions will be translated into SQL as they are in ADQL. +# +# Default: no unknown function is allowed. +udfs = + +######################## +# ADDITIONAL RESOURCES # +######################## + +# [OPTIONAL] +# Comma-separated list of additional TAP resources/end-point. +# +# By default, the following standard TAP resources are already existing: /sync, /async, /tables, /capabilities and /availability. +# With this property, you can add a custom resource to your TAP service (e.g. /adqlValidator, /admin). +# +# Each item of the list MUST be the name of a class implementing tap.resource.TAPResource. This class MUST have at least one constructor with +# exactly one parameter of type tap.resource.TAP. +# +# The string returned by tap.resource.TAPResource.getName() will be the resource name, following the root TAP service URL (e.g. if getName() +# returns "foo", then its access URL will be "{tapRoot}/foo"). Then, it is possible to replace TAP resources already existing by using the same +# name (e.g. 
if getName() returns "sync", the /sync resource won't be anymore the default Sync resource of this library but your new resource). +# +# By default, this list is empty ; only the standard TAP resources exist. +additional_resources = + +###################### +# CUSTOM TAP_FACTORY # +###################### + +# [OPTIONAL] +# Class to use in replacement of the default TAPFactory. +# +# This property must be a class name (given between {...}). It must reference an implementation of TAPFactory. +# This implementation must have at least one constructor with exactly one parameter of type ServiceConnection. +# +# It is recommended to extend an existing implementation such as: +# tap.AbstractTAPFactory or tap.config.ConfigurableTAPFactory. +# +# By default, the default TAPFactory (tap.config.ConfigurableTAPFactory) is used and may use all properties related to the backup management, +# the database access and the ADQL translation. +tap_factory = diff --git a/src/tap/config/tap_min.properties b/src/tap/config/tap_min.properties new file mode 100644 index 0000000000000000000000000000000000000000..2bfd5afc7a2df811cbeeff0f6ab2803233514893 --- /dev/null +++ b/src/tap/config/tap_min.properties @@ -0,0 +1,107 @@ +########################################################## +# MINIMUM TAP CONFIGURATION FILE # +# # +# TAP Version: 2.0 # +# Date: 27 Feb. 2015 # +# Author: Gregory Mantelet (ARI) # +# # +########################################################## + +############ +# DATABASE # +############ + +# Method to use in order to create database connections. +# +# Only two values are supported: +# * jndi: database connections will be supplied by a Datasource whose the JNDI name must be given. This method may propose connection pooling in function of the datasource configuration. +# * jdbc: the library will create itself connections when they will be needed thanks to the below JDBC parameters. This method does not propose any connection pooling. +# +# Allowed values: jndi, jdbc. 
+database_access = + +# The translator to use in order to translate ADQL to a SQL compatible with the used DBMS and its spatial extension. +# +# The TAP library supports only Postgresql (without spatial extension) and PgSphere for the moment. But you can provide your own SQL translator +# (even if it does not have spatial features), by providing the name of a class (within brackets: {...}) that implements ADQLTranslator (for instance: {apackage.MyADQLTranslator}) +# and which have at least an empty constructor. +# +# Allowed values: postgres, pgsphere, a class name +sql_translator = postgres + +############################# +# IF DATABASE ACCESS = JNDI # +############################# + +# JNDI name of the datasource. +# +# It should be defined in the web application (e.g. in the META-INF/context.xml file in tomcat). +datasource_jndi_name = + +############################# +# IF DATABASE ACCESS = JDBC # +############################# + +# It must be a JDBC driver URL. +# +# Note: The username, password or other parameters may be included in it, but in this case, the corresponding properties should leave empty or not provided at all. +jdbc_url = + +# JDBC driver path. +# +# By default, it is guessed in function of the database name provided in the jdbc_url property. It MUST be provided if another DBMS is used or if the JDBC driver path does not match the following ones: +# * Oracle : oracle.jdbc.OracleDriver +# * PostgreSQL: org.postgresql.Driver +# * MySQL : com.mysql.jdbc.Driver +# * SQLite : org.sqlite.JDBC +jdbc_driver = + +# Mandatory if the username is not already provided in jdbc_url +# Username used to access to the database. +db_user = + +# Mandatory if the password is not already provided in jdbc_url +# Password used by db_username to access to the database. +# +# Note: No password encryption can be done in this configuration file for the moment. +db_password = + +############ +# METADATA # +############ + +# Metadata fetching method. 
+# +# The value of this key defines the way the library will get the list of all schemas, tables and columns to publish and all their metadata (e.g. utype, description, type, ...). +# +# In its current state, the library proposes three methods: +# 1/ Parse a TableSet XML document and load its content into the database schema TAP_SCHEMA (note: this schema is first erased and rebuilt by the library). +# 2/ Get all metadata from the database schema TAP_SCHEMA. +# 3/ Build yourself the metadata of your service by creating an extension of tap.metadata.TAPMetadata. This extension must have either an empty constructor +# or a constructor with exactly 3 parameters of type UWSFileManager, TAPFactory and TAPLog ; if both constructors are provided, only the one with parameters will be used. +# +# Allowed values: xml, db or a full class name (between {}). +metadata = + +# Mandatory if the value of "metadata" is "xml". +# Local file path to the TableSet XML document. +# The XML document must implement the schema TableSet defined by VODataService. +# The file path must be either an absolute local file path or a file path relative to WebContent (i.e. the web application directory in which there are WEB-INF and META-INF). +metadata_file = + +######### +# FILES # +######### + +# Type of the file manager. +# +# Accepted values are: local (to manage files on the local system). You can also add another way to manage files by providing +# the name (within brackets: {...}) of a class implementing TAPFileManager and having at least one constructor with only a +# java.util.Properties parameter. +# +# Allowed values: local, a class name. +file_manager = local + +# Local file path of the directory in which all TAP files (logs, errors, job results, backup, ...) must be. +# The file path must be either an absolute local directory path or a directory path relative to WebContent (i.e. the web application directory in which there are WEB-INF and META-INF). 
+file_root_path = diff --git a/src/tap/formatter/ResultSetFormatter.java b/src/tap/data/DataReadException.java similarity index 53% rename from src/tap/formatter/ResultSetFormatter.java rename to src/tap/data/DataReadException.java index bd4608d7cb42c4588ddb1605106c90689e84af66..d75e19c70e8910585724fe15308638e032b75c0e 100644 --- a/src/tap/formatter/ResultSetFormatter.java +++ b/src/tap/data/DataReadException.java @@ -1,4 +1,4 @@ -package tap.formatter; +package tap.data; /* * This file is part of TAPLibrary. @@ -16,17 +16,33 @@ package tap.formatter; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2014 - Astronomisches Rechen Institut (ARI) */ -import java.sql.ResultSet; - import tap.TAPException; -import adql.db.DBColumn; +/** + * Exception that occurs when reading a data input (can be an InputStream, a ResultSet, a SavotTable, ...). + * + * @author Grégory Mantelet (ARI) - gmantele@ari.uni-heidelberg.de + * @version 2.0 (06/2014) + * @since 2.0 + * + * @see TableIterator + */ +public class DataReadException extends TAPException { + private static final long serialVersionUID = 1L; + + public DataReadException(final String message){ + super(message); + } -public interface ResultSetFormatter extends OutputFormat { + public DataReadException(Throwable cause){ + super(cause); + } - public Object formatValue(final Object value, final DBColumn colMeta) throws TAPException; + public DataReadException(String message, Throwable cause){ + super(message, cause); + } } diff --git a/src/tap/data/LimitedTableIterator.java b/src/tap/data/LimitedTableIterator.java new file mode 100644 index 0000000000000000000000000000000000000000..9a98d9431e57fb234b44b80a5215d706275a0a53 --- /dev/null +++ b/src/tap/data/LimitedTableIterator.java @@ -0,0 +1,252 @@ +package tap.data; + +/* + * This file is part of TAPLibrary. 
+ * + * ADQLLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * ADQLLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with ADQLLibrary. If not, see . + * + * Copyright 2014 - Astronomisches Rechen Institut (ARI) + */ + +import java.io.IOException; +import java.io.InputStream; +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.util.NoSuchElementException; + +import tap.ServiceConnection.LimitUnit; +import tap.metadata.TAPColumn; +import tap.upload.LimitedSizeInputStream; +import adql.db.DBType; + +import com.oreilly.servlet.multipart.ExceededSizeException; + +/** + *

    Wrap a {@link TableIterator} in order to limit its reading to a fixed number of rows.

    + * + *

    + * This wrapper can be "mixed" with a {@link LimitedSizeInputStream}, by wrapping the original input stream by a {@link LimitedSizeInputStream} + * and then by wrapping the {@link TableIterator} based on this wrapped input stream by {@link LimitedTableIterator}. + * Thus, this wrapper will be able to detect embedded {@link ExceededSizeException} thrown by a {@link LimitedSizeInputStream} through another {@link TableIterator}. + * If such an exception is detected, it will declare this wrapper as overflowed as it would be if a rows limit is reached. + *

    + * + *

    Warning: + * To work together with a {@link LimitedSizeInputStream}, this wrapper relies on the hypothesis that any {@link IOException} (including {@link ExceededSizeException}) + * will be embedded in a {@link DataReadException} as cause of this exception (using {@link DataReadException#DataReadException(Throwable)} + * or {@link DataReadException#DataReadException(String, Throwable)}). If it is not the case, no overflow detection could be done and the exception will just be forwarded. + *

    + * + *

    + * If a limit - either of rows or of bytes - is reached, a flag "overflow" is set to true. This flag can be got with {@link #isOverflow()}. + * Thus, when a {@link DataReadException} is caught, it will be easy to detect whether the error occurred because of an overflow + * or of another problem. + *

    + * + * @author Grégory Mantelet (ARI) + * @version 2.0 (01/2015) + * @since 2.0 + */ +public class LimitedTableIterator implements TableIterator { + + /** The wrapped {@link TableIterator}. */ + private final TableIterator innerIt; + + /** Limit on the number of rows to read. note: a negative value means "no limit". */ + private final int maxNbRows; + + /** The number of rows already read. */ + private int countRow = 0; + + /** Indicate whether a limit (rows or bytes) has been reached or not. */ + private boolean overflow = false; + + /** + * Wrap the given {@link TableIterator} so that limiting the number of rows to read. + * + * @param it The iterator to wrap. MUST NOT be NULL + * @param nbMaxRows Maximum number of rows that can be read. There is overflow if more than this number of rows is asked. A negative value means "no limit". + */ + public LimitedTableIterator(final TableIterator it, final int nbMaxRows) throws DataReadException{ + if (it == null) + throw new NullPointerException("Missing TableIterator to wrap!"); + innerIt = it; + this.maxNbRows = nbMaxRows; + } + + /** + *

    Build the specified {@link TableIterator} instance and wrap it so that limiting the number of rows OR bytes to read.

    + * + *

    + * If the limit is on the number of bytes, the given input stream will be first wrapped inside a {@link LimitedSizeInputStream}. + * Then, it will be given as only parameter of the constructor of the specified {@link TableIterator} instance. + *

    + * + *

    If the limit is on the number of rows, this {@link LimitedTableIterator} will count and limit itself the number of rows.

    + * + *

    IMPORTANT: The specified class must:

    + *
      + *
    • extend {@link TableIterator},
    • + *
    • be a concrete class,
    • + *
    • have at least one constructor with only one parameter of type {@link InputStream}.
    • + *
    + * + *

    Note: + * If the given limit type is NULL (or different from ROWS and BYTES), or the limit value is <=0, no limit will be set. + * All rows and bytes will be read until the end of input is reached. + *

    + * + * @param classIt Class of the {@link TableIterator} implementation to create and whose the output must be limited. + * @param input Input stream toward the table to read. + * @param type Type of the limit: ROWS or BYTES. MAY be NULL + * @param limit Limit in rows or bytes, depending of the "type" parameter. MAY BE <=0 + * + * @throws DataReadException If no instance of the given class can be created, + * or if the {@link TableIterator} instance can not be initialized, + * or if the limit (in rows or bytes) has been reached. + */ + public < T extends TableIterator > LimitedTableIterator(final Class classIt, final InputStream input, final LimitUnit type, final int limit) throws DataReadException{ + try{ + Constructor construct = classIt.getConstructor(InputStream.class); + if (LimitUnit.bytes.isCompatibleWith(type) && limit > 0){ + maxNbRows = -1; + innerIt = construct.newInstance(new LimitedSizeInputStream(input, limit * type.bytesFactor())); + }else{ + innerIt = construct.newInstance(input); + maxNbRows = (type == null || type != LimitUnit.rows) ? -1 : limit; + } + }catch(InvocationTargetException ite){ + Throwable t = ite.getCause(); + if (t != null && t instanceof DataReadException){ + ExceededSizeException exceedEx = getExceededSizeException(t); + // if an error caused by an ExceedSizeException occurs, set this iterator as overflowed and throw the exception: + if (exceedEx != null) + throw new DataReadException(exceedEx.getMessage(), exceedEx); + else + throw (DataReadException)t; + }else + throw new DataReadException("Can not create a LimitedTableIterator!", ite); + }catch(Exception ex){ + throw new DataReadException("Can not create a LimitedTableIterator!", ex); + } + } + + /** + * Get the iterator wrapped by this {@link TableIterator} instance. + * + * @return The wrapped iterator. + */ + public final TableIterator getWrappedIterator(){ + return innerIt; + } + + /** + *

    Tell whether a limit (in rows or bytes) has been reached.

    + * + *

    Note: + * If true is returned (that's to say, if a limit has been reached) no more rows or column values + * can be read ; an {@link IllegalStateException} would then be thrown. + *

    + * + * @return true if a limit has been reached, false otherwise. + */ + public final boolean isOverflow(){ + return overflow; + } + + @Override + public void close() throws DataReadException{ + innerIt.close(); + } + + @Override + public TAPColumn[] getMetadata() throws DataReadException{ + return innerIt.getMetadata(); + } + + @Override + public boolean nextRow() throws DataReadException{ + // Test the overflow flag and proceed only if not overflowed: + if (overflow) + throw new DataReadException("Data read overflow: the limit has already been reached! No more data can be read."); + + // Read the next row: + boolean nextRow; + try{ + nextRow = innerIt.nextRow(); + countRow++; + }catch(DataReadException ex){ + ExceededSizeException exceedEx = getExceededSizeException(ex); + // if an error caused by an ExceedSizeException occurs, set this iterator as overflowed and throw the exception: + if (exceedEx != null){ + overflow = true; + throw new DataReadException(exceedEx.getMessage()); + }else + throw ex; + } + + // If, counting this one, the number of rows exceeds the limit, set this iterator as overflowed and throw an exception: + if (nextRow && maxNbRows >= 0 && countRow > maxNbRows){ + overflow = true; + throw new DataReadException("Data read overflow: the limit of " + maxNbRows + " rows has been reached!"); + } + + // Send back the value returned by the inner iterator: + return nextRow; + } + + @Override + public boolean hasNextCol() throws IllegalStateException, DataReadException{ + testOverflow(); + return innerIt.hasNextCol(); + } + + @Override + public Object nextCol() throws NoSuchElementException, IllegalStateException, DataReadException{ + testOverflow(); + return innerIt.nextCol(); + } + + @Override + public DBType getColType() throws IllegalStateException, DataReadException{ + testOverflow(); + return innerIt.getColType(); + } + + /** + * Test the overflow flag and throw an {@link IllegalStateException} if true. 
+ * + * @throws IllegalStateException If this iterator is overflowed (because of either a bytes limit or a rows limit). + */ + private void testOverflow() throws IllegalStateException{ + if (overflow) + throw new IllegalStateException("Data read overflow: the limit has already been reached! No more data can be read."); + } + + /** + * Get the first {@link ExceededSizeException} found in the given {@link Throwable} trace. + * + * @param ex A {@link Throwable} + * + * @return The first {@link ExceededSizeException} encountered, or NULL if none has been found. + */ + private ExceededSizeException getExceededSizeException(Throwable ex){ + if (ex == null) + return null; + while(!(ex instanceof ExceededSizeException) && ex.getCause() != null) + ex = ex.getCause(); + return (ex instanceof ExceededSizeException) ? (ExceededSizeException)ex : null; + } + +} diff --git a/src/tap/data/ResultSetTableIterator.java b/src/tap/data/ResultSetTableIterator.java new file mode 100644 index 0000000000000000000000000000000000000000..9c906a15783082d45389b5da4ce4897ff8a67a8b --- /dev/null +++ b/src/tap/data/ResultSetTableIterator.java @@ -0,0 +1,544 @@ +package tap.data; + +/* + * This file is part of TAPLibrary. + * + * TAPLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * TAPLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with TAPLibrary. If not, see . 
+ * + * Copyright 2014 - Astronomisches Rechen Institut (ARI) + */ + +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Timestamp; +import java.util.NoSuchElementException; + +import tap.metadata.TAPColumn; +import uws.ISO8601Format; +import adql.db.DBColumn; +import adql.db.DBType; +import adql.db.DBType.DBDatatype; +import adql.db.STCS.Region; +import adql.parser.ParseException; +import adql.translator.JDBCTranslator; + +/** + *

    {@link TableIterator} which lets iterate over a SQL {@link ResultSet}.

    + * + *

    Note: + * {@link #getColType()} will return a TAP type based on the one declared in the {@link ResultSetMetaData} object. + *

    + * + * @author Grégory Mantelet (ARI) + * @version 2.0 (11/2014) + * @since 2.0 + */ +public class ResultSetTableIterator implements TableIterator { + + /** ResultSet/Dataset to read. */ + private final ResultSet data; + + /** Object which has the knowledge of the specific JDBC column types + * and which knows how to deal with geometrical values between the + * library and the database. */ + private final JDBCTranslator translator; + + /** Number of columns to read. */ + private final int nbColumns; + /** Metadata of all columns identified before the iteration. */ + private final TAPColumn[] colMeta; + + /** Indicate whether the row iteration has already started. */ + private boolean iterationStarted = false; + /** Indicate whether the last row has already been reached. */ + private boolean endReached = false; + /** Index of the last read column (=0 just after {@link #nextRow()} and before {@link #nextCol()}, ={@link #nbColumns} after the last column has been read). */ + private int colIndex; + + /** + *

    Build a TableIterator able to read rows and columns of the given ResultSet.

    + * + *

    + * In order to provide the metadata through {@link #getMetadata()}, this constructor is trying to guess the datatype + * from the DBMS column datatype (using {@link #convertType(int, String, String)}). + *

    + * + *

    Type guessing

    + * + *

    + * In order to guess a TAP type from a DBMS type, this constructor will call {@link #convertType(int, String, String)} + * which deals with the most common standard datatypes known in Postgres, SQLite, MySQL, Oracle and JavaDB/Derby. + * This conversion is therefore not as precise as the one expected by a translator. That's why it is recommended + * to use one of the constructor having a {@link JDBCTranslator} in parameter. + *

    + * + * @param dataSet Dataset over which this iterator must iterate. + * + * @throws NullPointerException If NULL is given in parameter. + * @throws DataReadException If the given ResultSet is closed or if the metadata (columns count and types) can not be fetched. + * + * @see #convertType(int, String, String) + * @see #ResultSetTableIterator(ResultSet, JDBCTranslator, String, DBColumn[]) + */ + public ResultSetTableIterator(final ResultSet dataSet) throws NullPointerException, DataReadException{ + this(dataSet, null, null, null); + } + + /** + *

    Build a TableIterator able to read rows and columns of the given ResultSet.

    + * + *

    + * In order to provide the metadata through {@link #getMetadata()}, this constructor is trying to guess the datatype + * from the DBMS column datatype (using {@link #convertType(int, String, String)}). + *

    + * + *

    Type guessing

    + * + *

    + * In order to guess a TAP type from a DBMS type, this constructor will call {@link #convertType(int, String, String)} + * which deals with the most common standard datatypes known in Postgres, SQLite, MySQL, Oracle and JavaDB/Derby. + * This conversion is therefore not as precise as the one expected by a translator. That's why it is recommended + * to use one of the constructor having a {@link JDBCTranslator} in parameter. + *

    + * + *

    Important: + * The second parameter of this constructor is given as second parameter of {@link #convertType(int, String, String)}. + * This parameter is really used ONLY when the DBMS is SQLite ("sqlite"). + * Indeed, SQLite has so many datatype restrictions that it is absolutely needed to know it is the DBMS from which the + * ResultSet is coming. Without this information, type guessing will be unpredictable! + *

    + * + * @param dataSet Dataset over which this iterator must iterate. + * @param dbms Lower-case string which indicates from which DBMS the given ResultSet is coming. note: MAY be NULL. + * + * @throws NullPointerException If NULL is given in parameter. + * @throws DataReadException If the given ResultSet is closed or if the metadata (columns count and types) can not be fetched. + * + * @see #convertType(int, String, String) + * @see ResultSetTableIterator#ResultSetTableIterator(ResultSet, JDBCTranslator, String, DBColumn[]) + */ + public ResultSetTableIterator(final ResultSet dataSet, final String dbms) throws NullPointerException, DataReadException{ + this(dataSet, null, dbms, null); + } + + /** + *

    Build a TableIterator able to read rows and columns of the given ResultSet.

    + * + *

    + * In order to provide the metadata through {@link #getMetadata()}, this constructor is trying to guess the datatype + * from the DBMS column datatype (using {@link #convertType(int, String, String)}). + *

    + * + *

    Type guessing

    + * + *

    + * In order to guess a TAP type from a DBMS type, this constructor will call {@link #convertType(int, String, String)} + * which will ask to the given translator ({@link JDBCTranslator#convertTypeFromDB(int, String, String, String[])}) + * if not NULL. However if no translator is provided, this function will proceed to a default conversion + * using the most common standard datatypes known in Postgres, SQLite, MySQL, Oracle and JavaDB/Derby. + * This conversion is therefore not as precise as the one expected by the translator. + *

    + * + * @param dataSet Dataset over which this iterator must iterate. + * @param translator The {@link JDBCTranslator} used to transform the ADQL query into SQL query. This translator is also able to convert + * JDBC types and to parse geometrical values. note: MAY be NULL + * + * @throws NullPointerException If NULL is given in parameter. + * @throws DataReadException If the given ResultSet is closed or if the metadata (columns count and types) can not be fetched. + * + * @see #convertType(int, String, String) + * @see ResultSetTableIterator#ResultSetTableIterator(ResultSet, JDBCTranslator, String, DBColumn[]) + */ + public ResultSetTableIterator(final ResultSet dataSet, final JDBCTranslator translator) throws NullPointerException, DataReadException{ + this(dataSet, translator, null, null); + } + + /** + *

    Build a TableIterator able to read rows and columns of the given ResultSet.

    + * + *

    + * In order to provide the metadata through {@link #getMetadata()}, this constructor is trying to guess the datatype + * from the DBMS column datatype (using {@link #convertType(int, String, String)}). + *

    + * + *

    Type guessing

    + * + *

    + * In order to guess a TAP type from a DBMS type, this constructor will call {@link #convertType(int, String, String)} + * which will ask to the given translator ({@link JDBCTranslator#convertTypeFromDB(int, String, String, String[])}) + * if not NULL. However if no translator is provided, this function will proceed to a default conversion + * using the most common standard datatypes known in Postgres, SQLite, MySQL, Oracle and JavaDB/Derby. + * This conversion is therefore not as precise as the one expected by the translator. + *

    + * + *

    Important: + * The third parameter of this constructor is given as second parameter of {@link #convertType(int, String, String)}. + * This parameter is really used ONLY when the translator conversion failed and when the DBMS is SQLite ("sqlite"). + * Indeed, SQLite has so many datatype restrictions that it is absolutely needed to know it is the DBMS from which the + * ResultSet is coming. Without this information, type guessing will be unpredictable! + *

	 *
	 * @param dataSet		Dataset over which this iterator must iterate.
	 * @param translator	The {@link JDBCTranslator} used to transform the ADQL query into SQL query. This translator is also able to convert
	 *                  	JDBC types and to parse geometrical values. <i>note: MAY be NULL</i>
	 * @param dbms			Lower-case string which indicates from which DBMS the given ResultSet is coming. <i>note: MAY be NULL.</i>
	 *
	 * @throws NullPointerException	If NULL is given in parameter.
	 * @throws DataReadException	If the given ResultSet is closed or if the metadata (columns count and types) can not be fetched.
	 *
	 * @see #convertType(int, String, String)
	 * @see ResultSetTableIterator#ResultSetTableIterator(ResultSet, JDBCTranslator, String, DBColumn[])
	 */
	public ResultSetTableIterator(final ResultSet dataSet, final JDBCTranslator translator, final String dbms) throws NullPointerException, DataReadException{
		// Delegate to the most complete constructor: no expected column metadata.
		this(dataSet, translator, dbms, null);
	}

	/**

    Build a TableIterator able to read rows and columns of the given ResultSet.

    + * + *

    + * In order to provide the metadata through {@link #getMetadata()}, this constructor is reading first the given metadata (if any), + * and then, try to guess the datatype from the DBMS column datatype (using {@link #convertType(int, String, String)}). + *

    + * + *

    Provided metadata

    + * + *

    The third parameter of this constructor aims to provide the metadata expected for each column of the ResultSet.

    + * + *

+ * + * For that, it is expected that all these metadata are {@link TAPColumn} objects. Indeed, simple {@link DBColumn} + * instances do not have the type information. If just {@link DBColumn}s are provided, the ADQL name they provide will be kept + * but the type will be guessed from the type provided by the ResultSetMetaData. + *

    + * + *

    Note: + * If this parameter is incomplete (array length less than the column count returned by the ResultSet or some array items are NULL), + * column metadata will be associated in the same order as the ResultSet columns. Missing metadata will be built from the + * {@link ResultSetMetaData} and so the types will be guessed. + *

    + * + *

    Type guessing

    + * + *

    + * In order to guess a TAP type from a DBMS type, this constructor will call {@link #convertType(int, String, String)} + * which will ask to the given translator ({@link JDBCTranslator#convertTypeFromDB(int, String, String, String[])}) + * if not NULL. However if no translator is provided, this function will proceed to a default conversion + * using the most common standard datatypes known in Postgres, SQLite, MySQL, Oracle and JavaDB/Derby. + * This conversion is therefore not as precise as the one expected by the translator. + *

    + * + *

    Important: + * The third parameter of this constructor is given as second parameter of {@link #convertType(int, String, String)}. + * This parameter is really used ONLY when the translator conversion failed and when the DBMS is SQLite ("sqlite"). + * Indeed, SQLite has so many datatype restrictions that it is absolutely needed to know it is the DBMS from which the + * ResultSet is coming. Without this information, type guessing will be unpredictable! + *

    + * + * @param dataSet Dataset over which this iterator must iterate. + * @param translator The {@link JDBCTranslator} used to transform the ADQL query into SQL query. This translator is also able to convert + * JDBC types and to parse geometrical values. note: MAY be NULL + * @param dbms Lower-case string which indicates from which DBMS the given ResultSet is coming. note: MAY be NULL. + * @param resultMeta List of expected columns. note: these metadata are expected to be really {@link TAPColumn} objects ; MAY be NULL. + * + * @throws NullPointerException If NULL is given in parameter. + * @throws DataReadException If the metadata (columns count and types) can not be fetched. + * + * @see #convertType(int, String, String) + */ + public ResultSetTableIterator(final ResultSet dataSet, final JDBCTranslator translator, final String dbms, final DBColumn[] resultMeta) throws NullPointerException, DataReadException{ + // A dataset MUST BE provided: + if (dataSet == null) + throw new NullPointerException("Missing ResultSet object over which to iterate!"); + + // Keep a reference to the ResultSet: + data = dataSet; + + // Set the translator to use (if needed): + this.translator = translator; + + // Count columns and determine their type: + try{ + // get the metadata: + ResultSetMetaData metadata = data.getMetaData(); + // count columns: + nbColumns = metadata.getColumnCount(); + // determine their type: + colMeta = new TAPColumn[nbColumns]; + for(int i = 1; i <= nbColumns; i++){ + if (resultMeta != null && (i - 1) < resultMeta.length && resultMeta[i - 1] != null){ + try{ + colMeta[i - 1] = (TAPColumn)resultMeta[i - 1]; + }catch(ClassCastException cce){ + DBType datatype = convertType(metadata.getColumnType(i), metadata.getColumnTypeName(i), dbms); + colMeta[i - 1] = new TAPColumn(resultMeta[i - 1].getADQLName(), datatype); + } + }else{ + DBType datatype = convertType(metadata.getColumnType(i), metadata.getColumnTypeName(i), dbms); + colMeta[i - 1] = new 
TAPColumn(metadata.getColumnLabel(i), datatype); + } + } + }catch(SQLException se){ + throw new DataReadException("Can not get the column types of the given ResultSet!", se); + } + } + + @Override + public void close() throws DataReadException{ + try{ + data.close(); + }catch(SQLException se){ + throw new DataReadException("Can not close the iterated ResultSet!", se); + } + } + + @Override + public TAPColumn[] getMetadata(){ + return colMeta; + } + + @Override + public boolean nextRow() throws DataReadException{ + try{ + // go to the next row: + boolean rowFetched = data.next(); + endReached = !rowFetched; + // prepare the iteration over its columns: + colIndex = 0; + iterationStarted = true; + return rowFetched; + }catch(SQLException e){ + throw new DataReadException("Unable to read a result set row!", e); + } + } + + /** + *

    Check the row iteration state. That's to say whether:

    + *
      + *
    • the row iteration has started = the first row has been read = a first call of {@link #nextRow()} has been done
    • + *
    • AND the row iteration is not finished = the last row has been read.
    • + *
    + * + * @throws IllegalStateException + */ + private void checkReadState() throws IllegalStateException{ + if (!iterationStarted) + throw new IllegalStateException("No row has yet been read!"); + else if (endReached) + throw new IllegalStateException("End of ResultSet already reached!"); + } + + @Override + public boolean hasNextCol() throws IllegalStateException, DataReadException{ + // Check the read state: + checkReadState(); + + // Determine whether the last column has been reached or not: + return (colIndex < nbColumns); + } + + @Override + public Object nextCol() throws NoSuchElementException, IllegalStateException, DataReadException{ + // Check the read state and ensure there is still at least one column to read: + if (!hasNextCol()) + throw new NoSuchElementException("No more column to read!"); + + // Get the column value: + try{ + Object o = data.getObject(++colIndex); + if (o != null){ + DBType colType = getColType(); + // if the column value is a Timestamp object, format it in ISO8601: + if (o instanceof Timestamp) + o = ISO8601Format.format(((Timestamp)o).getTime()); + // if the column value is a geometrical object, it must be serialized in STC-S: + else if (translator != null && colType.isGeometry()){ + Region region = translator.translateGeometryFromDB(o); + if (region != null) + o = region.toSTCS(); + } + } + return o; + }catch(SQLException se){ + throw new DataReadException("Can not read the value of the " + colIndex + "-th column!", se); + }catch(ParseException pe){ + throw new DataReadException(pe.getMessage()); + } + } + + @Override + public DBType getColType() throws IllegalStateException, DataReadException{ + // Basically check the read state (for rows iteration): + checkReadState(); + + // Check deeper the read state (for columns iteration): + if (colIndex <= 0) + throw new IllegalStateException("No column has yet been read!"); + else if (colIndex > nbColumns) + throw new IllegalStateException("All columns have already been read!"); + + // 
Return the column type: + return colMeta[colIndex - 1].getDatatype(); + } + + /** + *

    Convert the given DBMS type into the corresponding {@link DBType} instance.

    + * + *

    + * This function first tries the conversion using the translator ({@link JDBCTranslator#convertTypeFromDB(int, String, String, String[])}). + * If the translator fails, a default conversion is done. + *

    + * + *

    Warning: + * It is not recommended to rely on the default conversion. + * This conversion is just a matter of guessing the better matching {@link DBType} + * considering the types of the following DBMS: PostgreSQL, SQLite, MySQL, Oracle and Java/DB/Derby. + *

    + * + * @param dbmsType DBMS column data-type name. + * @param dbms Lower-case string which indicates which DBMS the ResultSet is coming from. note: MAY be NULL. + * + * @return The best suited {@link DBType} object. + * + * @see JDBCTranslator#convertTypeFromDB(int, String, String, String[]) + * @see #defaultTypeConversion(String, String[], String) + */ + protected DBType convertType(final int dbmsType, String dbmsTypeName, final String dbms) throws DataReadException{ + // If no type is provided return VARCHAR: + if (dbmsTypeName == null || dbmsTypeName.trim().length() == 0) + return new DBType(DBDatatype.VARCHAR, DBType.NO_LENGTH); + + // Extract the type prefix and lower-case it: + int startParamIndex = dbmsTypeName.indexOf('('), endParamIndex = dbmsTypeName.indexOf(')'); + String dbmsTypePrefix = (startParamIndex <= 0) ? dbmsTypeName : dbmsTypeName.substring(0, endParamIndex); + dbmsTypePrefix = dbmsTypePrefix.trim().toLowerCase(); + String[] typeParams = (startParamIndex <= 0) ? null : dbmsTypeName.substring(startParamIndex + 1, endParamIndex).split(","); + + // Ask first to the translator: + DBType dbType = null; + if (translator != null) + dbType = translator.convertTypeFromDB(dbmsType, dbmsTypeName, dbmsTypePrefix, typeParams); + + // And if unsuccessful, apply a default conversion: + if (dbType == null) + dbType = defaultTypeConversion(dbmsTypePrefix, typeParams, dbms); + + return dbType; + } + + /** + *

    Convert the given DBMS type into the better matching {@link DBType} instance. + * This function is used to guess the TAP type of a column when it is not provided in the constructor. + * It aims not to be exhaustive, but just to provide a type when the given TAP metadata are incomplete.

    + * + *

    Note: + * Any unknown DBMS data-type will be considered and translated as a VARCHAR. + * This latter will be also returned if the given parameter is an empty string or NULL. + *

    + * + *

    Note: + * This type conversion function has been designed to work with all standard data-types of the following DBMS: + * PostgreSQL, SQLite, MySQL, Oracle and JavaDB/Derby. + *

    + * + *

    Important: + * The third parameter is REALLY NEEDED when the DBMS is SQLite ("sqlite")! + * Indeed, SQLite has a so restrictive list of data-types that this function can reliably convert + * only if it knows the DBMS is SQLite. Otherwise, the conversion result would be unpredictable. + * In this default implementation of this function, all other DBMS values are ignored. + *

    + * + *

Warning: + * This function does not translate the geometrical data-types. If such a data-type is encountered, + * it will be considered as unknown and so, a VARCHAR TAP type will be returned. + *

    + * + * @param dbmsTypeName Name of type, without the eventual parameters. + * @param params The eventual type parameters (e.g. char string length). + * @param dbms The targeted DBMS. + * + * @return The corresponding ADQL/TAP type. NEVER NULL + */ + protected final DBType defaultTypeConversion(final String dbmsTypeName, final String[] params, final String dbms){ + // Get the length parameter (always in first position): + int lengthParam = DBType.NO_LENGTH; + if (params != null && params.length > 0){ + try{ + lengthParam = Integer.parseInt(params[0]); + }catch(NumberFormatException nfe){} + } + + // CASE: SQLITE + if (dbms != null && dbms.equals("sqlite")){ + // INTEGER -> SMALLINT, INTEGER, BIGINT + if (dbmsTypeName.equals("integer")) + return new DBType(DBDatatype.BIGINT); + // REAL -> REAL, DOUBLE + else if (dbmsTypeName.equals("real")) + return new DBType(DBDatatype.DOUBLE); + // TEXT -> CHAR, VARCHAR, CLOB, TIMESTAMP + else if (dbmsTypeName.equals("text")) + return new DBType(DBDatatype.VARCHAR); + // BLOB -> BINARY, VARBINARY, BLOB + else if (dbmsTypeName.equals("blob")) + return new DBType(DBDatatype.BLOB); + // Default: + else + return new DBType(DBDatatype.VARCHAR, DBType.NO_LENGTH); + } + // CASE: OTHER DBMS + else{ + // SMALLINT + if (dbmsTypeName.equals("smallint") || dbmsTypeName.equals("int2") || dbmsTypeName.equals("smallserial") || dbmsTypeName.equals("serial2") || dbmsTypeName.equals("boolean") || dbmsTypeName.equals("bool")) + return new DBType(DBDatatype.SMALLINT); + // INTEGER + else if (dbmsTypeName.equals("integer") || dbmsTypeName.equals("int") || dbmsTypeName.equals("int4") || dbmsTypeName.equals("serial") || dbmsTypeName.equals("serial4")) + return new DBType(DBDatatype.INTEGER); + // BIGINT + else if (dbmsTypeName.equals("bigint") || dbmsTypeName.equals("int8") || dbmsTypeName.equals("bigserial") || dbmsTypeName.equals("bigserial8") || dbmsTypeName.equals("number")) + return new DBType(DBDatatype.BIGINT); + // REAL + else if 
(dbmsTypeName.equals("real") || dbmsTypeName.equals("float4") || (dbmsTypeName.equals("float") && lengthParam <= 63)) + return new DBType(DBDatatype.REAL); + // DOUBLE + else if (dbmsTypeName.equals("double") || dbmsTypeName.equals("double precision") || dbmsTypeName.equals("float8") || (dbmsTypeName.equals("float") && lengthParam > 63)) + return new DBType(DBDatatype.DOUBLE); + // BINARY + else if (dbmsTypeName.equals("bit") || dbmsTypeName.equals("binary") || dbmsTypeName.equals("raw") || ((dbmsTypeName.equals("char") || dbmsTypeName.equals("character")) && dbmsTypeName.endsWith(" for bit data"))) + return new DBType(DBDatatype.BINARY, lengthParam); + // VARBINARY + else if (dbmsTypeName.equals("bit varying") || dbmsTypeName.equals("varbit") || dbmsTypeName.equals("varbinary") || dbmsTypeName.equals("long raw") || ((dbmsTypeName.equals("varchar") || dbmsTypeName.equals("character varying")) && dbmsTypeName.endsWith(" for bit data"))) + return new DBType(DBDatatype.VARBINARY, lengthParam); + // CHAR + else if (dbmsTypeName.equals("char") || dbmsTypeName.equals("character")) + return new DBType(DBDatatype.CHAR, lengthParam); + // VARCHAR + else if (dbmsTypeName.equals("varchar") || dbmsTypeName.equals("varchar2") || dbmsTypeName.equals("character varying")) + return new DBType(DBDatatype.VARCHAR, lengthParam); + // BLOB + else if (dbmsTypeName.equals("bytea") || dbmsTypeName.equals("blob") || dbmsTypeName.equals("binary large object")) + return new DBType(DBDatatype.BLOB); + // CLOB + else if (dbmsTypeName.equals("text") || dbmsTypeName.equals("clob") || dbmsTypeName.equals("character large object")) + return new DBType(DBDatatype.CLOB); + // TIMESTAMP + else if (dbmsTypeName.equals("timestamp") || dbmsTypeName.equals("timestamptz") || dbmsTypeName.equals("time") || dbmsTypeName.equals("timetz") || dbmsTypeName.equals("date")) + return new DBType(DBDatatype.TIMESTAMP); + // Default: + else + return new DBType(DBDatatype.VARCHAR, DBType.NO_LENGTH); + } + } + +} diff 
--git a/src/tap/data/TableIterator.java b/src/tap/data/TableIterator.java new file mode 100644 index 0000000000000000000000000000000000000000..cdc16de18754ddca928626b0bc4946e4496d98a8 --- /dev/null +++ b/src/tap/data/TableIterator.java @@ -0,0 +1,138 @@ +package tap.data; + +/* + * This file is part of TAPLibrary. + * + * TAPLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * TAPLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with TAPLibrary. If not, see . + * + * Copyright 2014 - Astronomisches Rechen Institut (ARI) + */ + +import java.util.NoSuchElementException; + +import tap.metadata.TAPColumn; +import adql.db.DBType; + +/** + *

    Let's iterate on each row and then on each column over a table dataset.

    + * + *

    Initially, no rows are loaded and the "cursor" inside the dataset is set before the first row. + * Thus, a first call to {@link #nextRow()} is required to read each of the column values of the first row.

    + * + *

    Example of an expected usage:

    + *
    + * 	TableIterator it = ...;
    + * 	try{
    + * 		while(it.nextRow()){
    + * 			while(it.hasNextCol()){
    + * 				Object colValue = it.nextCol();
    + * 				String colType = it.getColType();
    + * 				...
    + * 			}
    + * 		}
    + * 	}catch(DataReadException dre){
    + * 		...
    + * 	}finally{
    + * 		try{
    + * 			it.close();
    + * 		}catch(DataReadException dre){ ... }
    + * 	}
    + * 
 *
 * @author Gr&eacute;gory Mantelet (ARI)
 * @version 2.0 (12/2014)
 * @since 2.0
 */
public interface TableIterator {

	/**
	 * <p>Get all the column metadata that have been successfully extracted at the creation of this iterator.</p>
	 *
	 * <p><i><b>Important:</b> This function should be callable at any moment from the creation of the iterator until the end of the table dataset has been reached.</i></p>
	 *
	 * <p><i>Note: This function MAY BE NOT IMPLEMENTED or the metadata can not be fetched. In this case, NULL will be returned.</i></p>
	 *
	 * <p><i><b>Warning:</b> If the metadata part of the original document is corrupted (i.e. false number of columns),
	 * the column type information should be fetched thanks to {@link #getColType()} while iterating over rows and columns.</i></p>
	 *
	 * @return	An array of {@link TAPColumn} objects (each for a column of any row),
	 *        	or NULL if this function is not implemented OR if it was not possible to get these metadata.
	 *
	 * @throws DataReadException	If an error occurs while extracting the metadata.
	 *
	 * @see #getColType()
	 */
	public TAPColumn[] getMetadata() throws DataReadException;

	/**
	 * <p>Go to the next row if there is one.</p>
	 *
	 * <p><i>Note: After a call to this function the columns must be fetched individually using {@link #nextCol()}
	 * IF this function returned <code>true</code>.</i></p>
	 *
	 * @return	<code>true</code> if the next row has been successfully reached,
	 *        	<code>false</code> if no more rows can be read.
	 *
	 * @throws DataReadException	If an error occurs while reading the table dataset.
	 */
	public boolean nextRow() throws DataReadException;

	/**
	 * Tell whether another column is available.
	 *
	 * @return	<code>true</code> if {@link #nextCol()} will return the value of the next column with no error,
	 *        	<code>false</code> otherwise.
	 *
	 * @throws IllegalStateException	If {@link #nextRow()} has not yet been called.
	 * @throws DataReadException		If an error occurs while reading the table dataset.
	 */
	public boolean hasNextCol() throws IllegalStateException, DataReadException;

	/**
	 * <p>Return the value of the next column.</p>
	 *
	 * <p><i>Note: The column type can be fetched using {@link #getColType()} after a call to {@link #nextCol()}.</i></p>
	 *
	 * @return	Get the value of the next column.
	 *
	 * @throws NoSuchElementException	If no more column value is available.
	 * @throws IllegalStateException	If {@link #nextRow()} has not yet been called.
	 * @throws DataReadException		If an error occurs while reading the table dataset.
	 */
	public Object nextCol() throws NoSuchElementException, IllegalStateException, DataReadException;

	/**
	 * <p>Get the type of the current column value.</p>
	 *
	 * <p><i>Note 1: "Current column value" means here "the value last returned by {@link #nextCol()}".</i></p>
	 *
	 * <p><i>Note 2: This function MAY BE NOT IMPLEMENTED or the type information can not be fetched. If this is the case, NULL will be returned.</i></p>
	 *
	 * <p><i><b>Warning:</b> In some cases, the metadata part of the original document does not match with the data
	 * it should have represented. In such case, the types returned here and by {@link #getMetadata()} would be different.
	 * In case of such mismatch, the type returned by {@link #getColType()} should be considered as more correct/accurate.</i></p>
	 *
	 * @return	Type of the current column value,
	 *        	or NULL if this information is not available or if this function is not implemented.
	 *
	 * @throws IllegalStateException	If {@link #nextCol()} has not yet been called.
	 * @throws DataReadException		If an error occurs while reading the table dataset.
	 */
	public DBType getColType() throws IllegalStateException, DataReadException;

	/**
	 * Close the stream or input over which this class iterates.
	 *
	 * @throws DataReadException	If any error occurs while closing it.
	 */
	public void close() throws DataReadException;

}
+ * + * Copyright 2015 - Astronomisches Rechen Institut (ARI) + */ + +import java.io.IOException; +import java.io.InputStream; +import java.util.NoSuchElementException; + +import tap.TAPException; +import tap.metadata.TAPColumn; +import tap.metadata.VotType; +import tap.metadata.VotType.VotDatatype; +import uk.ac.starlink.table.ColumnInfo; +import uk.ac.starlink.table.DescribedValue; +import uk.ac.starlink.table.StarTable; +import uk.ac.starlink.table.StarTableFactory; +import uk.ac.starlink.table.TableBuilder; +import uk.ac.starlink.table.TableFormatException; +import uk.ac.starlink.table.TableSink; +import adql.db.DBType; + +/** + *

    {@link TableIterator} which lets iterate over a VOTable input stream using STIL.

    + * + *

    {@link #getColType()} will return TAP type based on the type declared in the VOTable metadata part.

    + * + * @author Grégory Mantelet (ARI) + * @version 2.0 (04/2015) + * @since 2.0 + */ +public class VOTableIterator implements TableIterator { + + /** Message of the IOException sent when the streaming is aborted. */ + protected static final String STREAM_ABORTED_MESSAGE = "Streaming aborted!"; + + /** + *

    This class lets consume the metadata and rows of a VOTable document.

    + * + *

    + * On the contrary to a usual TableSink, this one will stop after each row until this row has been fetched by {@link VOTableIterator}. + *

    + * + *

    + * Besides, the metadata returned by StarTable are immediately converted into TAP metadata. If this conversion fails, the error is kept + * in metaError, so that the VOTable reading can continue if the fact that metadata are missing is not a problem for the class using the + * {@link VOTableIterator}. + *

	 *
	 * @author Gr&eacute;gory Mantelet (ARI)
	 * @version 2.0 (04/2015)
	 * @since 2.0
	 */
	protected static class StreamVOTableSink implements TableSink {

		/** The accepted VOTable metadata, after conversion from StarTable metadata.
		 * <p><i>Note: this may be NULL after the metadata has been read if an error occurred while performing the conversion.
		 * In this case, {@link #metaError} contains this error.</i></p> */
		private TAPColumn[] meta = null;

		/** The error which happened while converting the StarTable metadata into TAP metadata. */
		private DataReadException metaError = null;

		/** The last accepted row (not yet consumed by getRow()). */
		private Object[] pendingRow = null;

		/** Flag meaning that the end of the stream has been reached
		 * OR that the VOTable reading should be stopped before reading more rows. */
		private boolean endReached = false;

		/**

    Stop nicely reading the VOTable.

    + * + *

    + * An exception will be thrown to the STILTS class using this TableSink, + * but no exception should be thrown to VOTableIterator. + *

		 */
		public synchronized void stop(){
			// Flag the end of the stream and wake up every thread blocked in acceptRow()/endRows()/getMeta()/getRow():
			endReached = true;
			notifyAll();
		}

		@Override
		public synchronized void acceptMetadata(final StarTable metaTable) throws TableFormatException{
			try{
				// Convert the StarTable metadata into TAP metadata:
				meta = extractColMeta(metaTable);

			}catch(DataReadException dre){
				// Save the error ; this error will be thrown when a call to getMetadata() will be done:
				metaError = dre;

			}finally{
				// Free all waiting threads:
				notifyAll();
			}
		}

		@Override
		public synchronized void acceptRow(final Object[] row) throws IOException{
			try{
				// Wait until the last accepted row has been consumed:
				while(!endReached && pendingRow != null)
					wait();

				/* If the end has been reached, this is not normal
				 * (because endRows() is always called after acceptRow()...so, it means the iteration has been aborted before the end)
				 * and so the stream reading should be interrupted: */
				if (endReached)
					throw new IOException(STREAM_ABORTED_MESSAGE);

				// Otherwise, keep the given row:
				pendingRow = row;

				/* Security for the cases where a row to accept is NULL.
				 * In such case, pendingRow will be set to NULL and the function getRow() will wait for ever.
				 * This case is not supposed to happen because the caller of acceptRow(...) should not give a NULL row...
				 * ...which should then mean that the end of the stream has been reached. */
				if (pendingRow == null)
					endReached = true;

			}catch(InterruptedException ie){
				/* If the thread has been interrupted, set this TableSink in a state similar to
				 * when the end of the stream has been reached.
				 * NOTE(review): the interrupt status is not restored here (Thread.currentThread().interrupt()) ;
				 *               confirm this is intended. */
				pendingRow = null;
				endReached = true;

			}finally{
				// In all cases, all waiting threads must be freed:
				notifyAll();
			}
		}

		@Override
		public synchronized void endRows() throws IOException{
			try{
				// Wait until the last accepted row has been consumed:
				while(!endReached && pendingRow != null)
					wait();
			}catch(InterruptedException ie){
				/* Nothing to do in particular ; the end of the stream will be set anyway. */
			}finally{
				// No more rows are available:
				pendingRow = null;
				// Set the END flag:
				endReached = true;
				// Notify all waiting threads that the end has been reached:
				notifyAll();
			}
		}

		/**

    Get the metadata found in the VOTable.

    + * + *

    Note: + * This method is blocking until metadata are fully available by this TableSink + * or if an error occurred while converting them in TAP metadata. + * A Thread interruption will also make this function returning. + *

		 *
		 * @return The metadata found in the VOTable header.
		 *
		 * @throws DataReadException	If the metadata can not be interpreted correctly.
		 */
		public synchronized TAPColumn[] getMeta() throws DataReadException{
			try{
				// Wait until metadata are available, or if an error has occurred while accepting them:
				while(metaError == null && meta == null)
					wait();

				// If there was an error while interpreting the accepted metadata, throw it:
				if (metaError != null)
					throw metaError;

				// Otherwise, just return the metadata:
				return meta;

			}catch(InterruptedException ie){
				/* If the thread has been interrupted, set this TableSink in a state similar to
				 * when the end of the stream has been reached: */
				endReached = true;
				/* Return the metadata ;
				 * NULL will be returned if the interruption has occurred before the real reading of the VOTable metadata: */
				return meta;

			}finally{
				// In all cases, the waiting threads must be freed:
				notifyAll();
			}
		}

		/**

    Get the last accepted row.

    + * + *

    Note: + * This function is blocking until a row has been accepted or the end of the stream has been reached. + * A Thread interruption will also make this function returning. + *

		 *
		 * @return	The last accepted row,
		 *        	or NULL if no more rows are available or if the iteration has been interrupted/canceled.
		 */
		public synchronized Object[] getRow(){
			try{
				// Wait until a row has been accepted or the end has been reached:
				while(!endReached && pendingRow == null)
					wait();

				// If there is no more rows, just return NULL (meaning for the caller "end of stream"):
				if (endReached && pendingRow == null)
					return null;

				/* Otherwise, reset pendingRow to NULL in order to enable the reading of the next row,
				 * and finally return the last accepted row: */
				Object[] row = pendingRow;
				pendingRow = null;
				return row;

			}catch(InterruptedException ie){
				/* If the thread has been interrupted, set this TableSink in a state similar to
				 * when the end of the stream has been reached: */
				endReached = true;
				// Return NULL, meaning the end of the stream has been reached:
				return null;

			}finally{
				// In all cases, the waiting threads must be freed:
				notifyAll();
			}
		}

		/**
		 * Extract an array of {@link TAPColumn} objects. Each corresponds to one of the columns listed in the given table,
		 * and so corresponds to the metadata of a column.
		 *
		 * @param table	{@link StarTable} which contains only the columns' information.
		 *
		 * @return The corresponding list of {@link TAPColumn} objects.
		 *
		 * @throws DataReadException	If there is a problem while resolving the field datatype (for instance: unknown datatype, a multi-dimensional array is provided, a bad number format for the arraysize).
		 */
		protected TAPColumn[] extractColMeta(final StarTable table) throws DataReadException{
			// Count the number of columns and initialize the array:
			TAPColumn[] columns = new TAPColumn[table.getColumnCount()];

			// Add all columns meta:
			for(int i = 0; i < columns.length; i++){
				// get the field:
				ColumnInfo colInfo = table.getColumnInfo(i);

				// get the datatype:
				String datatype = getAuxDatumValue(colInfo, "Datatype");

				// get the arraysize:
				String arraysize = ColumnInfo.formatShape(colInfo.getShape());

				// get the xtype:
				String xtype = getAuxDatumValue(colInfo, "xtype");

				// Resolve the field type:
				DBType type;
				try{
					type = resolveVotType(datatype, arraysize, xtype).toTAPType();
				}catch(TAPException te){
					// re-throw as a DataReadException, preserving the original exception as cause when needed:
					if (te instanceof DataReadException)
						throw (DataReadException)te;
					else
						throw new DataReadException(te.getMessage(), te);
				}

				// build the TAPColumn object:
				TAPColumn col = new TAPColumn(colInfo.getName(), type, colInfo.getDescription(), colInfo.getUnitString(), colInfo.getUCD(), colInfo.getUtype());
				col.setPrincipal(false);
				col.setIndexed(false);
				col.setStd(false);

				// append it to the array:
				columns[i] = col;
			}

			return columns;
		}

		/**
		 * Extract the specified auxiliary datum value from the given {@link ColumnInfo}.
		 *
		 * @param colInfo		{@link ColumnInfo} from which the auxiliary datum must be extracted.
		 * @param auxDatumName	The name of the datum to extract.
		 *
		 * @return	The extracted value as String,
		 *        	or NULL if the datum does not exist.
		 */
		protected String getAuxDatumValue(final ColumnInfo colInfo, final String auxDatumName){
			DescribedValue value = colInfo.getAuxDatumByName(auxDatumName);
			return (value != null) ? value.getValue().toString() : null;
		}

	}

	/** Stream containing the VOTable on which this {@link TableIterator} is iterating. */
	protected final InputStream input;
	/** The StarTable consumer which is used to iterate on each row. */
	protected final StreamVOTableSink sink;

	/** Indicate whether the row iteration has already started. */
	protected boolean iterationStarted = false;
	/** Indicate whether the last row has already been reached. */
	protected boolean endReached = false;

	/** The last read row. Column iteration is done on this array. */
	protected Object[] row;
	/** Index of the last read column (=0 just after {@link #nextRow()} and before {@link #nextCol()}, ={@link #nbCol} after the last column has been read). */
	protected int indCol = -1;
	/** Number of columns available according to the metadata. */
	protected int nbCol = 0;

	/**
	 * Build a TableIterator able to read rows and columns inside the given VOTable input stream.
	 *
	 * <p>
	 * 	The VOTable is parsed in a separate thread (STIL streaming) ; rows are handed over
	 * 	to this iterator one by one through the internal {@link StreamVOTableSink}.
	 * </p>
	 *
	 * @param input	Input stream over a VOTable document.
	 *
	 * @throws NullPointerException	If NULL is given in parameter.
	 * @throws DataReadException	If the given VOTable can not be parsed.
	 */
	public VOTableIterator(final InputStream input) throws DataReadException{
		// An input stream MUST BE provided:
		if (input == null)
			throw new NullPointerException("Missing VOTable document input stream over which to iterate!");
		this.input = input;

		try{

			// Set the VOTable builder/interpreter:
			final TableBuilder tb = (new StarTableFactory()).getTableBuilder("votable");

			// Build the TableSink to use:
			sink = new StreamVOTableSink();

			// Initiate the stream process:
			Thread streamThread = new Thread(){
				@Override
				public void run(){
					try{
						tb.streamStarTable(input, sink, null);
					}catch(IOException e){
						/* A STREAM_ABORTED_MESSAGE IOException is the normal way to interrupt the
						 * STIL streaming (see close()) ; any other IOException is unexpected. */
						if (e.getMessage() != null && !e.getMessage().equals(STREAM_ABORTED_MESSAGE))
							e.printStackTrace();
					}
				}
			};
			streamThread.start();

		}catch(Exception ex){
			throw new DataReadException("Unable to parse/read the given VOTable input stream!", ex);
		}
	}

	@Override
	public TAPColumn[] getMetadata() throws DataReadException{
		return sink.getMeta();
	}

	@Override
	public boolean nextRow() throws DataReadException{
		// If no more rows, return false directly:
		if (endReached)
			return false;

		// Fetch the row (blocking until the parsing thread provides one):
		row = sink.getRow();

		// Reset the column iteration:
		if (!iterationStarted){
			iterationStarted = true;
			// the number of columns is fixed by the VOTable metadata:
			nbCol = sink.getMeta().length;
		}
		indCol = 0;

		// Tells whether there is more rows or not:
		endReached = (row == null);
		return !endReached;
	}

	@Override
	public boolean hasNextCol() throws IllegalStateException, DataReadException{
		// Check the read state:
		checkReadState();

		// Determine whether the last column has been reached or not:
		return (indCol < nbCol);
	}

	@Override
	public Object nextCol() throws NoSuchElementException, IllegalStateException, DataReadException{
		// Check the read state and ensure there is still at least one column to read:
		if (!hasNextCol())
			throw new NoSuchElementException("No more field to read!");

		// Get the column value:
		return row[indCol++];
	}

	@Override
	public DBType getColType() throws IllegalStateException, DataReadException{
		// Basically check the read state (for rows iteration):
		checkReadState();

		// Check deeper the read state (for columns iteration):
		if (indCol <= 0)
			throw new IllegalStateException("No field has yet been read!");
		else if (indCol > nbCol)
			throw new IllegalStateException("All fields have already been read!");

		// Return the column type:
		return sink.getMeta()[indCol - 1].getDatatype();
	}

	@Override
	public void close() throws DataReadException{
		// Stop the streaming thread nicely ; it will receive a STREAM_ABORTED_MESSAGE IOException:
		endReached = true;
		sink.stop();
		// input.close(); // in case sink.stop() is not enough to stop the VOTable reading!
	}

	/**

    Check the row iteration state. That's to say whether:

    + *
      + *
    • the row iteration has started = the first row has been read = a first call of {@link #nextRow()} has been done
    • + *
    • AND the row iteration is not finished = the last row has been read.
    • + *
    + * @throws IllegalStateException + */ + protected void checkReadState() throws IllegalStateException{ + if (!iterationStarted) + throw new IllegalStateException("No row has yet been read!"); + else if (endReached) + throw new IllegalStateException("End of VOTable file already reached!"); + } + + /** + * Resolve a VOTable field type by using the datatype, arraysize and xtype strings as specified in a VOTable document. + * + * @param datatype Attribute value of VOTable corresponding to the datatype. + * @param arraysize Attribute value of VOTable corresponding to the arraysize. + * @param xtype Attribute value of VOTable corresponding to the xtype. + * + * @return The resolved VOTable field type, or a CHAR(*) type if the specified type can not be resolved. + * + * @throws DataReadException If a field datatype is unknown. + */ + public static VotType resolveVotType(final String datatype, final String arraysize, final String xtype) throws DataReadException{ + // If no datatype is specified, return immediately a CHAR(*) type: + if (datatype == null || datatype.trim().length() == 0) + return new VotType(VotDatatype.CHAR, "*"); + + // Identify the specified datatype: + VotDatatype votdatatype; + try{ + votdatatype = VotDatatype.valueOf(datatype.toUpperCase()); + }catch(IllegalArgumentException iae){ + throw new DataReadException("unknown field datatype: \"" + datatype + "\""); + } + + // Build the VOTable type: + return new VotType(votdatatype, arraysize, xtype); + } + +} diff --git a/src/tap/db/DBConnection.java b/src/tap/db/DBConnection.java index 695cf14ffde4c236e56b6c99157cb9b5a2aefee2..e836211115b3159bae26c789792821f3de27b892 100644 --- a/src/tap/db/DBConnection.java +++ b/src/tap/db/DBConnection.java @@ -16,47 +16,256 @@ package tap.db; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . 
* - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ -import cds.savot.model.SavotTR; +import tap.TAPFactory; +import tap.data.DataReadException; +import tap.data.TableIterator; +import tap.metadata.TAPColumn; +import tap.metadata.TAPMetadata; import tap.metadata.TAPTable; -import uws.service.log.UWSLogType; - import adql.query.ADQLQuery; /** - * TODO + *

    Connection to the "database" (whatever is the type or whether it is linked to a true DBMS connection).

    + * + *

    It lets executing ADQL queries and updating the TAP datamodel (with the list of schemas, tables and columns published in TAP, + * or with uploaded tables).

    * - * @author Grégory Mantelet (CDS) - * @version 06/2012 + *

    IMPORTANT: + * This connection aims only to provide a common and known interface for any kind of database connection. + * A connection MUST be opened/created and closed/freed ONLY by the {@link TAPFactory}, which will usually merely wrap + * the real database connection by a {@link DBConnection} object. That's why this interface does not provide anymore + * a close() function. + *

    * - * @param Result type of the execution of a query (see {@link #executeQuery(String, ADQLQuery)}. + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (03/2015) */ -public interface DBConnection< R > { - - public final static UWSLogType LOG_TYPE_DB_ACTIVITY = UWSLogType.createCustomLogType("DBActivity"); +public interface DBConnection { + /** + *

    Get any identifier for this connection.

    + * + *

    note: it is used only for logging purpose.

    + * + * @return ID of this connection. + */ public String getID(); - public void startTransaction() throws DBException; - - public void cancelTransaction() throws DBException; - - public void endTransaction() throws DBException; - - public R executeQuery(final String sqlQuery, final ADQLQuery adqlQuery) throws DBException; - - public void createSchema(final String schemaName) throws DBException; + /** + *

    Fetch the whole content of TAP_SCHEMA.

    + * + *

    + * This function SHOULD be used only once: at the starting of the TAP service. It is an alternative way + * to get the published schemas, tables and columns. The other way is to build a {@link TAPMetadata} object + * yourself in function of the schemas/tables/columns you want to publish (i.e. which can be done by reading + * metadata from a XML document - following the same schema - XSD- as for the TAP resource tables) + * and then to load them in the DB (see {@link #setTAPSchema(TAPMetadata)} for more details). + *

    + * + *

    CAUTION: + * This function MUST NOT be used if the tables to publish or the standard TAP_SCHEMA tables have names in DB different from the + * ones defined by the TAP standard. So, if DB names are different from the ADQL names, you have to write yourself a way to get + * the metadata from the DB. + *

    + * + *

    Important note: + * If the schema or some standard tables or columns are missing, TAP_SCHEMA will be considered as incomplete + * and an exception will be thrown. + *

    + * + *

    Note: + * This function MUST be able to read the standard tables and columns described by the IVOA. All other tables/columns + * will be merely ignored. + *

    + * + * @return Content of TAP_SCHEMA inside the DB. + * + * @throws DBException If TAP_SCHEMA can not be found, is incomplete or if some important metadata can not be retrieved. + * + * @since 2.0 + */ + public TAPMetadata getTAPSchema() throws DBException; - public void dropSchema(final String schemaName) throws DBException; + /** + *

    Empty and then fill all the TAP_SCHEMA tables with the given list of metadata.

    + * + *

    + * This function SHOULD be used only once: at the starting of the TAP service, + * when metadata are loaded from a XML document (following the same schema - XSD- + * as for the TAP resource tables). + *

    + * + *

    + * THIS FUNCTION IS MANIPULATING THE SCHEMAS AND TABLES OF YOUR DATABASE. + * SO IT SHOULD HAVE A SPECIFIC BEHAVIOR DESCRIBED BELOW. + * SO PLEASE READ THE FOLLOWINGS AND TRY TO RESPECT IT AS MUCH AS POSSIBLE IN THE IMPLEMENTATIONS + *

    + * + *

    TAP_SCHEMA CREATION

    + *

    + * This function is MAY drop and then re-create the schema TAP_SCHEMA and all + * its tables listed in the TAP standard (TAP_SCHEMA.schemas, .tables, .columns, .keys and .key_columns). + * All other tables inside TAP_SCHEMA SHOULD NOT be modified! + *

    + * + *

    + * The schema and the tables MUST be created using either the standard definition or the + * definition provided in the {@link TAPMetadata} object (if provided). Indeed, if your definition of these TAP tables + * is different from the standard (the standard + new elements), you MUST provide your modifications in parameter + * through the {@link TAPMetadata} object so that they can be applied and taken into account in TAP_SCHEMA. + *

    + * + *

    Note: + * DB names provided in the given TAPMetadata (see {@link TAPTable#getDBSchemaName()}, {@link TAPTable#getDBName()} and {@link TAPColumn#getDBName()}) + * are used for the creation and filling of the tables. + * + * Whether these requests must be case sensitive or not SHOULD be managed by ADQLTranslator. + *

    + * + *

    TAPMetadata PARAMETER

    + *

    + * This object MUST contain all schemas, tables and columns that MUST be published. All its content will be + * used in order to fill the TAP_SCHEMA tables. + *

    + *

    + * Of course, TAP_SCHEMA and its tables MAY be provided in this object. But: + *

    + *
      + *
    • (a) if TAP_SCHEMA tables are NOT provided: + * this function SHOULD consider their definition as exactly the one provided by + * the TAP standard/protocol. If so, the standard definition MUST be automatically added + * into the {@link TAPMetadata} object AND into TAP_SCHEMA. + *
    • + *
    • (b) if TAP_SCHEMA tables ARE provided: + * the definition of all given elements will be taken into account while updating the TAP_SCHEMA. + * Each element definition not provided MUST be considered as exactly the same as the standard one + * and MUST be added into the {@link TAPMetadata} object AND into TAP_SCHEMA. + *
    • + *
    + * + *

    Note: By default, all implementations of this interface in the TAP library will fill only standard columns and tables of TAP_SCHEMA. + * To fill your own, you MUST implement yourself this interface or to extend an existing implementation.

    + * + *

    WARNING: + * (b) lets consider a TAP_SCHEMA different from the standard one. BUT, these differences MUST be only additions, + * NOT modifications or deletion of the standard definition! This function MUST be able to work AT LEAST on a + * standard definition of TAP_SCHEMA. + *

    + * + *

    FILLING BEHAVIOUR

    + *

    + * The TAP_SCHEMA tables SHOULD be completely emptied (in SQL: "DELETE FROM <table_name>;" or merely "DROP TABLE <table_name>") before insertions can be processed. + *

    + * + *

    ERRORS MANAGEMENT

    + *

    + * If any error occurs while executing any "DB" queries (in SQL: DROP, DELETE, INSERT, CREATE, ...), all queries executed + * before in this function MUST be canceled (in SQL: ROLLBACK). + *

    + * + * @param metadata List of all schemas, tables, columns and foreign keys to insert in the TAP_SCHEMA. + * + * @throws DBException If any error occurs while updating the database. + * + * @since 2.0 + */ + public void setTAPSchema(final TAPMetadata metadata) throws DBException; - public void createTable(final TAPTable table) throws DBException; + /** + * Add the defined and given table inside the TAP_UPLOAD schema. + * + *

    If the TAP_UPLOAD schema does not already exist, it will be created.

    + * + *

    note: A table of TAP_UPLOAD MUST be transient and persistent only for the lifetime of the query. + * So, this function should always be used with {@link #dropUploadedTable(TAPTable)}, which is called at + * the end of each query execution.

    + * + * @param tableDef Definition of the table to upload (list of all columns and of their type). + * @param data Rows and columns of the table to upload. + * + * @return true if the given table has been successfully added, false otherwise. + * + * @throws DBException If any error occurs while adding the table. + * @throws DataReadException If any error occurs while reading the given data (particularly if any limit - in byte or row - set in the TableIterator is reached). + * + * @since 2.0 + */ + public boolean addUploadedTable(final TAPTable tableDef, final TableIterator data) throws DBException, DataReadException; - public void insertRow(final SavotTR row, final TAPTable table) throws DBException; + /** + *

    Drop the specified uploaded table from the database. + * More precisely, it means dropping a table from the TAP_UPLOAD schema.

    + * + *

    Note: + * This function SHOULD drop only one table. So, if more than one table match in the "database" to the given one, an exception MAY be thrown. + * This behavior is implementation-dependent. + *

    + * + * @param tableDef Definition of the uploaded table to drop (the whole object is needed in order to get the DB schema and tables names). + * + * @return true if the specified table has been successfully dropped, false otherwise. + * + * @throws DBException If any error occurs while dropping the specified uploaded table. + * + * @since 2.0 + */ + public boolean dropUploadedTable(final TAPTable tableDef) throws DBException; - public void dropTable(final TAPTable table) throws DBException; + /** + *

    Let executing the given ADQL query.

    + * + *

    The result of this query must be formatted as a table, and so must be iterable using a {@link TableIterator}.

    + * + *

    note: the interpretation of the ADQL query is up to the implementation. In most of the case, it is just needed + * to translate this ADQL query into an SQL query (understandable by the chosen DBMS).

    + * + * @param adqlQuery ADQL query to execute. + * + * @return The table result. + * + * @throws DBException If any errors occurs while executing the query. + * + * @since 2.0 + */ + public TableIterator executeQuery(final ADQLQuery adqlQuery) throws DBException; - public void close() throws DBException; + /** + *

    Set the number of rows to fetch before searching/getting the following. + * Thus, rows are fetched by block whose the size is set by this function.

    + * + *

    + * This feature may not be supported. In such case or if an exception occurs while setting the fetch size, + * this function must not send any exception and the connection stays with its default fetch size. A message may be however + * logged. + *

    + * + *

    Note: + * The "fetch size" should be taken into account only for SELECT queries executed by {@link #executeQuery(ADQLQuery)}. + *

    + * + *

    + * This feature is generally implemented by JDBC drivers using the V3 protocol. Thus, here is how the PostgreSQL JDBC documentation + * (https://jdbc.postgresql.org/documentation/head/query.html#query-with-cursor) describes this feature: + *

    + *
    + *

    + * By default the driver collects all the results for the query at once. This can be inconvenient for large data sets + * so the JDBC driver provides a means of basing a ResultSet on a database cursor and only fetching a small number of rows. + *

    + *

    + * A small number of rows are cached on the client side of the connection and when exhausted the next block of rows + * is retrieved by repositioning the cursor. + *

    + *
    + * + * @param size Blocks size (in number of rows) to fetch. + * + * @since 2.0 + */ + public void setFetchSize(final int size); } diff --git a/src/tap/db/JDBCConnection.java b/src/tap/db/JDBCConnection.java index ee79567f7deb1a166c18e4eb188145642be6a9c2..c596f26ded5673229c53f92c86717555fe3db689 100644 --- a/src/tap/db/JDBCConnection.java +++ b/src/tap/db/JDBCConnection.java @@ -16,81 +16,366 @@ package tap.db; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomishes Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.Driver; import java.sql.DriverManager; +import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; +import java.sql.Timestamp; +import java.text.ParseException; +import java.util.ArrayList; +import java.util.HashMap; import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import tap.data.DataReadException; +import tap.data.ResultSetTableIterator; +import tap.data.TableIterator; import tap.log.TAPLog; import tap.metadata.TAPColumn; +import tap.metadata.TAPForeignKey; +import tap.metadata.TAPMetadata; +import tap.metadata.TAPMetadata.STDSchema; +import tap.metadata.TAPMetadata.STDTable; +import tap.metadata.TAPSchema; import tap.metadata.TAPTable; +import tap.metadata.TAPTable.TableType; +import uws.ISO8601Format; +import uws.service.log.UWSLog.LogLevel; +import adql.db.DBColumn; +import adql.db.DBType; +import adql.db.DBType.DBDatatype; +import adql.db.STCS; +import adql.db.STCS.Region; import adql.query.ADQLQuery; -import cds.savot.model.SavotTR; -import cds.savot.model.TDSet; +import 
adql.query.IdentifierField; +import adql.translator.ADQLTranslator; +import adql.translator.JDBCTranslator; +import adql.translator.TranslationException; /** - * Simple implementation of the {@link DBConnection} interface. - * It creates and manages a JDBC connection to a specified database. - * Thus results of any executed SQL query will be a {@link ResultSet}. + *

    This {@link DBConnection} implementation is theoretically able to deal with any DBMS JDBC connection.

    + * + *

    Note: + * "Theoretically", because its design has been done using information about Postgres, SQLite, Oracle, MySQL and Java DB (Derby). + * Then it has been really tested successfully with Postgres and SQLite. + *

    + * + *

    Deal with different DBMS features

    + * + *

    Update queries are taking into account whether the following features are supported by the DBMS:

    + *
      + *
    • data definition: when not supported, no update operation will be possible. + * All corresponding functions will then throw a {@link DBException} ; + * only {@link #executeQuery(ADQLQuery)} will be possibly called.
    • + * + *
    • transactions: when not supported, no transaction is started or merely used. + * It means that in case of update failure, no rollback will be possible + * and that already done modification will remain in the database.
    • + * + *
    • schemas: when the DBMS does not have the notion of schema (like SQLite), no schema creation or dropping will be obviously processed. + * Besides, if not already done, database name of all tables will be prefixed by the schema name.
    • + * + *
    • batch updates: when not supported, updates will just be done, "normally, one by one. + * In one word, there will be merely no optimization. + * Anyway, this feature concerns only the insertions into tables.
    • + * + *
    • case sensitivity of identifiers: the case sensitivity of quoted identifier varies from the used DBMS. This {@link DBConnection} + * implementation is able to adapt itself in function of the way identifiers are stored and + * researched in the database. How the case sensitivity is managed by the DBMS is the problem + * of only one function (which can be overwritten if needed): {@link #equals(String, String, boolean)}.
    • + *
    + * + *

    Warning: + * All these features have no impact at all on ADQL query executions ({@link #executeQuery(ADQLQuery)}). + *

    + * + *

    Datatypes

    + * + *

    + * All datatype conversions done while fetching a query result (via a {@link ResultSet}) + * are done exclusively by the returned {@link TableIterator} (so, here {@link ResultSetTableIterator}). + *

    + * + *

    + * However, datatype conversions done while uploading a table are done here by the function + * {@link #convertTypeToDB(DBType)}. This function uses first the conversion function of the translator + * ({@link JDBCTranslator#convertTypeToDB(DBType)}), and then {@link #defaultTypeConversion(DBType)} + * if it fails. + *

    + * + *

    + * In this default conversion, all typical DBMS datatypes are taken into account, EXCEPT the geometrical types + * (POINT and REGION). That's why it is recommended to use a translator in which the geometrical types are supported + * and managed. + *

    + * + *

    Fetch size

    + * + *

    + * The possibility to specify a "fetch size" to the JDBC driver (and more exactly to a {@link Statement}) may reveal + * very helpful when dealing with large datasets. Thus, it is possible to fetch rows by block of a size represented + * by this "fetch size". This is also possible with this {@link DBConnection} thanks to the function {@link #setFetchSize(int)}. + *

    + * + *

    + * However, some JDBC driver or DBMS may not support this feature. In such case, it is then automatically disabled by + * {@link JDBCConnection} so that any subsequent queries do not attempt to use it again. The {@link #supportsFetchSize} + * is however reset to true when {@link #setFetchSize(int)} is called. + *

    + * + *

    Note 1: + * The "fetch size" feature is used only for SELECT queries executed by {@link #executeQuery(ADQLQuery)}. In all other functions, + * results of SELECT queries are fetched with the default parameter of the JDBC driver and its {@link Statement} implementation. + *

    + * + *

    Note 2: + * By default, this feature is disabled. So the default value of the JDBC driver is used. + * To enable it, a simple call to {@link #setFetchSize(int)} is enough, whatever is the given value. + *

    * * @author Grégory Mantelet (CDS;ARI) - * @version 1.1 (04/2014) + * @version 2.0 (04/2015) + * @since 2.0 */ -public class JDBCConnection implements DBConnection { +public class JDBCConnection implements DBConnection { - /** JDBC prefix of any database URL (for instance: jdbc:postgresql://127.0.0.1/myDB or jdbc:postgresql:myDB). */ - public final static String JDBC_PREFIX = "jdbc"; + /** DBMS name of PostgreSQL used in the database URL. */ + protected final static String DBMS_POSTGRES = "postgresql"; + + /** DBMS name of SQLite used in the database URL. */ + protected final static String DBMS_SQLITE = "sqlite"; - /** Connection ID (typically, the job ID). */ + /** DBMS name of MySQL used in the database URL. */ + protected final static String DBMS_MYSQL = "mysql"; + + /** DBMS name of Oracle used in the database URL. */ + protected final static String DBMS_ORACLE = "oracle"; + + /** Name of the database column giving the database name of a TAP column, table or schema. */ + protected final static String DB_NAME_COLUMN = "dbname"; + + /** Connection ID (typically, the job ID). It lets identify the DB errors linked to the Job execution in the logs. */ protected final String ID; /** JDBC connection (created and initialized at the creation of this {@link JDBCConnection} instance). */ protected final Connection connection; - /** Logger to use if any message needs to be printed to the server manager. */ + /** The translator this connection must use to translate ADQL into SQL. It is also used to get information about the case sensitivity of all types of identifier (schema, table, column). */ + protected final JDBCTranslator translator; + + /** Object to use if any message needs to be logged. note: this logger may be NULL. If NULL, messages will never be printed. */ protected final TAPLog logger; + /* JDBC URL MANAGEMENT */ + + /** JDBC prefix of any database URL (for instance: jdbc:postgresql://127.0.0.1/myDB or jdbc:postgresql:myDB). 
*/ + public final static String JDBC_PREFIX = "jdbc"; + + /** Name (in lower-case) of the DBMS with which the connection is linked. */ + protected final String dbms; + + /* DBMS SUPPORTED FEATURES */ + + /** Indicate whether the DBMS supports transactions (start, commit, rollback and end). note: If no transaction is possible, no transaction will be used, but then, it will never possible to cancel modifications in case of error. */ + protected boolean supportsTransaction; + + /** Indicate whether the DBMS supports the definition of data (create, update, drop, insert into schemas and tables). note: If not supported, it will never possible to create TAP_SCHEMA from given metadata (see {@link #setTAPSchema(TAPMetadata)}) and to upload/drop tables (see {@link #addUploadedTable(TAPTable, TableIterator)} and {@link #dropUploadedTable(TAPTable)}). */ + protected boolean supportsDataDefinition; + + /** Indicate whether the DBMS supports several updates in once (using {@link Statement#addBatch(String)} and {@link Statement#executeBatch()}). note: If not supported, every updates will be done one by one. So it is not really a problem, but just a loss of optimization. */ + protected boolean supportsBatchUpdates; + + /** Indicate whether the DBMS has the notion of SCHEMA. Most of the DBMS has it, but not SQLite for instance. note: If not supported, the DB table name will be prefixed by the DB schema name followed by the character "_". Nevertheless, if the DB schema name is NULL, the DB table name will never be prefixed. */ + protected boolean supportsSchema; + + /* CASE SENSITIVITY SUPPORT */ + + /** Indicate whether UNquoted identifiers will be considered as case INsensitive and stored in mixed case by the DBMS. note: If FALSE, unquoted identifiers will still be considered as case insensitive for the researches, but will be stored in lower or upper case (in function of {@link #lowerCaseUnquoted} and {@link #upperCaseUnquoted}). 
If none of these two flags is TRUE, the storage case will be though considered as mixed. */ + protected boolean supportsMixedCaseUnquotedIdentifier; + /** Indicate whether the unquoted identifiers are stored in lower case in the DBMS. */ + protected boolean lowerCaseUnquoted; + /** Indicate whether the unquoted identifiers are stored in upper case in the DBMS. */ + protected boolean upperCaseUnquoted; + + /** Indicate whether quoted identifiers will be considered as case INsensitive and stored in mixed case by the DBMS. note: If FALSE, quoted identifiers will be considered as case sensitive and will be stored either in lower, upper or in mixed case (in function of {@link #lowerCaseQuoted}, {@link #upperCaseQuoted} and {@link #mixedCaseQuoted}). If none of these three flags is TRUE, the storage case will be mixed case. */ + protected boolean supportsMixedCaseQuotedIdentifier; + /** Indicate whether the quoted identifiers are stored in lower case in the DBMS. */ + protected boolean lowerCaseQuoted; + /** Indicate whether the quoted identifiers are stored in mixed case in the DBMS. */ + protected boolean mixedCaseQuoted; + /** Indicate whether the quoted identifiers are stored in upper case in the DBMS. */ + protected boolean upperCaseQuoted; + + /* FETCH SIZE */ + + /** Special fetch size meaning that the JDBC driver is free to set its own guess for this value. */ + public final static int IGNORE_FETCH_SIZE = 0; + /** Default fetch size. + * Note 1: this value may be however ignored if the JDBC driver does not support this feature. + * Note 2: by default set to {@link #IGNORE_FETCH_SIZE}. */ + public final static int DEFAULT_FETCH_SIZE = IGNORE_FETCH_SIZE; + + /**

    Indicate whether the last fetch size operation works.

    + *

    By default, this attribute is set to false, meaning that the "fetch size" feature is + * disabled. To enable it, a simple call to {@link #setFetchSize(int)} is enough, whatever is the given value.

    + *

    If just once this operation fails, the fetch size feature will be always considered as unsupported in this {@link JDBCConnection} + * until the next call of {@link #setFetchSize(int)}.

    */ + protected boolean supportsFetchSize = false; + + /**

    Fetch size to set in the {@link Statement} in charge of executing a SELECT query.

    + *

    Note 1: this value must always be positive. If negative or null, it will be ignored and the {@link Statement} will keep its default behavior.

    + *

    Note 2: if this feature is enabled (i.e. has a value > 0), the AutoCommit will be disabled.

    */ + protected int fetchSize = DEFAULT_FETCH_SIZE; + /** - *

    - * Creates a JDBC connection to the specified database and with the specified JDBC driver. - * This connection is established using the given user name and password. - *

    - *

    note: the JDBC driver is loaded using

    Class.forName(driverPath)
    .

    + *

    Creates a JDBC connection to the specified database and with the specified JDBC driver. + * This connection is established using the given user name and password.

    + * + *

    note: the JDBC driver is loaded using

    Class.forName(driverPath)
    and the connection is created with
    DriverManager.getConnection(dbUrl, dbUser, dbPassword)
    .

    + * + *

    Warning: + * This constructor really creates a new SQL connection. Creating a SQL connection is time consuming! + * That's why it is recommended to use a pool of connections. When doing so, you should use the other constructor of this class + * ({@link #JDBCConnection(Connection, JDBCTranslator, String, TAPLog)}). + *

    * * @param driverPath Full class name of the JDBC driver. * @param dbUrl URL to the database. note This URL may not be prefixed by "jdbc:". If not, the prefix will be automatically added. - * @param dbUser Name of the database user (supposed to be the database owner). + * @param dbUser Name of the database user. * @param dbPassword Password of the given database user. - * @param logger Logger to use if any message needs to be printed to the server admin. + * @param translator {@link ADQLTranslator} to use in order to get SQL from an ADQL query and to get qualified DB table names. + * @param connID ID of this connection. note: may be NULL ; but in this case, logs concerning this connection will be more difficult to localize. + * @param logger Logger to use in case of need. note: may be NULL ; in this case, error will never be logged, but sometimes DBException may be raised. + * + * @throws DBException If the driver can not be found or if the connection can not merely be created (usually because DB parameters are wrong). + */ + public JDBCConnection(final String driverPath, final String dbUrl, final String dbUser, final String dbPassword, final JDBCTranslator translator, final String connID, final TAPLog logger) throws DBException{ + this(createConnection(driverPath, dbUrl, dbUser, dbPassword), translator, connID, logger); + } + + /** + * Create a JDBC connection by wrapping the given connection. * - * @throws DBException If the specified driver can not be found, or if the database URL or user is incorrect. + * @param conn Connection to wrap. + * @param translator {@link ADQLTranslator} to use in order to get SQL from an ADQL query and to get qualified DB table names. + * @param connID ID of this connection. note: may be NULL ; but in this case, logs concerning this connection will be more difficult to localize. + * @param logger Logger to use in case of need. note: may be NULL ; in this case, error will never be logged, but sometimes DBException may be raised. 
*/ - public JDBCConnection(final String ID, final String driverPath, final String dbUrl, final String dbUser, final String dbPassword, final TAPLog logger) throws DBException{ + public JDBCConnection(final Connection conn, final JDBCTranslator translator, final String connID, final TAPLog logger) throws DBException{ + if (conn == null) + throw new NullPointerException("Missing SQL connection! => can not create a JDBCConnection object."); + if (translator == null) + throw new NullPointerException("Missing ADQL translator! => can not create a JDBCConnection object."); + + this.connection = conn; + this.translator = translator; + this.ID = connID; this.logger = logger; - this.ID = ID; - // Load the specified JDBC driver: + // Set the supporting features' flags + DBMS type: try{ - Class.forName(driverPath); - }catch(ClassNotFoundException cnfe){ - logger.dbError("Impossible to find the JDBC driver \"" + driverPath + "\" !", cnfe); - throw new DBException("Impossible to find the JDBC driver \"" + driverPath + "\" !", cnfe); + DatabaseMetaData dbMeta = connection.getMetaData(); + dbms = getDBMSName(dbMeta.getURL()); + supportsTransaction = dbMeta.supportsTransactions(); + supportsBatchUpdates = dbMeta.supportsBatchUpdates(); + supportsDataDefinition = dbMeta.supportsDataDefinitionAndDataManipulationTransactions(); + supportsSchema = dbMeta.supportsSchemasInTableDefinitions(); + lowerCaseUnquoted = dbMeta.storesLowerCaseIdentifiers(); + upperCaseUnquoted = dbMeta.storesUpperCaseIdentifiers(); + supportsMixedCaseUnquotedIdentifier = dbMeta.supportsMixedCaseIdentifiers(); + lowerCaseQuoted = dbMeta.storesLowerCaseQuotedIdentifiers(); + mixedCaseQuoted = dbMeta.storesMixedCaseQuotedIdentifiers(); + upperCaseQuoted = dbMeta.storesUpperCaseQuotedIdentifiers(); + supportsMixedCaseQuotedIdentifier = dbMeta.supportsMixedCaseQuotedIdentifiers(); + }catch(SQLException se){ + throw new DBException("Unable to access to one or several DB metadata (url, supportsTransaction, 
supportsBatchUpdates, supportsDataDefinitionAndDataManipulationTransactions, supportsSchemasInTableDefinitions, storesLowerCaseIdentifiers, storesUpperCaseIdentifiers, supportsMixedCaseIdentifiers, storesLowerCaseQuotedIdentifiers, storesMixedCaseQuotedIdentifiers, storesUpperCaseQuotedIdentifiers and supportsMixedCaseQuotedIdentifiers) from the given Connection!"); } + } - // Build a connection to the specified database: + /** + * Extract the DBMS name from the given database URL. + * + * @param dbUrl JDBC URL to access the database. This URL must start with "jdbc:" ; otherwise an exception will be thrown. + * + * @return The DBMS name as found in the given URL. + * + * @throws DBException If NULL has been given, if the URL is not a JDBC one (starting with "jdbc:") or if the DBMS name is missing. + */ + protected static final String getDBMSName(String dbUrl) throws DBException{ + if (dbUrl == null) + throw new DBException("Missing database URL!"); + + if (!dbUrl.startsWith(JDBC_PREFIX + ":")) + throw new DBException("This DBConnection implementation is only able to deal with JDBC connection! (the DB URL must start with \"" + JDBC_PREFIX + ":\" ; given url: " + dbUrl + ")"); + + dbUrl = dbUrl.substring(5); + int indSep = dbUrl.indexOf(':'); + if (indSep <= 0) + throw new DBException("Incorrect database URL: " + dbUrl); + + return dbUrl.substring(0, indSep).toLowerCase(); + } + + /** + * Create a {@link Connection} instance using the given database parameters. + * The path of the JDBC driver will be used to load the adequate driver if none is found by default. + * + * @param driverPath Path to the JDBC driver. + * @param dbUrl JDBC URL to connect to the database. note This URL may not be prefixed by "jdbc:". If not, the prefix will be automatically added. + * @param dbUser Name of the user to use to connect to the database. + * @param dbPassword Password of the user to use to connect to the database. + * + * @return A new DB connection. 
+ * + * @throws DBException If the driver can not be found or if the connection can not merely be created (usually because DB parameters are wrong). + * + * @see DriverManager#getDriver(String) + * @see Driver#connect(String, Properties) + */ + private final static Connection createConnection(final String driverPath, final String dbUrl, final String dbUser, final String dbPassword) throws DBException{ + // Normalize the DB URL: String url = dbUrl.startsWith(JDBC_PREFIX) ? dbUrl : (JDBC_PREFIX + dbUrl); + + // Select the JDBDC driver: + Driver d; + try{ + d = DriverManager.getDriver(dbUrl); + }catch(SQLException e){ + try{ + // ...load it, if necessary: + if (driverPath == null) + throw new DBException("Missing JDBC driver path! Since the required JDBC driver is not yet loaded, this path is needed to load it."); + Class.forName(driverPath); + // ...and try again: + d = DriverManager.getDriver(dbUrl); + }catch(ClassNotFoundException cnfe){ + throw new DBException("Impossible to find the JDBC driver \"" + driverPath + "\" !", cnfe); + }catch(SQLException se){ + throw new DBException("No suitable JDBC driver found for the database URL \"" + dbUrl + "\" and the driver path \"" + driverPath + "\"!", se); + } + } + + // Build a connection to the specified database: try{ - connection = DriverManager.getConnection(url, dbUser, dbPassword); - logger.connectionOpened(this, (dbUrl.lastIndexOf('/') > 0 ? 
dbUrl.substring(dbUrl.lastIndexOf('/')) : dbUrl.substring(dbUrl.lastIndexOf(':')))); + Properties p = new Properties(); + if (dbUser != null) + p.setProperty("user", dbUser); + if (dbPassword != null) + p.setProperty("password", dbPassword); + Connection con = d.connect(url, p); + return con; }catch(SQLException se){ - logger.dbError("Impossible to establish a connection to the database \"" + url + "\" !", se); - throw new DBException("Impossible to establish a connection to the database \"" + url + "\" !", se); + throw new DBException("Impossible to establish a connection to the database \"" + url + "\"!", se); } } @@ -99,214 +384,2395 @@ public class JDBCConnection implements DBConnection { return ID; } + /** + *

    Get the JDBC connection wrapped by this {@link JDBCConnection} object.

    + * + *

    Note: + * This is the best way to get the JDBC connection in order to properly close it. + *

    + * + * @return The wrapped JDBC connection. + */ + public final Connection getInnerConnection(){ + return connection; + } + + /* ********************* */ + /* INTERROGATION METHODS */ + /* ********************* */ @Override - public void startTransaction() throws DBException{ + public TableIterator executeQuery(final ADQLQuery adqlQuery) throws DBException{ + String sql = null; + ResultSet result = null; try{ - Statement st = connection.createStatement(); - st.execute("begin"); - logger.transactionStarted(this); + // 1. Translate the ADQL query into SQL: + if (logger != null) + logger.logDB(LogLevel.INFO, this, "TRANSLATE", "Translating ADQL: " + adqlQuery.toADQL().replaceAll("(\t|\r?\n)+", " "), null); + sql = translator.translate(adqlQuery); + + // 2. Create the statement and if needed, configure it for the given fetch size: + if (supportsFetchSize && fetchSize > 0){ + try{ + connection.setAutoCommit(false); + }catch(SQLException se){ + supportsFetchSize = false; + if (logger != null) + logger.logDB(LogLevel.WARNING, this, "RESULT", "Fetch size unsupported!", null); + } + } + Statement stmt = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); + if (supportsFetchSize){ + try{ + stmt.setFetchSize(fetchSize); + }catch(SQLException se){ + supportsFetchSize = false; + if (logger != null) + logger.logDB(LogLevel.WARNING, this, "RESULT", "Fetch size unsupported!", null); + } + } + + // 3. Execute the SQL query: + result = stmt.executeQuery(sql); + if (logger != null) + logger.logDB(LogLevel.INFO, this, "EXECUTE", "SQL query: " + sql.replaceAll("(\t|\r?\n)+", " "), null); + + // 4. Return the result through a TableIterator object: + if (logger != null) + logger.logDB(LogLevel.INFO, this, "RESULT", "Returning result (" + (supportsFetchSize ? 
"fetch size = " + fetchSize : "all in once") + ").", null); + return createTableIterator(result, adqlQuery.getResultingColumns()); + }catch(SQLException se){ - logger.dbError("Impossible to begin a transaction !", se); - throw new DBException("Impossible to begin a transaction !", se); + close(result); + if (logger != null) + logger.logDB(LogLevel.ERROR, this, "EXECUTE", "Unexpected error while EXECUTING SQL query!", null); + throw new DBException("Unexpected error while executing a SQL query: " + se.getMessage(), se); + }catch(TranslationException te){ + close(result); + if (logger != null) + logger.logDB(LogLevel.ERROR, this, "TRANSLATE", "Unexpected error while TRANSLATING ADQL into SQL!", null); + throw new DBException("Unexpected error while translating ADQL into SQL: " + te.getMessage(), te); + }catch(DataReadException dre){ + close(result); + if (logger != null) + logger.logDB(LogLevel.ERROR, this, "RESULT", "Unexpected error while reading the query result!", null); + throw new DBException("Impossible to read the query result, because: " + dre.getMessage(), dre); } } - @Override - public void cancelTransaction() throws DBException{ - try{ - connection.rollback(); - logger.transactionCancelled(this); - }catch(SQLException se){ - logger.dbError("Impossible to cancel/rollback a transaction !", se); - throw new DBException("Impossible to cancel (rollback) the transaction !", se); + /** + * Create a {@link TableIterator} instance which lets reading the given result table. + * + * @param rs Result of an SQL query. + * @param resultingColumns Metadata corresponding to each columns of the result. + * + * @return A {@link TableIterator} instance. + * + * @throws DataReadException If the metadata (columns count and types) can not be fetched + * or if any other error occurs. 
+ */ + protected TableIterator createTableIterator(final ResultSet rs, final DBColumn[] resultingColumns) throws DataReadException{ + return new ResultSetTableIterator(rs, translator, dbms, resultingColumns); + } + + /* *********************** */ + /* TAP_SCHEMA MANIPULATION */ + /* *********************** */ + + /** + * Tell when, compared to the other TAP standard tables, a given standard TAP table should be created. + * + * @param table Standard TAP table. + * + * @return An index between 0 and 4 (included) - 0 meaning the first table to create whereas 4 is the last one. + * -1 is returned if NULL is given in parameter of if the standard table is not taken into account here. + */ + protected int getCreationOrder(final STDTable table){ + if (table == null) + return -1; + + switch(table){ + case SCHEMAS: + return 0; + case TABLES: + return 1; + case COLUMNS: + return 2; + case KEYS: + return 3; + case KEY_COLUMNS: + return 4; + default: + return -1; } } + /* ************************************ */ + /* GETTING TAP_SCHEMA FROM THE DATABASE */ + /* ************************************ */ + + /** + *

    In this implementation, this function is first creating a virgin {@link TAPMetadata} object + * that will be filled progressively by calling the following functions:

    + *
      + *
    1. {@link #loadSchemas(TAPTable, TAPMetadata, Statement)}
    2. + *
    3. {@link #loadTables(TAPTable, TAPMetadata, Statement)}
    4. + *
    5. {@link #loadColumns(TAPTable, List, Statement)}
    6. + *
    7. {@link #loadKeys(TAPTable, TAPTable, List, Statement)}
    8. + *
    + * + *

    Note: + * If schemas are not supported by this DBMS connection, the DB name of all tables will be set to NULL + * and the DB name of all tables will be prefixed by the ADQL name of their respective schema. + *

    + * + * @see tap.db.DBConnection#getTAPSchema() + */ @Override - public void endTransaction() throws DBException{ + public TAPMetadata getTAPSchema() throws DBException{ + // Build a virgin TAP metadata: + TAPMetadata metadata = new TAPMetadata(); + + // Get the definition of the standard TAP_SCHEMA tables: + TAPSchema tap_schema = TAPMetadata.getStdSchema(supportsSchema); + + // LOAD ALL METADATA FROM THE STANDARD TAP TABLES: + Statement stmt = null; try{ - connection.commit(); - logger.transactionEnded(this); + // create a common statement for all loading functions: + stmt = connection.createStatement(); + + // load all schemas from TAP_SCHEMA.schemas: + if (logger != null) + logger.logDB(LogLevel.INFO, this, "LOAD_TAP_SCHEMA", "Loading TAP_SCHEMA.schemas.", null); + loadSchemas(tap_schema.getTable(STDTable.SCHEMAS.label), metadata, stmt); + + // load all tables from TAP_SCHEMA.tables: + if (logger != null) + logger.logDB(LogLevel.INFO, this, "LOAD_TAP_SCHEMA", "Loading TAP_SCHEMA.tables.", null); + List lstTables = loadTables(tap_schema.getTable(STDTable.TABLES.label), metadata, stmt); + + // load all columns from TAP_SCHEMA.columns: + if (logger != null) + logger.logDB(LogLevel.INFO, this, "LOAD_TAP_SCHEMA", "Loading TAP_SCHEMA.columns.", null); + loadColumns(tap_schema.getTable(STDTable.COLUMNS.label), lstTables, stmt); + + // load all foreign keys from TAP_SCHEMA.keys and TAP_SCHEMA.key_columns: + if (logger != null) + logger.logDB(LogLevel.INFO, this, "LOAD_TAP_SCHEMA", "Loading TAP_SCHEMA.keys and TAP_SCHEMA.key_columns.", null); + loadKeys(tap_schema.getTable(STDTable.KEYS.label), tap_schema.getTable(STDTable.KEY_COLUMNS.label), lstTables, stmt); + }catch(SQLException se){ - logger.dbError("Impossible to end/commit a transaction !", se); - throw new DBException("Impossible to end/commit the transaction !", se); + if (logger != null) + logger.logDB(LogLevel.ERROR, this, "LOAD_TAP_SCHEMA", "Impossible to create a Statement!", se); + throw new 
DBException("Can not create a Statement!", se); + }finally{ + close(stmt); } + + return metadata; } - @Override - public void close() throws DBException{ + /** + *

    Load into the given metadata all schemas listed in TAP_SCHEMA.schemas.

    + * + *

    Note: + * If schemas are not supported by this DBMS connection, the DB name of the loaded schemas is set to NULL. + *

    + * + * @param tableDef Definition of the table TAP_SCHEMA.schemas. + * @param metadata Metadata to fill with all found schemas. + * @param stmt Statement to use in order to interact with the database. + * + * @throws DBException If any error occurs while interacting with the database. + */ + protected void loadSchemas(final TAPTable tableDef, final TAPMetadata metadata, final Statement stmt) throws DBException{ + ResultSet rs = null; try{ - connection.close(); - logger.connectionClosed(this); + // Determine whether the dbName column exists: + /* note: if the schema notion is not supported by this DBMS, the column "dbname" is ignored. */ + boolean hasDBName = supportsSchema && isColumnExisting(tableDef.getDBSchemaName(), tableDef.getDBName(), DB_NAME_COLUMN, connection.getMetaData()); + + // Build the SQL query: + StringBuffer sqlBuf = new StringBuffer("SELECT "); + sqlBuf.append(translator.getColumnName(tableDef.getColumn("schema_name"))); + sqlBuf.append(", ").append(translator.getColumnName(tableDef.getColumn("description"))); + sqlBuf.append(", ").append(translator.getColumnName(tableDef.getColumn("utype"))); + if (hasDBName) + sqlBuf.append(", ").append(DB_NAME_COLUMN); + sqlBuf.append(" FROM ").append(translator.getTableName(tableDef, supportsSchema)).append(';'); + + // Execute the query: + rs = stmt.executeQuery(sqlBuf.toString()); + + // Create all schemas: + while(rs.next()){ + String schemaName = rs.getString(1), description = rs.getString(2), utype = rs.getString(3), dbName = (hasDBName ? 
rs.getString(4) : null); + + // create the new schema: + TAPSchema newSchema = new TAPSchema(schemaName, nullifyIfNeeded(description), nullifyIfNeeded(utype)); + if (dbName != null && dbName.trim().length() > 0) + newSchema.setDBName(dbName); + + // add the new schema inside the given metadata: + metadata.addSchema(newSchema); + } }catch(SQLException se){ - logger.dbError("Impossible to close a database transaction !", se); - throw new DBException("Impossible to close the database transaction !", se); + if (logger != null) + logger.logDB(LogLevel.ERROR, this, "LOAD_TAP_SCHEMA", "Impossible to load schemas from TAP_SCHEMA.schemas!", se); + throw new DBException("Impossible to load schemas from TAP_SCHEMA.schemas!", se); + }finally{ + close(rs); } } - /* ********************* */ - /* INTERROGATION METHODS */ - /* ********************* */ - @Override - public ResultSet executeQuery(final String sqlQuery, final ADQLQuery adqlQuery) throws DBException{ + /** + *

    Load into the corresponding metadata all tables listed in TAP_SCHEMA.tables.

    + * + *

    Note: + * Schemas are searched in the given metadata by their ADQL name and case sensitively. + * If they can not be found a {@link DBException} is thrown. + *

    + * + *

    Note: + * If schemas are not supported by this DBMS connection, the DB name of the loaded + * {@link TAPTable}s is prefixed by the ADQL name of their respective schema. + *

    + * + * @param tableDef Definition of the table TAP_SCHEMA.tables. + * @param metadata Metadata (containing already all schemas listed in TAP_SCHEMA.schemas). + * @param stmt Statement to use in order to interact with the database. + * + * @return The complete list of all loaded tables. note: this list is required by {@link #loadColumns(TAPTable, List, Statement)}. + * + * @throws DBException If a schema can not be found, or if any other error occurs while interacting with the database. + */ + protected List loadTables(final TAPTable tableDef, final TAPMetadata metadata, final Statement stmt) throws DBException{ + ResultSet rs = null; try{ - Statement stmt = connection.createStatement(); - logger.sqlQueryExecuting(this, sqlQuery); - ResultSet result = stmt.executeQuery(sqlQuery); - logger.sqlQueryExecuted(this, sqlQuery); - return result; + // Determine whether the dbName column exists: + boolean hasDBName = isColumnExisting(tableDef.getDBSchemaName(), tableDef.getDBName(), DB_NAME_COLUMN, connection.getMetaData()); + + // Build the SQL query: + StringBuffer sqlBuf = new StringBuffer("SELECT "); + sqlBuf.append(translator.getColumnName(tableDef.getColumn("schema_name"))); + sqlBuf.append(", ").append(translator.getColumnName(tableDef.getColumn("table_name"))); + sqlBuf.append(", ").append(translator.getColumnName(tableDef.getColumn("table_type"))); + sqlBuf.append(", ").append(translator.getColumnName(tableDef.getColumn("description"))); + sqlBuf.append(", ").append(translator.getColumnName(tableDef.getColumn("utype"))); + if (hasDBName) + sqlBuf.append(", ").append(DB_NAME_COLUMN); + sqlBuf.append(" FROM ").append(translator.getTableName(tableDef, supportsSchema)).append(';'); + + // Execute the query: + rs = stmt.executeQuery(sqlBuf.toString()); + + // Create all tables: + ArrayList lstTables = new ArrayList(); + while(rs.next()){ + String schemaName = rs.getString(1), tableName = rs.getString(2), typeStr = rs.getString(3), description = rs.getString(4), 
utype = rs.getString(5), dbName = (hasDBName ? rs.getString(6) : null); + + // get the schema: + TAPSchema schema = metadata.getSchema(schemaName); + if (schema == null){ + if (logger != null) + logger.logDB(LogLevel.ERROR, this, "LOAD_TAP_SCHEMA", "Impossible to find the schema of the table \"" + tableName + "\": \"" + schemaName + "\"!", null); + throw new DBException("Impossible to find the schema of the table \"" + tableName + "\": \"" + schemaName + "\"!"); + } + + // If the table name is qualified, check its prefix (it must match to the schema name): + int endPrefix = tableName.indexOf('.'); + if (endPrefix >= 0){ + if (endPrefix == 0) + throw new DBException("Incorrect table name syntax: \"" + tableName + "\"! Missing schema name (before '.')."); + else if (endPrefix == tableName.length() - 1) + throw new DBException("Incorrect table name syntax: \"" + tableName + "\"! Missing table name (after '.')."); + else if (schemaName == null) + throw new DBException("Incorrect schema prefix for the table \"" + tableName.substring(endPrefix + 1) + "\": this table is not in a schema, according to the column \"schema_name\" of TAP_SCHEMA.tables!"); + else if (!tableName.substring(0, endPrefix).trim().equalsIgnoreCase(schemaName)) + throw new DBException("Incorrect schema prefix for the table \"" + schemaName + "." + tableName.substring(tableName.indexOf('.') + 1) + "\": " + tableName + "! 
Mismatch between the schema specified in prefix of the column \"table_name\" and in the column \"schema_name\"."); + } + + // resolve the table type (if any) ; by default, it will be "table": + TableType type = TableType.table; + if (typeStr != null){ + try{ + type = TableType.valueOf(typeStr.toLowerCase()); + }catch(IllegalArgumentException iae){} + } + + // create the new table: + TAPTable newTable = new TAPTable(tableName, type, nullifyIfNeeded(description), nullifyIfNeeded(utype)); + newTable.setDBName(dbName); + + // add the new table inside its corresponding schema: + schema.addTable(newTable); + lstTables.add(newTable); + } + + return lstTables; }catch(SQLException se){ - logger.sqlQueryError(this, sqlQuery, se); - throw new DBException("Unexpected error while executing a SQL query: " + se.getMessage(), se); + if (logger != null) + logger.logDB(LogLevel.ERROR, this, "LOAD_TAP_SCHEMA", "Impossible to load tables from TAP_SCHEMA.tables!", se); + throw new DBException("Impossible to load tables from TAP_SCHEMA.tables!", se); + }finally{ + close(rs); } } - /* ************** */ - /* UPLOAD METHODS */ - /* ************** */ - @Override - public void createSchema(final String schemaName) throws DBException{ - String sql = "CREATE SCHEMA " + schemaName + ";"; + /** + *

    Load into the corresponding tables all columns listed in TAP_SCHEMA.columns.

    + * + *

    Note: + * Tables are searched in the given list by their ADQL name and case sensitively. + * If they can not be found a {@link DBException} is thrown. + *

    + * + * @param tableDef Definition of the table TAP_SCHEMA.columns. + * @param lstTables List of all published tables (= all tables listed in TAP_SCHEMA.tables). + * @param stmt Statement to use in order to interact with the database. + * + * @throws DBException If a table can not be found, or if any other error occurs while interacting with the database. + */ + protected void loadColumns(final TAPTable tableDef, final List lstTables, final Statement stmt) throws DBException{ + ResultSet rs = null; try{ - Statement stmt = connection.createStatement(); - stmt.executeUpdate(sql); - logger.schemaCreated(this, schemaName); + // Determine whether the dbName column exists: + boolean hasDBName = isColumnExisting(tableDef.getDBSchemaName(), tableDef.getDBName(), DB_NAME_COLUMN, connection.getMetaData()); + + // Build the SQL query: + StringBuffer sqlBuf = new StringBuffer("SELECT "); + sqlBuf.append(translator.getColumnName(tableDef.getColumn("table_name"))); + sqlBuf.append(", ").append(translator.getColumnName(tableDef.getColumn("column_name"))); + sqlBuf.append(", ").append(translator.getColumnName(tableDef.getColumn("description"))); + sqlBuf.append(", ").append(translator.getColumnName(tableDef.getColumn("unit"))); + sqlBuf.append(", ").append(translator.getColumnName(tableDef.getColumn("ucd"))); + sqlBuf.append(", ").append(translator.getColumnName(tableDef.getColumn("utype"))); + sqlBuf.append(", ").append(translator.getColumnName(tableDef.getColumn("datatype"))); + sqlBuf.append(", ").append(translator.getColumnName(tableDef.getColumn("size"))); + sqlBuf.append(", ").append(translator.getColumnName(tableDef.getColumn("principal"))); + sqlBuf.append(", ").append(translator.getColumnName(tableDef.getColumn("indexed"))); + sqlBuf.append(", ").append(translator.getColumnName(tableDef.getColumn("std"))); + if (hasDBName) + sqlBuf.append(", ").append(DB_NAME_COLUMN); + sqlBuf.append(" FROM ").append(translator.getTableName(tableDef, supportsSchema)).append(';'); + + 
// Execute the query: + rs = stmt.executeQuery(sqlBuf.toString()); + + // Create all tables: + while(rs.next()){ + String tableName = rs.getString(1), columnName = rs.getString(2), description = rs.getString(3), unit = rs.getString(4), ucd = rs.getString(5), utype = rs.getString(6), datatype = rs.getString(7), dbName = (hasDBName ? rs.getString(12) : null); + int size = rs.getInt(8); + boolean principal = toBoolean(rs.getObject(9)), indexed = toBoolean(rs.getObject(10)), std = toBoolean(rs.getObject(11)); + + // get the table: + TAPTable table = searchTable(tableName, lstTables.iterator()); + if (table == null){ + if (logger != null) + logger.logDB(LogLevel.ERROR, this, "LOAD_TAP_SCHEMA", "Impossible to find the table of the column \"" + columnName + "\": \"" + tableName + "\"!", null); + throw new DBException("Impossible to find the table of the column \"" + columnName + "\": \"" + tableName + "\"!"); + } + + // resolve the column type (if any) ; by default, it will be "VARCHAR" if unknown or missing: + DBDatatype tapDatatype = null; + // ...try to resolve the datatype in function of all datatypes declared by the TAP standard. 
+ if (datatype != null){ + try{ + tapDatatype = DBDatatype.valueOf(datatype.toUpperCase()); + }catch(IllegalArgumentException iae){} + } + // ...build the column type: + DBType type; + if (tapDatatype == null) + type = new DBType(DBDatatype.VARCHAR); + else + type = new DBType(tapDatatype, size); + + // create the new column: + TAPColumn newColumn = new TAPColumn(columnName, type, nullifyIfNeeded(description), nullifyIfNeeded(unit), nullifyIfNeeded(ucd), nullifyIfNeeded(utype)); + newColumn.setPrincipal(principal); + newColumn.setIndexed(indexed); + newColumn.setStd(std); + newColumn.setDBName(dbName); + + // add the new column inside its corresponding table: + table.addColumn(newColumn); + } }catch(SQLException se){ - logger.dbError("Impossible to create the schema \"" + schemaName + "\" !", se); - throw new DBException("Impossible to create the schema \"" + schemaName + "\" !", se); + if (logger != null) + logger.logDB(LogLevel.ERROR, this, "LOAD_TAP_SCHEMA", "Impossible to load columns from TAP_SCHEMA.columns!", se); + throw new DBException("Impossible to load columns from TAP_SCHEMA.columns!", se); + }finally{ + close(rs); } } - @Override - public void dropSchema(final String schemaName) throws DBException{ - String sql = "DROP SCHEMA IF EXISTS " + schemaName + " CASCADE;"; + /** + *

    Load into the corresponding tables all keys listed in TAP_SCHEMA.keys and detailed in TAP_SCHEMA.key_columns.

    + * + *

    Note: + * Tables and columns are searched in the given list by their ADQL name and case sensitively. + * If they can not be found a {@link DBException} is thrown. + *

    + * + * @param keysDef Definition of the table TAP_SCHEMA.keys. + * @param keyColumnsDef Definition of the table TAP_SCHEMA.key_columns. + * @param lstTables List of all published tables (= all tables listed in TAP_SCHEMA.tables). + * @param stmt Statement to use in order to interact with the database. + * + * @throws DBException If a table or a column can not be found, or if any other error occurs while interacting with the database. + */ + protected void loadKeys(final TAPTable keysDef, final TAPTable keyColumnsDef, final List lstTables, final Statement stmt) throws DBException{ + ResultSet rs = null; + PreparedStatement keyColumnsStmt = null; try{ - Statement stmt = connection.createStatement(); - stmt.executeUpdate(sql); - logger.schemaDropped(this, schemaName); + // Prepare the query to get the columns of each key: + StringBuffer sqlBuf = new StringBuffer("SELECT "); + sqlBuf.append(translator.getColumnName(keyColumnsDef.getColumn("key_id"))); + sqlBuf.append(", ").append(translator.getColumnName(keyColumnsDef.getColumn("from_column"))); + sqlBuf.append(", ").append(translator.getColumnName(keyColumnsDef.getColumn("target_column"))); + sqlBuf.append(" FROM ").append(translator.getTableName(keyColumnsDef, supportsSchema)); + sqlBuf.append(" WHERE ").append(translator.getColumnName(keyColumnsDef.getColumn("key_id"))).append(" = ?").append(';'); + keyColumnsStmt = connection.prepareStatement(sqlBuf.toString()); + + // Build the SQL query to get the keys: + sqlBuf.delete(0, sqlBuf.length()); + sqlBuf.append("SELECT ").append(translator.getColumnName(keysDef.getColumn("key_id"))); + sqlBuf.append(", ").append(translator.getColumnName(keysDef.getColumn("from_table"))); + sqlBuf.append(", ").append(translator.getColumnName(keysDef.getColumn("target_table"))); + sqlBuf.append(", ").append(translator.getColumnName(keysDef.getColumn("description"))); + sqlBuf.append(", ").append(translator.getColumnName(keysDef.getColumn("utype"))); + sqlBuf.append(" FROM 
").append(translator.getTableName(keysDef, supportsSchema)).append(';'); + + // Execute the query: + rs = stmt.executeQuery(sqlBuf.toString()); + + // Create all foreign keys: + while(rs.next()){ + String key_id = rs.getString(1), from_table = rs.getString(2), target_table = rs.getString(3), description = rs.getString(4), utype = rs.getString(5); + + // get the two tables (source and target): + TAPTable sourceTable = searchTable(from_table, lstTables.iterator()); + if (sourceTable == null){ + if (logger != null) + logger.logDB(LogLevel.ERROR, this, "LOAD_TAP_SCHEMA", "Impossible to find the source table of the foreign key \"" + key_id + "\": \"" + from_table + "\"!", null); + throw new DBException("Impossible to find the source table of the foreign key \"" + key_id + "\": \"" + from_table + "\"!"); + } + TAPTable targetTable = searchTable(target_table, lstTables.iterator()); + if (targetTable == null){ + if (logger != null) + logger.logDB(LogLevel.ERROR, this, "LOAD_TAP_SCHEMA", "Impossible to find the target table of the foreign key \"" + key_id + "\": \"" + target_table + "\"!", null); + throw new DBException("Impossible to find the target table of the foreign key \"" + key_id + "\": \"" + target_table + "\"!"); + } + + // get the list of columns joining the two tables of the foreign key: + HashMap columns = new HashMap(); + ResultSet rsKeyCols = null; + try{ + keyColumnsStmt.setString(1, key_id); + rsKeyCols = keyColumnsStmt.executeQuery(); + while(rsKeyCols.next()) + columns.put(rsKeyCols.getString(1), rsKeyCols.getString(2)); + }catch(SQLException se){ + if (logger != null) + logger.logDB(LogLevel.ERROR, this, "LOAD_TAP_SCHEMA", "Impossible to load key columns from TAP_SCHEMA.key_columns for the foreign key: \"" + key_id + "\"!", se); + throw new DBException("Impossible to load key columns from TAP_SCHEMA.key_columns for the foreign key: \"" + key_id + "\"!", se); + }finally{ + close(rsKeyCols); + } + + // create and add the new foreign key inside the source 
table: + try{ + sourceTable.addForeignKey(key_id, targetTable, columns, nullifyIfNeeded(description), nullifyIfNeeded(utype)); + }catch(Exception ex){ + if (logger != null) + logger.logDB(LogLevel.ERROR, this, "LOAD_TAP_SCHEMA", "Impossible to create the foreign key \"" + key_id + "\" because: " + ex.getMessage(), ex); + throw new DBException("Impossible to create the foreign key \"" + key_id + "\" because: " + ex.getMessage(), ex); + } + } }catch(SQLException se){ - logger.dbError("Impossible to drop the schema \"" + schemaName + "\" !", se); - throw new DBException("Impossible to drop the schema \"" + schemaName + "\" !", se); + if (logger != null) + logger.logDB(LogLevel.ERROR, this, "LOAD_TAP_SCHEMA", "Impossible to load columns from TAP_SCHEMA.columns!", se); + throw new DBException("Impossible to load columns from TAP_SCHEMA.columns!", se); + }finally{ + close(rs); + close(keyColumnsStmt); } } + /* ********************************** */ + /* SETTING TAP_SCHEMA IN THE DATABASE */ + /* ********************************** */ + + /** + *

    This function is just calling the following functions:

    + *
      + *
    1. {@link #mergeTAPSchemaDefs(TAPMetadata)}
    2. + *
    3. {@link #startTransaction()}
    4. + *
    5. {@link #resetTAPSchema(Statement, TAPTable[])}
    6. + *
    7. {@link #createTAPSchemaTable(TAPTable, Statement)} for each standard TAP_SCHEMA table
    8. + *
    9. {@link #fillTAPSchema(TAPMetadata)}
    10. + *
    11. {@link #createTAPTableIndexes(TAPTable, Statement)} for each standard TA_SCHEMA table
    12. + *
    13. {@link #commit()} or {@link #rollback()}
    14. + *
    15. {@link #endTransaction()}
    16. + *
    + * + *

    Important note: + * If the connection does not support transactions, then there will be merely no transaction. + * Consequently, any failure (exception/error) will not clean the partial modifications done by this function. + *

    + * + * @see tap.db.DBConnection#setTAPSchema(tap.metadata.TAPMetadata) + */ @Override - public void createTable(final TAPTable table) throws DBException{ - // Build the SQL query: - StringBuffer sqlBuf = new StringBuffer(); - sqlBuf.append("CREATE TABLE ").append(table.getDBSchemaName()).append('.').append(table.getDBName()).append("("); - Iterator it = table.getColumns(); - while(it.hasNext()){ - TAPColumn col = it.next(); - sqlBuf.append('"').append(col.getDBName()).append("\" ").append(' ').append(getDBType(col.getDatatype(), col.getArraySize(), logger)); - if (it.hasNext()) - sqlBuf.append(','); - } - sqlBuf.append(");"); + public void setTAPSchema(final TAPMetadata metadata) throws DBException{ + Statement stmt = null; - // Execute the creation query: - String sql = sqlBuf.toString(); try{ - Statement stmt = connection.createStatement(); - stmt.executeUpdate(sql); - logger.tableCreated(this, table); + // A. GET THE DEFINITION OF ALL STANDARD TAP TABLES: + TAPTable[] stdTables = mergeTAPSchemaDefs(metadata); + + startTransaction(); + + // B. RE-CREATE THE STANDARD TAP_SCHEMA TABLES: + stmt = connection.createStatement(); + + // 1. Ensure TAP_SCHEMA exists and drop all its standard TAP tables: + if (logger != null) + logger.logDB(LogLevel.INFO, this, "CLEAN_TAP_SCHEMA", "Cleaning TAP_SCHEMA.", null); + resetTAPSchema(stmt, stdTables); + + // 2. Create all standard TAP tables: + if (logger != null) + logger.logDB(LogLevel.INFO, this, "CREATE_TAP_SCHEMA", "Creating TAP_SCHEMA tables.", null); + for(TAPTable table : stdTables) + createTAPSchemaTable(table, stmt); + + // C. FILL THE NEW TABLE USING THE GIVEN DATA ITERATOR: + if (logger != null) + logger.logDB(LogLevel.INFO, this, "CREATE_TAP_SCHEMA", "Filling TAP_SCHEMA tables.", null); + fillTAPSchema(metadata); + + // D. 
CREATE THE INDEXES OF ALL STANDARD TAP TABLES: + if (logger != null) + logger.logDB(LogLevel.INFO, this, "CREATE_TAP_SCHEMA", "Creating TAP_SCHEMA tables' indexes.", null); + for(TAPTable table : stdTables) + createTAPTableIndexes(table, stmt); + + commit(); }catch(SQLException se){ - logger.dbError("Impossible to create the table \"" + table.getFullName() + "\" !", se); - throw new DBException("Impossible to create the table \"" + table.getFullName() + "\" !", se); + if (logger != null) + logger.logDB(LogLevel.ERROR, this, "CREATE_TAP_SCHEMA", "Impossible to SET TAP_SCHEMA in DB!", se); + rollback(); + throw new DBException("Impossible to SET TAP_SCHEMA in DB!", se); + }finally{ + close(stmt); + endTransaction(); } } /** - * Gets the database type corresponding to the given {@link TAPColumn} type. + *

    Merge the definition of TAP_SCHEMA tables given in parameter with the definition provided in the TAP standard.

    + * + *

    + * The goal is to get in output the list of all standard TAP_SCHEMA tables. But it must take into account the customized + * definition given in parameter if there is one. Indeed, if a part of TAP_SCHEMA is not provided, it will be completed here by the + * definition provided in the TAP standard. And so, if the whole TAP_SCHEMA is not provided at all, the returned tables will be those + * of the IVOA standard. + *

    * - * @param datatype Column datatype (short, int, long, float, double, boolea, char or unsignedByte). - * @param arraysize Size of the array type (1 if not an array, a value > 1 for an array). - * @param logger Object to use to print warnings (for instance, if a given datatype is unknown). + *

    Important note: + * If the TAP_SCHEMA definition is missing or incomplete in the given metadata, it will be added or completed automatically + * by this function with the definition provided in the IVOA TAP standard. + *

    * - * @return The corresponding database type or the given datatype if unknown. + *

    Note: + * Only the standard tables of TAP_SCHEMA are considered. The others are skipped (that's to say: never returned by this function ; + * however, they will stay in the given metadata). + *

    + * + *

    Note: + * If schemas are not supported by this DBMS connection, the DB name of schemas is set to NULL and + * the DB name of tables is prefixed by the schema name. + *

    + * + * @param metadata Metadata (with or without TAP_SCHEMA schema or some of its table). Must not be NULL + * + * @return The list of all standard TAP_SCHEMA tables, ordered by creation order (see {@link #getCreationOrder(tap.metadata.TAPMetadata.STDTable)}). + * + * @see TAPMetadata#resolveStdTable(String) + * @see TAPMetadata#getStdSchema(boolean) + * @see TAPMetadata#getStdTable(STDTable) */ - public static String getDBType(String datatype, final int arraysize, final TAPLog logger){ - datatype = (datatype == null) ? null : datatype.trim().toLowerCase(); + protected TAPTable[] mergeTAPSchemaDefs(final TAPMetadata metadata){ + // 1. Get the TAP_SCHEMA schema from the given metadata: + TAPSchema tapSchema = null; + Iterator itSchema = metadata.iterator(); + while(tapSchema == null && itSchema.hasNext()){ + TAPSchema schema = itSchema.next(); + if (schema.getADQLName().equalsIgnoreCase(STDSchema.TAPSCHEMA.label)) + tapSchema = schema; + } - if (datatype == null || datatype.isEmpty()){ - if (logger != null) - logger.warning("undefined datatype => considered as VARCHAR !"); - return "VARCHAR"; - } - - if (datatype.equals("short")) - return (arraysize == 1) ? "INT2" : "BYTEA"; - else if (datatype.equals("int")) - return (arraysize == 1) ? "INT4" : "BYTEA"; - else if (datatype.equals("long")) - return (arraysize == 1) ? "INT8" : "BYTEA"; - else if (datatype.equals("float")) - return (arraysize == 1) ? "FLOAT4" : "BYTEA"; - else if (datatype.equals("double")) - return (arraysize == 1) ? "FLOAT8" : "BYTEA"; - else if (datatype.equals("boolean")) - return (arraysize == 1) ? "BOOL" : "BYTEA"; - else if (datatype.equals("char")) - return (arraysize == 1) ? "CHAR(1)" : ((arraysize <= 0) ? "VARCHAR" : ("VARCHAR(" + arraysize + ")")); - else if (datatype.equals("unsignedbyte")) - return "BYTEA"; - else{ - if (logger != null) - logger.dbInfo("Warning: unknown datatype: \"" + datatype + "\" => considered as \"" + datatype + "\" !"); - return datatype; + // 2. 
Get the provided definition of the standard TAP tables: + TAPTable[] customStdTables = new TAPTable[5]; + if (tapSchema != null){ + + /* if the schemas are not supported with this DBMS, + * remove its DB name: */ + if (!supportsSchema) + tapSchema.setDBName(null); + + // retrieve only the standard TAP tables: + Iterator itTable = tapSchema.iterator(); + while(itTable.hasNext()){ + TAPTable table = itTable.next(); + int indStdTable = getCreationOrder(TAPMetadata.resolveStdTable(table.getADQLName())); + if (indStdTable > -1) + customStdTables[indStdTable] = table; + } } - } - @Override - public void dropTable(final TAPTable table) throws DBException{ - String sql = "DROP TABLE " + table.getDBSchemaName() + "." + table.getDBName() + ";"; - try{ - Statement stmt = connection.createStatement(); - stmt.executeUpdate(sql); - logger.tableDropped(this, table); - }catch(SQLException se){ - logger.dbError("Impossible to drop the table \"" + table.getFullName() + "\" !", se); - throw new DBException("Impossible to drop the table \"" + table.getFullName() + "\" !", se); + // 3. Build a common TAPSchema, if needed: + if (tapSchema == null){ + + // build a new TAP_SCHEMA definition based on the standard definition: + tapSchema = TAPMetadata.getStdSchema(supportsSchema); + + // add the new TAP_SCHEMA definition in the given metadata object: + metadata.addSchema(tapSchema); } - } - @Override - public void insertRow(final SavotTR row, final TAPTable table) throws DBException{ - StringBuffer sql = new StringBuffer("INSERT INTO "); - sql.append(table.getDBSchemaName()).append('.').append(table.getDBName()).append(" VALUES ("); + // 4. 
Finally, build the join between the standard tables and the custom ones: + TAPTable[] stdTables = new TAPTable[]{TAPMetadata.getStdTable(STDTable.SCHEMAS),TAPMetadata.getStdTable(STDTable.TABLES),TAPMetadata.getStdTable(STDTable.COLUMNS),TAPMetadata.getStdTable(STDTable.KEYS),TAPMetadata.getStdTable(STDTable.KEY_COLUMNS)}; + for(int i = 0; i < stdTables.length; i++){ - TDSet cells = row.getTDs(); - Iterator it = table.getColumns(); - String datatype, value; - TAPColumn col; - int i = 0; - while(it.hasNext()){ - col = it.next(); - if (i > 0) - sql.append(','); - datatype = col.getDatatype(); - value = cells.getContent(i); - if (value == null || value.isEmpty()) - sql.append("NULL"); - else if (datatype.equalsIgnoreCase("char") || datatype.equalsIgnoreCase("varchar") || datatype.equalsIgnoreCase("unsignedByte")) - sql.append('\'').append(value.replaceAll("'", "''").replaceAll("\0", "")).append('\''); - else{ - if (value.equalsIgnoreCase("nan")) - sql.append("'NaN'"); - else - sql.append(value.replaceAll("\0", "")); + // CASE: no custom definition: + if (customStdTables[i] == null){ + if (!supportsSchema) + stdTables[i].setDBName(STDSchema.TAPSCHEMA.label + "_" + stdTables[i].getADQLName()); + // add the table to the fetched or built-in schema: + tapSchema.addTable(stdTables[i]); } - i++; + // CASE: custom definition + else + stdTables[i] = customStdTables[i]; } - sql.append(");"); - try{ - Statement stmt = connection.createStatement(); - int nbInsertedRows = stmt.executeUpdate(sql.toString()); - logger.rowsInserted(this, table, nbInsertedRows); - }catch(SQLException se){ - logger.dbError("Impossible to insert a row into the table \"" + table.getFullName() + "\" !", se); - throw new DBException("Impossible to insert a row in the table \"" + table.getFullName() + "\" !", se); - } + return stdTables; + } + + /** + *

    Ensure the TAP_SCHEMA schema exists in the database AND it must especially drop all of its standard tables + * (schemas, tables, columns, keys and key_columns), if they exist.

    + * + *

    Important note: + * If TAP_SCHEMA already exists and contains other tables than the standard ones, they will not be dropped and they will stay in place. + *

    + * + * @param stmt The statement to use in order to interact with the database. + * @param stdTables List of all standard tables that must be (re-)created. + * They will be used just to know the name of the standard tables that should be dropped here. + * + * @throws SQLException If any error occurs while querying or updating the database. + */ + protected void resetTAPSchema(final Statement stmt, final TAPTable[] stdTables) throws SQLException{ + DatabaseMetaData dbMeta = connection.getMetaData(); + + // 1. Get the qualified DB schema name: + String dbSchemaName = (supportsSchema ? stdTables[0].getDBSchemaName() : null); + + /* 2. Test whether the schema TAP_SCHEMA exists + * and if it does not, create it: */ + if (dbSchemaName != null){ + // test whether the schema TAP_SCHEMA exists: + boolean hasTAPSchema = isSchemaExisting(dbSchemaName, dbMeta); + + // create TAP_SCHEMA if it does not exist: + if (!hasTAPSchema) + stmt.executeUpdate("CREATE SCHEMA " + translator.getQualifiedSchemaName(stdTables[0]) + ";"); + } + + // 2-bis. Drop all its standard tables: + dropTAPSchemaTables(stdTables, stmt, dbMeta); + } + + /** + *

    Remove/Drop all standard TAP_SCHEMA tables given in parameter.

    + * + *

    Note: + * To test the existence of tables to drop, {@link DatabaseMetaData#getTables(String, String, String, String[])} is called. + * Then the schema and table names are compared with the case sensitivity defined by the translator. + * Only tables matching with these comparisons will be dropped. + *

    + * + * @param stdTables Tables to drop. (they should be provided ordered by their creation order (see {@link #getCreationOrder(STDTable)})). + * @param stmt Statement to use in order to interact with the database. + * @param dbMeta Database metadata. Used to list all existing tables. + * + * @throws SQLException If any error occurs while querying or updating the database. + * + * @see JDBCTranslator#isCaseSensitive(IdentifierField) + */ + private void dropTAPSchemaTables(final TAPTable[] stdTables, final Statement stmt, final DatabaseMetaData dbMeta) throws SQLException{ + String[] stdTablesToDrop = new String[]{null,null,null,null,null}; + + ResultSet rs = null; + try{ + // Retrieve only the schema name and determine whether the search should be case sensitive: + String tapSchemaName = stdTables[0].getDBSchemaName(); + boolean schemaCaseSensitive = translator.isCaseSensitive(IdentifierField.SCHEMA); + boolean tableCaseSensitive = translator.isCaseSensitive(IdentifierField.TABLE); + + // Identify which standard TAP tables must be dropped: + rs = dbMeta.getTables(null, null, null, null); + while(rs.next()){ + String rsSchema = nullifyIfNeeded(rs.getString(2)), rsTable = rs.getString(3); + if (!supportsSchema || (tapSchemaName == null && rsSchema == null) || equals(rsSchema, tapSchemaName, schemaCaseSensitive)){ + int indStdTable; + indStdTable = getCreationOrder(isStdTable(rsTable, stdTables, tableCaseSensitive)); + if (indStdTable > -1){ + stdTablesToDrop[indStdTable] = (rsSchema != null ? "\"" + rsSchema + "\"." : "") + "\"" + rsTable + "\""; + } + } + } + }finally{ + close(rs); + } + + // Drop the existing tables (in the reverse order of creation): + for(int i = stdTablesToDrop.length - 1; i >= 0; i--){ + if (stdTablesToDrop[i] != null) + stmt.executeUpdate("DROP TABLE " + stdTablesToDrop[i] + ";"); + } + } + + /** + *

    Create the specified standard TAP_SCHEMA tables into the database.

    + * + *

    Important note: + * Only standard TAP_SCHEMA tables (schemas, tables, columns, keys and key_columns) can be created here. + * If the given table is not part of the schema TAP_SCHEMA (comparison done on the ADQL name case-sensitively) + * and is not a standard TAP_SCHEMA table (comparison done on the ADQL name case-sensitively), + * this function will do nothing and will throw an exception. + *

    + * + *

    Note: + * An extra column is added in TAP_SCHEMA.schemas, TAP_SCHEMA.tables and TAP_SCHEMA.columns: {@value #DB_NAME_COLUMN}. + * This column is particularly used when getting the TAP metadata from the database to alias some schema, table and/or column names in ADQL. + *

    + * + * @param table Table to create. + * @param stmt Statement to use in order to interact with the database. + * + * @throws DBException If the given table is not a standard TAP_SCHEMA table. + * @throws SQLException If any error occurs while querying or updating the database. + */ + protected void createTAPSchemaTable(final TAPTable table, final Statement stmt) throws DBException, SQLException{ + // 1. ENSURE THE GIVEN TABLE IS REALLY A TAP_SCHEMA TABLE (according to the ADQL names): + if (!table.getADQLSchemaName().equalsIgnoreCase(STDSchema.TAPSCHEMA.label) || TAPMetadata.resolveStdTable(table.getADQLName()) == null) + throw new DBException("Forbidden table creation: " + table + " is not a standard table of TAP_SCHEMA!"); + + // 2. BUILD THE SQL QUERY TO CREATE THE TABLE: + StringBuffer sql = new StringBuffer("CREATE TABLE "); + + // a. Write the fully qualified table name: + sql.append(translator.getTableName(table, supportsSchema)); + + // b. List all the columns: + sql.append('('); + Iterator it = table.getColumns(); + while(it.hasNext()){ + TAPColumn col = it.next(); + + // column name: + sql.append(translator.getColumnName(col)); + + // column type: + sql.append(' ').append(convertTypeToDB(col.getDatatype())); + + // last column ? + if (it.hasNext()) + sql.append(','); + } + + // b bis. Add the extra dbName column (giving the database name of a schema, table or column): + if ((supportsSchema && table.getADQLName().equalsIgnoreCase(STDTable.SCHEMAS.label)) || table.getADQLName().equalsIgnoreCase(STDTable.TABLES.label) || table.getADQLName().equalsIgnoreCase(STDTable.COLUMNS.label)) + sql.append(',').append(DB_NAME_COLUMN).append(" VARCHAR"); + + // c. Append the primary key definition, if needed: + String primaryKey = getPrimaryKeyDef(table.getADQLName()); + if (primaryKey != null) + sql.append(',').append(primaryKey); + + // d. End the query: + sql.append(')').append(';'); + + // 3. 
FINALLY CREATE THE TABLE: + stmt.executeUpdate(sql.toString()); + } + + /** + *

    Get the primary key corresponding to the specified table.

    + * + *

    If the specified table is not a standard TAP_SCHEMA table, NULL will be returned.

    + * + * @param tableName ADQL table name. + * + * @return The primary key definition (prefixed by a space) corresponding to the specified table (ex: " PRIMARY KEY(schema_name)"), + * or NULL if the specified table is not a standard TAP_SCHEMA table. + */ + private String getPrimaryKeyDef(final String tableName){ + STDTable stdTable = TAPMetadata.resolveStdTable(tableName); + if (stdTable == null) + return null; + + boolean caseSensitive = translator.isCaseSensitive(IdentifierField.COLUMN); + switch(stdTable){ + case SCHEMAS: + return " PRIMARY KEY(" + (caseSensitive ? "\"schema_name\"" : "schema_name") + ")"; + case TABLES: + return " PRIMARY KEY(" + (caseSensitive ? "\"table_name\"" : "table_name") + ")"; + case COLUMNS: + return " PRIMARY KEY(" + (caseSensitive ? "\"table_name\"" : "table_name") + ", " + (caseSensitive ? "\"column_name\"" : "column_name") + ")"; + case KEYS: + case KEY_COLUMNS: + return " PRIMARY KEY(" + (caseSensitive ? "\"key_id\"" : "key_id") + ")"; + default: + return null; + } + } + + /** + *

    Create the DB indexes corresponding to the given TAP_SCHEMA table.

    + * + *

    Important note: + * Only standard TAP_SCHEMA tables (schemas, tables, columns, keys and key_columns) can be created here. + * If the given table is not part of the schema TAP_SCHEMA (comparison done on the ADQL name case-sensitively) + * and is not a standard TAP_SCHEMA table (comparison done on the ADQL name case-sensitively), + * this function will do nothing and will throw an exception. + *

    + * + * @param table Table whose indexes must be created here. + * @param stmt Statement to use in order to interact with the database. + * + * @throws DBException If the given table is not a standard TAP_SCHEMA table. + * @throws SQLException If any error occurs while querying or updating the database. + */ + protected void createTAPTableIndexes(final TAPTable table, final Statement stmt) throws DBException, SQLException{ + // 1. Ensure the given table is really a TAP_SCHEMA table (according to the ADQL names): + if (!table.getADQLSchemaName().equalsIgnoreCase(STDSchema.TAPSCHEMA.label) || TAPMetadata.resolveStdTable(table.getADQLName()) == null) + throw new DBException("Forbidden index creation: " + table + " is not a standard table of TAP_SCHEMA!"); + + // Build the fully qualified DB name of the table: + final String dbTableName = translator.getTableName(table, supportsSchema); + + // Build the name prefix of all the indexes to create: + final String indexNamePrefix = "INDEX_" + ((table.getADQLSchemaName() != null) ? (table.getADQLSchemaName() + "_") : "") + table.getADQLName() + "_"; + + Iterator it = table.getColumns(); + while(it.hasNext()){ + TAPColumn col = it.next(); + // Create an index only for columns that have the 'indexed' flag: + if (col.isIndexed() && !isPartOfPrimaryKey(col.getADQLName())) + stmt.executeUpdate("CREATE INDEX " + indexNamePrefix + col.getADQLName() + " ON " + dbTableName + "(" + translator.getColumnName(col) + ");"); + } + } + + /** + * Tell whether the specified column is part of the primary key of its table. + * + * @param adqlName ADQL name of a column. + * + * @return true if the specified column is part of the primary key, + * false otherwise. 
+ */ + private boolean isPartOfPrimaryKey(final String adqlName){ + if (adqlName == null) + return false; + else + return (adqlName.equalsIgnoreCase("schema_name") || adqlName.equalsIgnoreCase("table_name") || adqlName.equalsIgnoreCase("column_name") || adqlName.equalsIgnoreCase("key_id")); + } + + /** + *

    Fill all the standard tables of TAP_SCHEMA (schemas, tables, columns, keys and key_columns).

    + * + *

    This function just call the following functions:

    + *
      + *
    1. {@link #fillSchemas(TAPTable, Iterator)}
    2. + *
    3. {@link #fillTables(TAPTable, Iterator)}
    4. + *
    5. {@link #fillColumns(TAPTable, Iterator)}
    6. + *
    7. {@link #fillKeys(TAPTable, TAPTable, Iterator)}
    8. + *
    + * + * @param meta All schemas and tables to list inside the TAP_SCHEMA tables. + * + * @throws DBException If rows can not be inserted because the SQL update query has failed. + * @throws SQLException If any other SQL exception occurs. + */ + protected void fillTAPSchema(final TAPMetadata meta) throws SQLException, DBException{ + TAPTable metaTable; + + // 1. Fill SCHEMAS: + metaTable = meta.getTable(STDSchema.TAPSCHEMA.label, STDTable.SCHEMAS.label); + Iterator allTables = fillSchemas(metaTable, meta.iterator()); + + // 2. Fill TABLES: + metaTable = meta.getTable(STDSchema.TAPSCHEMA.label, STDTable.TABLES.label); + Iterator allColumns = fillTables(metaTable, allTables); + allTables = null; + + // Fill COLUMNS: + metaTable = meta.getTable(STDSchema.TAPSCHEMA.label, STDTable.COLUMNS.label); + Iterator allKeys = fillColumns(metaTable, allColumns); + allColumns = null; + + // Fill KEYS and KEY_COLUMNS: + metaTable = meta.getTable(STDSchema.TAPSCHEMA.label, STDTable.KEYS.label); + TAPTable metaTable2 = meta.getTable(STDSchema.TAPSCHEMA.label, STDTable.KEY_COLUMNS.label); + fillKeys(metaTable, metaTable2, allKeys); + } + + /** + *

    Fill the standard table TAP_SCHEMA.schemas with the list of all published schemas.

    + * + *

    Note: + * Batch updates may be done here if its supported by the DBMS connection. + * In case of any failure while using this feature, it will be flagged as unsupported and one-by-one updates will be processed. + *

    + * + * @param metaTable Description of TAP_SCHEMA.schemas. + * @param itSchemas Iterator over the list of schemas. + * + * @return Iterator over the full list of all tables (whatever is their schema). + * + * @throws DBException If rows can not be inserted because the SQL update query has failed. + * @throws SQLException If any other SQL exception occurs. + */ + private Iterator fillSchemas(final TAPTable metaTable, final Iterator itSchemas) throws SQLException, DBException{ + List allTables = new ArrayList(); + + // Build the SQL update query: + StringBuffer sql = new StringBuffer("INSERT INTO "); + sql.append(translator.getTableName(metaTable, supportsSchema)).append(" ("); + sql.append(translator.getColumnName(metaTable.getColumn("schema_name"))); + sql.append(", ").append(translator.getColumnName(metaTable.getColumn("description"))); + sql.append(", ").append(translator.getColumnName(metaTable.getColumn("utype"))); + if (supportsSchema){ + sql.append(", ").append(DB_NAME_COLUMN); + sql.append(") VALUES (?, ?, ?, ?);"); + }else + sql.append(") VALUES (?, ?, ?);"); + + // Prepare the statement: + PreparedStatement stmt = null; + try{ + stmt = connection.prepareStatement(sql.toString()); + + // Execute the query for each schema: + int nbRows = 0; + while(itSchemas.hasNext()){ + TAPSchema schema = itSchemas.next(); + nbRows++; + + // list all tables of this schema: + appendAllInto(allTables, schema.iterator()); + + // add the schema entry into the DB: + stmt.setString(1, schema.getADQLName()); + stmt.setString(2, schema.getDescription()); + stmt.setString(3, schema.getUtype()); + if (supportsSchema) + stmt.setString(4, (schema.getDBName() == null || schema.getDBName().equals(schema.getADQLName())) ? null : schema.getDBName()); + executeUpdate(stmt, nbRows); + } + executeBatchUpdates(stmt, nbRows); + }finally{ + close(stmt); + } + + return allTables.iterator(); + } + + /** + *

	/**
	 * <p>Fill the standard table TAP_SCHEMA.tables with the list of all published tables.</p>
	 *
	 * <p><i>Note:
	 * 	Batch updates may be done here if it is supported by the DBMS connection.
	 * 	In case of any failure while using this feature, it will be flagged as unsupported and one-by-one updates will be processed.
	 * </i></p>
	 *
	 * @param metaTable	Description of TAP_SCHEMA.tables.
	 * @param itTables	Iterator over the list of tables.
	 *
	 * @return	Iterator over the full list of all columns (whatever is their table).
	 *
	 * @throws DBException	If rows can not be inserted because the SQL update query has failed.
	 * @throws SQLException	If any other SQL exception occurs.
	 */
	private Iterator<TAPColumn> fillTables(final TAPTable metaTable, final Iterator<TAPTable> itTables) throws SQLException, DBException{
		// Accumulates the columns of every visited table, for the next filling step:
		List<TAPColumn> allColumns = new ArrayList<TAPColumn>();

		// Build the SQL update query:
		StringBuffer sql = new StringBuffer("INSERT INTO ");
		sql.append(translator.getTableName(metaTable, supportsSchema)).append(" (");
		sql.append(translator.getColumnName(metaTable.getColumn("schema_name")));
		sql.append(", ").append(translator.getColumnName(metaTable.getColumn("table_name")));
		sql.append(", ").append(translator.getColumnName(metaTable.getColumn("table_type")));
		sql.append(", ").append(translator.getColumnName(metaTable.getColumn("description")));
		sql.append(", ").append(translator.getColumnName(metaTable.getColumn("utype")));
		sql.append(", ").append(DB_NAME_COLUMN);
		sql.append(") VALUES (?, ?, ?, ?, ?, ?);");

		// Prepare the statement:
		PreparedStatement stmt = null;
		try{
			stmt = connection.prepareStatement(sql.toString());

			// Execute the query for each table:
			int nbRows = 0;
			while(itTables.hasNext()){
				TAPTable table = itTables.next();
				nbRows++;

				// list all columns of this table:
				appendAllInto(allColumns, table.getColumns());

				// add the table entry into the DB:
				stmt.setString(1, table.getADQLSchemaName());
				// keep the schema prefix in the table name if it was initially qualified:
				if (table.isInitiallyQualified())
					stmt.setString(2, table.getADQLSchemaName() + "." + table.getADQLName());
				else
					stmt.setString(2, table.getADQLName());
				stmt.setString(3, table.getType().toString());
				stmt.setString(4, table.getDescription());
				stmt.setString(5, table.getUtype());
				// store the DB name only if it really differs from the ADQL name:
				stmt.setString(6, (table.getDBName() == null || table.getDBName().equals(table.getADQLName())) ? null : table.getDBName());
				executeUpdate(stmt, nbRows);
			}
			executeBatchUpdates(stmt, nbRows);
		}finally{
			close(stmt);
		}

		return allColumns.iterator();
	}

	/**
	 * <p>Fill the standard table TAP_SCHEMA.columns with the list of all published columns.</p>
	 *
	 * <p><i>Note:
	 * 	Batch updates may be done here if it is supported by the DBMS connection.
	 * 	In case of any failure while using this feature, it will be flagged as unsupported and one-by-one updates will be processed.
	 * </i></p>
	 *
	 * @param metaTable	Description of TAP_SCHEMA.columns.
	 * @param itColumns	Iterator over the list of columns.
	 *
	 * @return	Iterator over the full list of all foreign keys.
	 *
	 * @throws DBException	If rows can not be inserted because the SQL update query has failed.
	 * @throws SQLException	If any other SQL exception occurs.
	 */
	private Iterator<TAPForeignKey> fillColumns(final TAPTable metaTable, final Iterator<TAPColumn> itColumns) throws SQLException, DBException{
		// Accumulates the foreign keys of every visited column, for the next filling step:
		List<TAPForeignKey> allKeys = new ArrayList<TAPForeignKey>();

		// Build the SQL update query:
		StringBuffer sql = new StringBuffer("INSERT INTO ");
		sql.append(translator.getTableName(metaTable, supportsSchema)).append(" (");
		sql.append(translator.getColumnName(metaTable.getColumn("table_name")));
		sql.append(", ").append(translator.getColumnName(metaTable.getColumn("column_name")));
		sql.append(", ").append(translator.getColumnName(metaTable.getColumn("description")));
		sql.append(", ").append(translator.getColumnName(metaTable.getColumn("unit")));
		sql.append(", ").append(translator.getColumnName(metaTable.getColumn("ucd")));
		sql.append(", ").append(translator.getColumnName(metaTable.getColumn("utype")));
		sql.append(", ").append(translator.getColumnName(metaTable.getColumn("datatype")));
		sql.append(", ").append(translator.getColumnName(metaTable.getColumn("size")));
		sql.append(", ").append(translator.getColumnName(metaTable.getColumn("principal")));
		sql.append(", ").append(translator.getColumnName(metaTable.getColumn("indexed")));
		sql.append(", ").append(translator.getColumnName(metaTable.getColumn("std")));
		sql.append(", ").append(DB_NAME_COLUMN);
		sql.append(") VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);");

		// Prepare the statement:
		PreparedStatement stmt = null;
		try{
			stmt = connection.prepareStatement(sql.toString());

			// Execute the query for each column:
			int nbRows = 0;
			while(itColumns.hasNext()){
				TAPColumn col = itColumns.next();
				nbRows++;

				// list all foreign keys of this column:
				appendAllInto(allKeys, col.getTargets());

				// add the column entry into the DB:
				// (prefix the table name by its schema if the table was initially qualified,
				//  or if the parent is not a TAPTable and so can not be asked)
				if (!(col.getTable() instanceof TAPTable) || ((TAPTable)col.getTable()).isInitiallyQualified())
					stmt.setString(1, col.getTable().getADQLSchemaName() + "." + col.getTable().getADQLName());
				else
					stmt.setString(1, col.getTable().getADQLName());
				stmt.setString(2, col.getADQLName());
				stmt.setString(3, col.getDescription());
				stmt.setString(4, col.getUnit());
				stmt.setString(5, col.getUcd());
				stmt.setString(6, col.getUtype());
				stmt.setString(7, col.getDatatype().type.toString());
				stmt.setInt(8, col.getDatatype().length);
				// boolean flags are stored as 0/1 integers:
				stmt.setInt(9, col.isPrincipal() ? 1 : 0);
				stmt.setInt(10, col.isIndexed() ? 1 : 0);
				stmt.setInt(11, col.isStd() ? 1 : 0);
				// store the DB name only if it really differs from the ADQL name:
				stmt.setString(12, (col.getDBName() == null || col.getDBName().equals(col.getADQLName())) ? null : col.getDBName());
				executeUpdate(stmt, nbRows);
			}
			executeBatchUpdates(stmt, nbRows);
		}finally{
			close(stmt);
		}

		return allKeys.iterator();
	}

	/**
	 * <p>Fill the standard tables TAP_SCHEMA.keys and TAP_SCHEMA.key_columns with the list of all published foreign keys.</p>
	 *
	 * <p><i>Note:
	 * 	Batch updates may be done here if it is supported by the DBMS connection.
	 * 	In case of any failure while using this feature, it will be flagged as unsupported and one-by-one updates will be processed.
	 * </i></p>
	 *
	 * @param metaKeys			Description of TAP_SCHEMA.keys.
	 * @param metaKeyColumns	Description of TAP_SCHEMA.key_columns.
	 * @param itKeys			Iterator over the list of foreign keys.
	 *
	 * @throws DBException	If rows can not be inserted because the SQL update query has failed.
	 * @throws SQLException	If any other SQL exception occurs.
	 */
	private void fillKeys(final TAPTable metaKeys, final TAPTable metaKeyColumns, final Iterator<TAPForeignKey> itKeys) throws SQLException, DBException{
		// Build the SQL update query for KEYS:
		StringBuffer sqlKeys = new StringBuffer("INSERT INTO ");
		sqlKeys.append(translator.getTableName(metaKeys, supportsSchema)).append(" (");
		sqlKeys.append(translator.getColumnName(metaKeys.getColumn("key_id")));
		sqlKeys.append(", ").append(translator.getColumnName(metaKeys.getColumn("from_table")));
		sqlKeys.append(", ").append(translator.getColumnName(metaKeys.getColumn("target_table")));
		sqlKeys.append(", ").append(translator.getColumnName(metaKeys.getColumn("description")));
		sqlKeys.append(", ").append(translator.getColumnName(metaKeys.getColumn("utype")));
		sqlKeys.append(") VALUES (?, ?, ?, ?, ?);");

		PreparedStatement stmtKeys = null, stmtKeyCols = null;
		try{
			// Prepare the statement for KEYS:
			stmtKeys = connection.prepareStatement(sqlKeys.toString());

			// Build the SQL update query for KEY_COLUMNS:
			StringBuffer sqlKeyCols = new StringBuffer("INSERT INTO ");
			sqlKeyCols.append(translator.getTableName(metaKeyColumns, supportsSchema)).append(" (");
			sqlKeyCols.append(translator.getColumnName(metaKeyColumns.getColumn("key_id")));
			sqlKeyCols.append(", ").append(translator.getColumnName(metaKeyColumns.getColumn("from_column")));
			sqlKeyCols.append(", ").append(translator.getColumnName(metaKeyColumns.getColumn("target_column")));
			sqlKeyCols.append(") VALUES (?, ?, ?);");

			// Prepare the statement for KEY_COLUMNS:
			stmtKeyCols = connection.prepareStatement(sqlKeyCols.toString());

			// Execute the queries for each foreign key:
			int nbKeys = 0, nbKeyColumns = 0;
			while(itKeys.hasNext()){
				TAPForeignKey key = itKeys.next();
				nbKeys++;

				// add the key entry into KEYS:
				stmtKeys.setString(1, key.getKeyId());
				// keep the schema prefix in table names if they were initially qualified:
				if (key.getFromTable().isInitiallyQualified())
					stmtKeys.setString(2, key.getFromTable().getADQLSchemaName() + "." + key.getFromTable().getADQLName());
				else
					stmtKeys.setString(2, key.getFromTable().getADQLName());
				if (key.getTargetTable().isInitiallyQualified())
					stmtKeys.setString(3, key.getTargetTable().getADQLSchemaName() + "." + key.getTargetTable().getADQLName());
				else
					stmtKeys.setString(3, key.getTargetTable().getADQLName());
				stmtKeys.setString(4, key.getDescription());
				stmtKeys.setString(5, key.getUtype());
				executeUpdate(stmtKeys, nbKeys);

				// add the key columns into KEY_COLUMNS (one row per source/target column association):
				Iterator<Map.Entry<String,String>> itAssoc = key.iterator();
				while(itAssoc.hasNext()){
					nbKeyColumns++;
					Map.Entry<String,String> assoc = itAssoc.next();
					stmtKeyCols.setString(1, key.getKeyId());
					stmtKeyCols.setString(2, assoc.getKey());
					stmtKeyCols.setString(3, assoc.getValue());
					executeUpdate(stmtKeyCols, nbKeyColumns);
				}
			}

			executeBatchUpdates(stmtKeys, nbKeys);
			executeBatchUpdates(stmtKeyCols, nbKeyColumns);
		}finally{
			close(stmtKeys);
			close(stmtKeyCols);
		}
	}

	/* ***************** */
	/* UPLOAD MANAGEMENT */
	/* ***************** */

	/**
	 * <p><i><b>Important note:</b>
	 * 	Only tables uploaded by users can be created in the database. To ensure that, the schema name of this table MUST be {@link STDSchema#UPLOADSCHEMA} ("TAP_UPLOAD") in ADQL.
	 * 	If it has another ADQL name, an exception will be thrown. Of course, the DB name of this schema MAY be different.
	 * </i></p>
	 *
	 * <p><i><b>Important note:</b>
	 * 	This function may modify the given {@link TAPTable} object if schemas are not supported by this connection.
	 * 	In this case, this function will prefix the table's DB name by the schema's DB name directly inside the given
	 * 	{@link TAPTable} object. Then the DB name of the schema will be set to NULL.
	 * </i></p>
	 *
	 * <p><i>Note:
	 * 	If the upload schema does not already exist in the database, it will be created.
	 * </i></p>
	 *
	 * @see tap.db.DBConnection#addUploadedTable(tap.metadata.TAPTable, tap.data.TableIterator)
	 * @see #checkUploadedTableDef(TAPTable)
	 */
	@Override
	public boolean addUploadedTable(TAPTable tableDef, TableIterator data) throws DBException, DataReadException{
		// If no table to upload, consider it has been dropped and return TRUE:
		if (tableDef == null)
			return true;

		// Check the table is well defined (and particularly the schema is well set with an ADQL name = TAP_UPLOAD):
		checkUploadedTableDef(tableDef);

		Statement stmt = null;
		try{

			// Start a transaction:
			startTransaction();
			// ...create a statement:
			stmt = connection.createStatement();

			DatabaseMetaData dbMeta = connection.getMetaData();

			// 1. Create the upload schema, if it does not already exist:
			if (!isSchemaExisting(tableDef.getDBSchemaName(), dbMeta)){
				stmt.executeUpdate("CREATE SCHEMA " + translator.getQualifiedSchemaName(tableDef) + ";");
				if (logger != null)
					logger.logDB(LogLevel.INFO, this, "SCHEMA_CREATED", "Schema \"" + tableDef.getADQLSchemaName() + "\" (in DB: " + translator.getQualifiedSchemaName(tableDef) + ") created.", null);
			}
			// 1bis. Ensure the table does not already exist and if it is the case, throw an understandable exception:
			else if (isTableExisting(tableDef.getDBSchemaName(), tableDef.getDBName(), dbMeta)){
				DBException de = new DBException("Impossible to create the user uploaded table in the database: " + translator.getTableName(tableDef, supportsSchema) + "! This table already exists.");
				if (logger != null)
					logger.logDB(LogLevel.ERROR, this, "ADD_UPLOAD_TABLE", de.getMessage(), de);
				throw de;
			}

			// 2. Create the table:
			// ...build the SQL query:
			StringBuffer sqlBuf = new StringBuffer("CREATE TABLE ");
			sqlBuf.append(translator.getTableName(tableDef, supportsSchema)).append(" (");
			Iterator<TAPColumn> it = tableDef.getColumns();
			while(it.hasNext()){
				TAPColumn col = it.next();
				// column name:
				sqlBuf.append(translator.getColumnName(col));
				// column type:
				sqlBuf.append(' ').append(convertTypeToDB(col.getDatatype()));
				// last column ?
				if (it.hasNext())
					sqlBuf.append(',');
			}
			sqlBuf.append(");");
			// ...execute the update query:
			stmt.executeUpdate(sqlBuf.toString());

			// 3. Fill the table:
			fillUploadedTable(tableDef, data);

			// Commit the transaction:
			commit();

			// Log the end:
			if (logger != null)
				logger.logDB(LogLevel.INFO, this, "TABLE_CREATED", "Table \"" + tableDef.getADQLName() + "\" (in DB: " + translator.getTableName(tableDef, supportsSchema) + ") created.", null);

			return true;

		}catch(SQLException se){
			rollback();
			if (logger != null)
				logger.logDB(LogLevel.WARNING, this, "ADD_UPLOAD_TABLE", "Impossible to create the uploaded table: " + translator.getTableName(tableDef, supportsSchema) + "!", se);
			throw new DBException("Impossible to create the uploaded table: " + translator.getTableName(tableDef, supportsSchema) + "!", se);
		}catch(DBException de){
			rollback();
			throw de;
		}catch(DataReadException dre){
			rollback();
			throw dre;
		}finally{
			close(stmt);
			endTransaction();
		}
	}

	/**
	 * <p>Fill the table uploaded by the user with the given data.</p>
	 *
	 * <p><i>Note:
	 * 	Batch updates may be done here if it is supported by the DBMS connection.
	 * 	In case of any failure while using this feature, it will be flagged as unsupported and one-by-one updates will be processed.
	 * </i></p>
	 *
	 * <p><i>Note:
	 * 	This function proceeds to a formatting of TIMESTAMP and GEOMETRY (point, circle, box, polygon) values.
	 * </i></p>
	 *
	 * @param metaTable	Description of the updated table.
	 * @param data		Iterator over the rows to insert.
	 *
	 * @return	Number of inserted rows.
	 *
	 * @throws DBException			If rows can not be inserted because the SQL update query has failed.
	 * @throws SQLException			If any other SQL exception occurs.
	 * @throws DataReadException	If there is any error while reading the data from the given {@link TableIterator} (and particularly if a limit - in byte or row - has been reached).
	 */
	protected int fillUploadedTable(final TAPTable metaTable, final TableIterator data) throws SQLException, DBException, DataReadException{
		// 1. Build the SQL update query:
		StringBuffer sql = new StringBuffer("INSERT INTO ");
		StringBuffer varParam = new StringBuffer();
		// ...table name:
		sql.append(translator.getTableName(metaTable, supportsSchema)).append(" (");
		// ...list of columns:
		TAPColumn[] cols = data.getMetadata();
		for(int c = 0; c < cols.length; c++){
			if (c > 0){
				sql.append(", ");
				varParam.append(", ");
			}
			sql.append(translator.getColumnName(cols[c]));
			varParam.append('?');
		}
		// ...values pattern:
		sql.append(") VALUES (").append(varParam).append(");");

		// 2. Prepare the statement:
		PreparedStatement stmt = null;
		int nbRows = 0;
		try{
			stmt = connection.prepareStatement(sql.toString());

			// 3. Execute the query for each given row:
			while(data.nextRow()){
				nbRows++;
				int c = 1;
				while(data.hasNextCol()){
					Object val = data.nextCol();
					if (val != null && cols[c - 1] != null){
						/* TIMESTAMP FORMATTING */
						if (cols[c - 1].getDatatype().type == DBDatatype.TIMESTAMP){
							try{
								val = new Timestamp(ISO8601Format.parse(val.toString()));
							}catch(ParseException pe){
								if (logger != null)
									logger.logDB(LogLevel.ERROR, this, "UPLOAD", "[l. " + nbRows + ", c. " + c + "] Unexpected date format for the value: \"" + val + "\"! A date formatted in ISO8601 was expected.", pe);
								throw new DBException("[l. " + nbRows + ", c. " + c + "] Unexpected date format for the value: \"" + val + "\"! A date formatted in ISO8601 was expected.", pe);
							}
						}
						/* GEOMETRY FORMATTING */
						else if (cols[c - 1].getDatatype().type == DBDatatype.POINT || cols[c - 1].getDatatype().type == DBDatatype.REGION){
							Region region;
							// parse the region as an STC-S expression:
							try{
								region = STCS.parseRegion(val.toString());
							}catch(adql.parser.ParseException e){
								if (logger != null)
									logger.logDB(LogLevel.ERROR, this, "UPLOAD", "[l. " + nbRows + ", c. " + c + "] Incorrect STC-S syntax for the geometrical value \"" + val + "\"! " + e.getMessage(), e);
								throw new DataReadException("[l. " + nbRows + ", c. " + c + "] Incorrect STC-S syntax for the geometrical value \"" + val + "\"! " + e.getMessage(), e);
							}
							// translate this STC region into the corresponding column value:
							try{
								val = translator.translateGeometryToDB(region);
							}catch(adql.parser.ParseException e){
								if (logger != null)
									logger.logDB(LogLevel.ERROR, this, "UPLOAD", "[l. " + nbRows + ", c. " + c + "] Impossible to import the ADQL geometry \"" + val + "\" into the database! " + e.getMessage(), e);
								throw new DataReadException("[l. " + nbRows + ", c. " + c + "] Impossible to import the ADQL geometry \"" + val + "\" into the database! " + e.getMessage(), e);
							}
						}
						/* BOOLEAN CASE (more generally, type incompatibility) */
						// NOTE(review): the "val != null" re-check below is redundant — already guaranteed by the enclosing if:
						else if (val != null && cols[c - 1].getDatatype().type == DBDatatype.SMALLINT && val instanceof Boolean)
							val = ((Boolean)val) ? (short)1 : (short)0;
					}
					stmt.setObject(c++, val);
				}
				executeUpdate(stmt, nbRows);
			}
			executeBatchUpdates(stmt, nbRows);

			return nbRows;

		}finally{
			close(stmt);
		}
	}

	/**
	 * <p><i><b>Important note:</b>
	 * 	Only tables uploaded by users can be dropped from the database. To ensure that, the schema name of this table MUST be {@link STDSchema#UPLOADSCHEMA} ("TAP_UPLOAD") in ADQL.
	 * 	If it has another ADQL name, an exception will be thrown. Of course, the DB name of this schema MAY be different.
	 * </i></p>
	 *
	 * <p><i><b>Important note:</b>
	 * 	This function may modify the given {@link TAPTable} object if schemas are not supported by this connection.
	 * 	In this case, this function will prefix the table's DB name by the schema's DB name directly inside the given
	 * 	{@link TAPTable} object. Then the DB name of the schema will be set to NULL.
	 * </i></p>
	 *
	 * <p><i>Note:
	 * 	This implementation is able to drop only one uploaded table. So if this function finds more than one table matching to the given one,
	 * 	an exception will be thrown and no table will be dropped.
	 * </i></p>
	 *
	 * @see tap.db.DBConnection#dropUploadedTable(tap.metadata.TAPTable)
	 * @see #checkUploadedTableDef(TAPTable)
	 */
	@Override
	public boolean dropUploadedTable(final TAPTable tableDef) throws DBException{
		// If no table to upload, consider it has been dropped and return TRUE:
		if (tableDef == null)
			return true;

		// Check the table is well defined (and particularly the schema is well set with an ADQL name = TAP_UPLOAD):
		checkUploadedTableDef(tableDef);

		Statement stmt = null;
		try{

			// Check the existence of the table to drop:
			if (!isTableExisting(tableDef.getDBSchemaName(), tableDef.getDBName(), connection.getMetaData()))
				return true;

			// Execute the update:
			stmt = connection.createStatement();
			int cnt = stmt.executeUpdate("DROP TABLE " + translator.getTableName(tableDef, supportsSchema) + ";");

			// Log the end:
			if (logger != null){
				if (cnt >= 0)
					logger.logDB(LogLevel.INFO, this, "TABLE_DROPPED", "Table \"" + tableDef.getADQLName() + "\" (in DB: " + translator.getTableName(tableDef, supportsSchema) + ") dropped.", null);
				else
					logger.logDB(LogLevel.ERROR, this, "TABLE_DROPPED", "Table \"" + tableDef.getADQLName() + "\" (in DB: " + translator.getTableName(tableDef, supportsSchema) + ") NOT dropped.", null);
			}

			// Ensure the update is successful:
			return (cnt >= 0);

		}catch(SQLException se){
			if (logger != null)
				logger.logDB(LogLevel.WARNING, this, "DROP_UPLOAD_TABLE", "Impossible to drop the uploaded table: " + translator.getTableName(tableDef, supportsSchema) + "!", se);
			throw new DBException("Impossible to drop the uploaded table: " + translator.getTableName(tableDef, supportsSchema) + "!", se);
		}finally{
			close(stmt);
		}
	}

	/**
	 * <p>Ensures that the given table MUST be inside the upload schema in ADQL.</p>
	 *
	 * <p>Thus, the following cases are taken into account:</p>
	 * <ul>
	 * 	<li>
	 * 		The schema name of the given table MUST be {@link STDSchema#UPLOADSCHEMA} ("TAP_UPLOAD") in ADQL.
	 * 		If it has another ADQL name, an exception will be thrown. Of course, the DB name of this schema MAY be different.
	 * 	</li>
	 * 	<li>
	 * 		If schemas are not supported by this connection, this function will prefix the table DB name by the schema DB name directly
	 * 		inside the given {@link TAPTable} object. Then the DB name of the schema will be set to NULL.
	 * 	</li>
	 * </ul>
	 *
	 * @param tableDef	Definition of the table to create/drop.
	 *
	 * @throws DBException	If the given table is not in a schema
	 *                    	or if the ADQL name of this schema is not {@link STDSchema#UPLOADSCHEMA} ("TAP_UPLOAD").
	 */
	protected void checkUploadedTableDef(final TAPTable tableDef) throws DBException{
		// If the table has no defined schema or if the ADQL name of the schema is not TAP_UPLOAD, throw an exception:
		if (tableDef.getSchema() == null || !tableDef.getSchema().getADQLName().equals(STDSchema.UPLOADSCHEMA.label))
			throw new DBException("Missing upload schema! An uploaded table must be inside a schema whose the ADQL name is strictly equals to \"" + STDSchema.UPLOADSCHEMA.label + "\" (but the DB name may be different).");

		// No schema support => fake the schema by prefixing the table's DB name:
		if (!supportsSchema){
			if (tableDef.getADQLSchemaName() != null && tableDef.getADQLSchemaName().trim().length() > 0 && !tableDef.getDBName().startsWith(tableDef.getADQLSchemaName() + "_"))
				tableDef.setDBName(tableDef.getADQLSchemaName() + "_" + tableDef.getDBName());
			if (tableDef.getSchema() != null)
				tableDef.getSchema().setDBName(null);
		}
	}

	/* ************** */
	/* TOOL FUNCTIONS */
	/* ************** */

    Convert the given TAP type into the corresponding DBMS column type.

    + * + *

    + * This function tries first the type conversion using the translator ({@link JDBCTranslator#convertTypeToDB(DBType)}). + * If it fails, a default conversion is done considering all the known types of the following DBMS: + * PostgreSQL, SQLite, MySQL, Oracle and JavaDB/Derby. + *

    + * + * @param type TAP type to convert. + * + * @return The corresponding DBMS type. + * + * @see JDBCTranslator#convertTypeToDB(DBType) + * @see #defaultTypeConversion(DBType) + */ + protected String convertTypeToDB(final DBType type){ + String dbmsType = translator.convertTypeToDB(type); + return (dbmsType == null) ? defaultTypeConversion(type) : dbmsType; + } + + /** + *

	/**
	 * <p>Get the DBMS compatible datatype corresponding to the given column {@link DBType}.</p>
	 *
	 * <p><i>Note 1:
	 * 	This function is able to generate a DB datatype compatible with the currently used DBMS.
	 * 	In this current implementation, only Postgresql, Oracle, SQLite, MySQL and Java DB/Derby have been considered.
	 * 	Most of the TAP types have been tested only with Postgresql and SQLite without any problem.
	 * 	If the DBMS you are using has not been considered, note that this function will return the TAP type expression by default.
	 * </i></p>
	 *
	 * <p><i>Note 2:
	 * 	In case the given datatype is NULL or not managed here, the DBMS type corresponding to "VARCHAR" will be returned.
	 * </i></p>
	 *
	 * <p><i>Note 3:
	 * 	The special TAP types POINT and REGION are converted into the DBMS type corresponding to "VARCHAR".
	 * </i></p>
	 *
	 * @param datatype	Column TAP type.
	 *
	 * @return	The corresponding DB type, or NULL if the given type is not managed or is NULL.
	 */
	protected String defaultTypeConversion(DBType datatype){
		// NULL type => fall back on VARCHAR (cf Note 2):
		if (datatype == null)
			datatype = new DBType(DBDatatype.VARCHAR);

		switch(datatype.type){

			case SMALLINT:
				return dbms.equals("sqlite") ? "INTEGER" : "SMALLINT";

			case INTEGER:
			case REAL:
				return datatype.type.toString();

			case BIGINT:
				if (dbms.equals("oracle"))
					return "NUMBER(19,0)";
				else if (dbms.equals("sqlite"))
					return "INTEGER";
				else
					return "BIGINT";

			case DOUBLE:
				if (dbms.equals("postgresql") || dbms.equals("oracle"))
					return "DOUBLE PRECISION";
				else if (dbms.equals("sqlite"))
					return "REAL";
				else
					return "DOUBLE";

			case BINARY:
				if (dbms.equals("postgresql"))
					return "bytea";
				else if (dbms.equals("sqlite"))
					return "BLOB";
				else if (dbms.equals("oracle"))
					return "RAW" + (datatype.length > 0 ? "(" + datatype.length + ")" : "");
				else if (dbms.equals("derby"))
					return "CHAR" + (datatype.length > 0 ? "(" + datatype.length + ")" : "") + " FOR BIT DATA";
				else
					return datatype.type.toString();

			case VARBINARY:
				if (dbms.equals("postgresql"))
					return "bytea";
				else if (dbms.equals("sqlite"))
					return "BLOB";
				else if (dbms.equals("oracle"))
					return "LONG RAW" + (datatype.length > 0 ? "(" + datatype.length + ")" : "");
				else if (dbms.equals("derby"))
					return "VARCHAR" + (datatype.length > 0 ? "(" + datatype.length + ")" : "") + " FOR BIT DATA";
				else
					return datatype.type.toString();

			case CHAR:
				if (dbms.equals("sqlite"))
					return "TEXT";
				else
					return "CHAR";

			case BLOB:
				if (dbms.equals("postgresql"))
					return "bytea";
				else
					return "BLOB";

			case CLOB:
				if (dbms.equals("postgresql") || dbms.equals("mysql") || dbms.equals("sqlite"))
					return "TEXT";
				else
					return "CLOB";

			case TIMESTAMP:
				// SQLite has no native timestamp type => store it as text:
				if (dbms.equals("sqlite"))
					return "TEXT";
				else
					return "TIMESTAMP";

			// POINT and REGION are serialized as strings (cf Note 3):
			case POINT:
			case REGION:
			case VARCHAR:
			default:
				if (dbms.equals("sqlite"))
					return "TEXT";
				else
					return "VARCHAR";
		}
	}

	/**
	 * <p>Start a transaction.</p>
	 *
	 * <p>
	 * 	Basically, if transactions are supported by this connection, the flag AutoCommit is just turned off.
	 * 	It will be turned on again when {@link #endTransaction()} is called.
	 * </p>
	 *
	 * <p>If transactions are not supported by this connection, nothing is done.</p>
	 *
	 * <p><i><b>Important note:</b>
	 * 	If any error interrupts the START TRANSACTION operation, transactions will be afterwards considered as not supported by this connection.
	 * 	So, subsequent call to this function (and any other transaction related function) will never do anything.
	 * </i></p>
	 *
	 * @throws DBException	If it is impossible to start a transaction though transactions are supported by this connection.
	 *                    	If these are not supported, this error can never be thrown.
	 */
	protected void startTransaction() throws DBException{
		try{
			if (supportsTransaction){
				connection.setAutoCommit(false);
				if (logger != null)
					logger.logDB(LogLevel.INFO, this, "START_TRANSACTION", "Transaction STARTED.", null);
			}
		}catch(SQLException se){
			// any failure disables transactions for the rest of this connection's life:
			supportsTransaction = false;
			if (logger != null)
				logger.logDB(LogLevel.ERROR, this, "START_TRANSACTION", "Transaction STARTing impossible!", se);
			throw new DBException("Transaction STARTing impossible!", se);
		}
	}

	/**
	 * <p>Commit the current transaction.</p>
	 *
	 * <p>
	 * 	{@link #startTransaction()} must have been called before. If it's not the case the connection
	 * 	may throw a {@link SQLException} which will be transformed into a {@link DBException} here.
	 * </p>
	 *
	 * <p>If transactions are not supported by this connection, nothing is done.</p>
	 *
	 * <p><i><b>Important note:</b>
	 * 	If any error interrupts the COMMIT operation, transactions will be afterwards considered as not supported by this connection.
	 * 	So, subsequent call to this function (and any other transaction related function) will never do anything.
	 * </i></p>
	 *
	 * @throws DBException	If it is impossible to commit a transaction though transactions are supported by this connection.
	 *                    	If these are not supported, this error can never be thrown.
	 */
	protected void commit() throws DBException{
		try{
			if (supportsTransaction){
				connection.commit();
				if (logger != null)
					logger.logDB(LogLevel.INFO, this, "COMMIT", "Transaction COMMITED.", null);
			}
		}catch(SQLException se){
			// any failure disables transactions for the rest of this connection's life:
			supportsTransaction = false;
			if (logger != null)
				logger.logDB(LogLevel.ERROR, this, "COMMIT", "Transaction COMMIT impossible!", se);
			throw new DBException("Transaction COMMIT impossible!", se);
		}
	}

	/**
	 * <p>Rollback the current transaction.</p>
	 *
	 * <p>
	 * 	{@link #startTransaction()} must have been called before. If it's not the case the connection
	 * 	may throw a {@link SQLException}, which is caught and only logged here.
	 * </p>
	 *
	 * <p>If transactions are not supported by this connection, nothing is done.</p>
	 *
	 * <p><i><b>Important note:</b>
	 * 	If any error interrupts the ROLLBACK operation, transactions will be considered afterwards as not supported by this connection.
	 * 	So, subsequent call to this function (and any other transaction related function) will never do anything.
	 * </i></p>
	 *
	 * <p><i>Note:
	 * 	This function throws no exception: a ROLLBACK failure is only logged.
	 * </i></p>
	 */
	protected void rollback(){
		try{
			if (supportsTransaction){
				connection.rollback();
				if (logger != null)
					logger.logDB(LogLevel.INFO, this, "ROLLBACK", "Transaction ROLLBACKED.", null);
			}
		}catch(SQLException se){
			// any failure disables transactions for the rest of this connection's life:
			supportsTransaction = false;
			if (logger != null)
				logger.logDB(LogLevel.ERROR, this, "ROLLBACK", "Transaction ROLLBACK impossible!", se);
		}
	}

	/**
	 * <p>End the current transaction.</p>
	 *
	 * <p>
	 * 	Basically, if transactions are supported by this connection, the flag AutoCommit is just turned on.
	 * </p>
	 *
	 * <p>If transactions are not supported by this connection, nothing is done.</p>
	 *
	 * <p><i><b>Important note:</b>
	 * 	If any error interrupts the END TRANSACTION operation, transactions will be afterwards considered as not supported by this connection.
	 * 	So, subsequent call to this function (and any other transaction related function) will never do anything.
	 * </i></p>
	 *
	 * <p><i>Note:
	 * 	This function throws no exception: an END TRANSACTION failure is only logged.
	 * </i></p>
	 */
	protected void endTransaction(){
		try{
			if (supportsTransaction){
				connection.setAutoCommit(true);
				if (logger != null)
					logger.logDB(LogLevel.INFO, this, "END_TRANSACTION", "Transaction ENDED.", null);
			}
		}catch(SQLException se){
			// any failure disables transactions for the rest of this connection's life:
			supportsTransaction = false;
			if (logger != null)
				logger.logDB(LogLevel.ERROR, this, "END_TRANSACTION", "Transaction ENDing impossible!", se);
		}
	}

    Close silently the given {@link ResultSet}.

    + * + *

    If the given {@link ResultSet} is NULL, nothing (even exception/error) happens.

    + * + *

    + * If any {@link SQLException} occurs during this operation, it is caught and just logged + * (see {@link TAPLog#logDB(uws.service.log.UWSLog.LogLevel, DBConnection, String, String, Throwable)}). + * No error is thrown and nothing else is done. + *

    + * + * @param rs {@link ResultSet} to close. + */ + protected final void close(final ResultSet rs){ + try{ + if (rs != null) + rs.close(); + }catch(SQLException se){ + if (logger != null) + logger.logDB(LogLevel.WARNING, this, "CLOSE", "Can not close a ResultSet!", null); + } + } + + /** + *

    Close silently the given {@link Statement}.

    + * + *

    If the given {@link Statement} is NULL, nothing (even exception/error) happens.

    + * + *

    + * If any {@link SQLException} occurs during this operation, it is caught and just logged + * (see {@link TAPLog#logDB(uws.service.log.UWSLog.LogLevel, DBConnection, String, String, Throwable)}). + * No error is thrown and nothing else is done. + *

    + * + * @param stmt {@link Statement} to close. + */ + protected final void close(final Statement stmt){ + try{ + if (stmt != null) + stmt.close(); + }catch(SQLException se){ + if (logger != null) + logger.logDB(LogLevel.WARNING, this, "CLOSE", "Can not close a Statement!", null); + } + } + + /** + *

    Transform the given column value in a boolean value.

    + * + *

    The following cases are taken into account in function of the given value's type:

    + *
      + *
    • NULL: false is always returned.
    • + * + *
    • {@link Boolean}: the boolean value is returned as provided (but casted in boolean).
    • + * + *
    • {@link Integer}: true is returned only if the integer value is strictly greater than 0, otherwise false is returned.
    • + * + *
    • Other: toString().trim() is first called on this object. Then, an integer value is tried to be extracted from it. + * If it succeeds, the previous rule is applied. If it fails, true will be returned only if the string is "t" or "true" (case insensitively).
    • + *
    + * + * @param colValue The column value to transform in boolean. + * + * @return Its corresponding boolean value. + */ + protected final boolean toBoolean(final Object colValue){ + // NULL => false: + if (colValue == null) + return false; + + // Boolean value => cast in boolean and return this value: + else if (colValue instanceof Boolean) + return ((Boolean)colValue).booleanValue(); + + // Integer value => cast in integer and return true only if the value is positive and not null: + else if (colValue instanceof Integer){ + int intFlag = ((Integer)colValue).intValue(); + return (intFlag > 0); + } + // Otherwise => get the string representation and: + // 1/ try to cast it into an integer and apply the same test as before + // 2/ if the cast fails, return true only if the value is "t" or "true" (case insensitively): + else{ + String strFlag = colValue.toString().trim(); + try{ + int intFlag = Integer.parseInt(strFlag); + return (intFlag > 0); + }catch(NumberFormatException nfe){ + return strFlag.equalsIgnoreCase("t") || strFlag.equalsIgnoreCase("true"); + } + } + } + + /** + * Return NULL if the given column value is an empty string (or it just contains space characters) or NULL. + * Otherwise the given string is returned as provided. + * + * @param dbValue Value to nullify if needed. + * + * @return NULL if the given string is NULL or empty, otherwise the given value. + */ + protected final String nullifyIfNeeded(final String dbValue){ + return (dbValue != null && dbValue.trim().length() <= 0) ? null : dbValue; + } + + /** + * Search a {@link TAPTable} instance whose the ADQL name matches (case sensitively) to the given one. + * + * @param tableName ADQL name of the table to search. + * @param itTables Iterator over the set of tables in which the research must be done. + * + * @return The found table, or NULL if not found. 
+ */ + private TAPTable searchTable(String tableName, final Iterator itTables){ + // Get the schema name, if any prefix the given table name: + String schemaName = null; + int indSep = tableName.indexOf('.'); + if (indSep > 0){ + schemaName = tableName.substring(0, indSep); + tableName = tableName.substring(indSep + 1); + } + + // Search by schema name (if any) and then by table name: + while(itTables.hasNext()){ + // get the table: + TAPTable table = itTables.next(); + // test the schema name (if one was prefixing the table name) (case sensitively): + if (schemaName != null){ + if (table.getADQLSchemaName() == null || !schemaName.equals(table.getADQLSchemaName())) + continue; + } + // test the table name (case sensitively): + if (tableName.equals(table.getADQLName())) + return table; + } + + // NULL if no table matches: + return null; + } + + /** + *

    Tell whether the specified schema exists in the database. + * To do so, it is using the given {@link DatabaseMetaData} object to query the database and list all existing schemas.

    + * + *

    Note: + * This function is completely useless if the connection is not supporting schemas. + *

    + * + *

    Note: + * Test on the schema name is done considering the case sensitivity indicated by the translator + * (see {@link JDBCTranslator#isCaseSensitive(IdentifierField)}). + *

    + * + *

    Note: + * This functions is used by {@link #addUploadedTable(TAPTable, TableIterator)} and {@link #resetTAPSchema(Statement, TAPTable[])}. + *

    + * + * @param schemaName DB name of the schema whose the existence must be checked. + * @param dbMeta Metadata about the database, and mainly the list of all existing schemas. + * + * @return true if the specified schema exists, false otherwise. + * + * @throws SQLException If any error occurs while interrogating the database about existing schema. + */ + protected boolean isSchemaExisting(String schemaName, final DatabaseMetaData dbMeta) throws SQLException{ + if (!supportsSchema || schemaName == null || schemaName.length() == 0) + return true; + + // Determine the case sensitivity to use for the equality test: + boolean caseSensitive = translator.isCaseSensitive(IdentifierField.SCHEMA); + + ResultSet rs = null; + try{ + // List all schemas available and stop when a schema name matches ignoring the case: + rs = dbMeta.getSchemas(); + boolean hasSchema = false; + while(!hasSchema && rs.next()) + hasSchema = equals(rs.getString(1), schemaName, caseSensitive); + return hasSchema; + }finally{ + close(rs); + } + } + + /** + *

    Tell whether the specified table exists in the database. + * To do so, it is using the given {@link DatabaseMetaData} object to query the database and list all existing tables.

    + * + *

    Important note: + * If schemas are not supported by this connection but a schema name is even though provided in parameter, + * the table name will be prefixed by the schema name. + * The research will then be done with NULL as schema name and this prefixed table name. + *

    + * + *

    Note: + * Test on the schema name is done considering the case sensitivity indicated by the translator + * (see {@link JDBCTranslator#isCaseSensitive(IdentifierField)}). + *

    + * + *

    Note: + * This function is used by {@link #addUploadedTable(TAPTable, TableIterator)} and {@link #dropUploadedTable(TAPTable)}. + *

    + * + * @param schemaName DB name of the schema in which the table to search is. If NULL, the table is expected in any schema but ONLY one MUST exist. + * @param tableName DB name of the table to search. + * @param dbMeta Metadata about the database, and mainly the list of all existing tables. + * + * @return true if the specified table exists, false otherwise. + * + * @throws SQLException If any error occurs while interrogating the database about existing tables. + */ + protected boolean isTableExisting(String schemaName, String tableName, final DatabaseMetaData dbMeta) throws DBException, SQLException{ + if (tableName == null || tableName.length() == 0) + return true; + + // Determine the case sensitivity to use for the equality test: + boolean schemaCaseSensitive = translator.isCaseSensitive(IdentifierField.SCHEMA); + boolean tableCaseSensitive = translator.isCaseSensitive(IdentifierField.TABLE); + + ResultSet rs = null; + try{ + + // List all matching tables: + if (supportsSchema){ + String schemaPattern = schemaCaseSensitive ? schemaName : null; + String tablePattern = tableCaseSensitive ? tableName : null; + rs = dbMeta.getTables(null, schemaPattern, tablePattern, null); + }else{ + String tablePattern = tableCaseSensitive ? tableName : null; + rs = dbMeta.getTables(null, null, tablePattern, null); + } + + // Stop on the first table which match completely (schema name + table name in function of their respective case sensitivity): + int cnt = 0; + while(rs.next()){ + String rsSchema = nullifyIfNeeded(rs.getString(2)); + String rsTable = rs.getString(3); + if (!supportsSchema || schemaName == null || equals(rsSchema, schemaName, schemaCaseSensitive)){ + if (equals(rsTable, tableName, tableCaseSensitive)) + cnt++; + } + } + + if (cnt > 1){ + if (logger != null) + logger.logDB(LogLevel.ERROR, this, "TABLE_EXIST", "More than one table match to these criteria (schema=" + schemaName + " (case sensitive?" 
+ schemaCaseSensitive + ") && table=" + tableName + " (case sensitive?" + tableCaseSensitive + "))!", null); + throw new DBException("More than one table match to these criteria (schema=" + schemaName + " (case sensitive?" + schemaCaseSensitive + ") && table=" + tableName + " (case sensitive?" + tableCaseSensitive + "))!"); + } + + return cnt == 1; + + }finally{ + close(rs); + } + } + + /** + *

	 * Tell whether the specified column exists in the specified table of the database.
	 * To do so, it is using the given {@link DatabaseMetaData} object to query the database and list all existing columns.
	 *
	 * <p><i>Important note:
	 * 	If schemas are not supported by this connection but a schema name is even though provided in parameter,
	 * 	the table name will be prefixed by the schema name.
	 * 	The research will then be done with NULL as schema name and this prefixed table name.
	 * </i></p>
	 *
	 * <p><i>Note:
	 * 	Test on the schema name is done considering the case sensitivity indicated by the translator
	 * 	(see {@link JDBCTranslator#isCaseSensitive(IdentifierField)}).
	 * </i></p>
	 *
	 * <p><i>Note:
	 * 	This function is used by {@link #loadSchemas(TAPTable, TAPMetadata, Statement)}, {@link #loadTables(TAPTable, TAPMetadata, Statement)}
	 * 	and {@link #loadColumns(TAPTable, List, Statement)}.
	 * </i></p>
	 *
	 * @param schemaName	DB name of the table schema. MAY BE NULL
	 * @param tableName		DB name of the table containing the column to search. MAY BE NULL
	 * @param columnName	DB name of the column to search.
	 * @param dbMeta		Metadata about the database, and mainly the list of all existing tables.
	 *
	 * @return	<i>true</i> if the specified column exists, <i>false</i> otherwise.
	 *
	 * @throws SQLException	If any error occurs while interrogating the database about existing columns.
	 */
	protected boolean isColumnExisting(String schemaName, String tableName, String columnName, final DatabaseMetaData dbMeta) throws DBException, SQLException{
		// No column name => nothing to check:
		if (columnName == null || columnName.length() == 0)
			return true;

		// Determine the case sensitivity to use for the equality test:
		boolean schemaCaseSensitive = translator.isCaseSensitive(IdentifierField.SCHEMA);
		boolean tableCaseSensitive = translator.isCaseSensitive(IdentifierField.TABLE);
		boolean columnCaseSensitive = translator.isCaseSensitive(IdentifierField.COLUMN);

		ResultSet rsT = null, rsC = null;
		try{
			/* Note:
			 *
			 * The DatabaseMetaData.getColumns(....) function does not work properly
			 * with the SQLite driver: when all parameters are set to null, meaning all columns of the database
			 * must be returned, absolutely no rows are selected.
			 *
			 * The solution proposed here, is to first search all (matching) tables, and then for each table get
			 * all its columns and find the matching one(s).
			 */

			// List all matching tables (a DBMS-side pattern is used only for case-sensitive parts):
			if (supportsSchema){
				String schemaPattern = schemaCaseSensitive ? schemaName : null;
				String tablePattern = tableCaseSensitive ? tableName : null;
				rsT = dbMeta.getTables(null, schemaPattern, tablePattern, null);
			}else{
				String tablePattern = tableCaseSensitive ? tableName : null;
				rsT = dbMeta.getTables(null, null, tablePattern, null);
			}

			// For each matching table:
			int cnt = 0;
			String columnPattern = columnCaseSensitive ? columnName : null;
			while(rsT.next()){
				// columns 2 and 3 of getTables(...) are the schema and table names:
				String rsSchema = nullifyIfNeeded(rsT.getString(2));
				String rsTable = rsT.getString(3);
				// test the schema name (only when schemas are supported and one was specified):
				if (!supportsSchema || schemaName == null || equals(rsSchema, schemaName, schemaCaseSensitive)){
					// test the table name:
					if ((tableName == null || equals(rsTable, tableName, tableCaseSensitive))){
						// list its columns:
						rsC = dbMeta.getColumns(null, rsSchema, rsTable, columnPattern);
						// count all matching columns (column 4 of getColumns(...) is the column name):
						while(rsC.next()){
							String rsColumn = rsC.getString(4);
							if (equals(rsColumn, columnName, columnCaseSensitive))
								cnt++;
						}
						// close this table's column list now ; close(...) is null-safe and
						// silently ignores failures, so the extra close in the finally block is harmless:
						close(rsC);
					}
				}
			}

			// More than one match is ambiguous => report it as an error:
			if (cnt > 1){
				if (logger != null)
					logger.logDB(LogLevel.ERROR, this, "COLUMN_EXIST", "More than one column match to these criteria (schema=" + schemaName + " (case sensitive?" + schemaCaseSensitive + ") && table=" + tableName + " (case sensitive?" + tableCaseSensitive + ") && column=" + columnName + " (case sensitive?" + columnCaseSensitive + "))!", null);
				throw new DBException("More than one column match to these criteria (schema=" + schemaName + " (case sensitive?" + schemaCaseSensitive + ") && table=" + tableName + " (case sensitive?" + tableCaseSensitive + ") && column=" + columnName + " (case sensitive?" + columnCaseSensitive + "))!");
			}

			return cnt == 1;

		}finally{
			close(rsT);
			close(rsC);
		}
	}

	/*
	 *

    Build a table prefix with the given schema name.

    + * + *

    By default, this function returns: schemaName + "_".

    + * + *

    CAUTION: + * This function is used only when schemas are not supported by the DBMS connection. + * It aims to propose an alternative of the schema notion by prefixing the table name by the schema name. + *

    + * + *

    Note: + * If the given schema is NULL or is an empty string, an empty string will be returned. + * Thus, no prefix will be set....which is very useful when the table name has already been prefixed + * (in such case, the DB name of its schema has theoretically set to NULL). + *

    + * + * @param schemaName (DB) Schema name. + * + * @return The corresponding table prefix, or "" if the given schema name is an empty string or NULL. + * + protected String getTablePrefix(final String schemaName){ + if (schemaName != null && schemaName.trim().length() > 0) + return schemaName + "_"; + else + return ""; + }*/ + + /** + * Tell whether the specified table (using its DB name only) is a standard one or not. + * + * @param dbTableName DB (unqualified) table name. + * @param stdTables List of all tables to consider as the standard ones. + * @param caseSensitive Indicate whether the equality test must be done case sensitively or not. + * + * @return The corresponding {@link STDTable} if the specified table is a standard one, + * NULL otherwise. + * + * @see TAPMetadata#resolveStdTable(String) + */ + protected final STDTable isStdTable(final String dbTableName, final TAPTable[] stdTables, final boolean caseSensitive){ + if (dbTableName != null){ + for(TAPTable t : stdTables){ + if (equals(dbTableName, t.getDBName(), caseSensitive)) + return TAPMetadata.resolveStdTable(t.getADQLName()); + } + } + return null; + } + + /** + *

    "Execute" the query update. This update must concern ONLY ONE ROW.

    + * + *

    + * Note that the "execute" action will be different in function of whether batch update queries are supported or not by this connection: + *

    + *
      + *
    • + * If batch update queries are supported, just {@link PreparedStatement#addBatch()} will be called. + * It means, the query will be appended in a list and will be executed only if + * {@link #executeBatchUpdates(PreparedStatement, int)} is then called. + *
    • + *
    • + * If they are NOT supported, {@link PreparedStatement#executeUpdate()} will merely be called. + *
    • + *
    + * + *

    + * Before returning, and only if batch update queries are not supported, this function is ensuring that exactly one row has been updated. + * If it is not the case, a {@link DBException} is thrown. + *

    + * + *

    Important note: + * If the function {@link PreparedStatement#addBatch()} fails by throwing an {@link SQLException}, batch updates + * will be afterwards considered as not supported by this connection. Besides, if this row is the first one in a batch update (parameter indRow=1), + * then, the error will just be logged and an {@link PreparedStatement#executeUpdate()} will be tried. However, if the row is not the first one, + * the error will be logged but also thrown as a {@link DBException}. In both cases, a subsequent call to + * {@link #executeBatchUpdates(PreparedStatement, int)} will have obviously no effect. + *

    + * + * @param stmt {@link PreparedStatement} in which the update query has been prepared. + * @param indRow Index of the row in the whole update process. It is used only for error management purpose. + * + * @throws SQLException If {@link PreparedStatement#executeUpdate()} fails.
    + * @throws DBException If {@link PreparedStatement#addBatch()} fails and this update does not concern the first row, or if the number of updated rows is different from 1. + */ + protected final void executeUpdate(final PreparedStatement stmt, int indRow) throws SQLException, DBException{ + // BATCH INSERTION: (the query is queued and will be executed later) + if (supportsBatchUpdates){ + // Add the prepared query in the batch queue of the statement: + try{ + stmt.addBatch(); + }catch(SQLException se){ + supportsBatchUpdates = false; + /* + * If the error happens for the first row, it is still possible to insert all rows + * with the non-batch function - executeUpdate(). + * + * Otherwise, it is impossible to insert the previous batched rows ; an exception must be thrown + * and must stop the whole TAP_SCHEMA initialization. + */ + if (indRow == 1){ + if (logger != null) + logger.logDB(LogLevel.WARNING, this, "EXEC_UPDATE", "BATCH query impossible => TRYING AGAIN IN A NORMAL EXECUTION (executeUpdate())!", se); + }else{ + if (logger != null) + logger.logDB(LogLevel.ERROR, this, "EXEC_UPDATE", "BATCH query impossible!", se); + throw new DBException("BATCH query impossible!", se); + } + } + } + + // NORMAL INSERTION: (immediate insertion) + if (!supportsBatchUpdates){ + + // Insert the row prepared in the given statement: + int nbRowsWritten = stmt.executeUpdate(); + + // Check the row has been inserted with success: + if (nbRowsWritten != 1){ + if (logger != null) + logger.logDB(LogLevel.ERROR, this, "EXEC_UPDATE", "ROW " + indRow + " not inserted!", null); + throw new DBException("ROW " + indRow + " not inserted!"); + } + } + } + + /** + *

    Execute all batched queries.

    + * + *

    To do so, {@link PreparedStatement#executeBatch()} and then, if the first was successful, {@link PreparedStatement#clearBatch()} is called.

    + * + *

    + * Before returning, this function is ensuring that exactly the given number of rows has been updated. + * If it is not the case, a {@link DBException} is thrown. + *

    + * + *

    Note: + * This function has no effect if batch queries are not supported. + *

    + * + *

    Important note: + * In case {@link PreparedStatement#executeBatch()} fails by throwing an {@link SQLException}, + * batch update queries will be afterwards considered as not supported by this connection. + *

    + * + * @param stmt {@link PreparedStatement} in which the update query has been prepared. + * @param nbRows Number of rows that should be updated. + * + * @throws DBException If {@link PreparedStatement#executeBatch()} fails, or if the number of updated rows is different from the given one. + */ + protected final void executeBatchUpdates(final PreparedStatement stmt, int nbRows) throws DBException{ + if (supportsBatchUpdates){ + // Execute all the batch queries: + int[] rows; + try{ + rows = stmt.executeBatch(); + }catch(SQLException se){ + supportsBatchUpdates = false; + if (logger != null) + logger.logDB(LogLevel.ERROR, this, "EXEC_UPDATE", "BATCH execution impossible!", se); + throw new DBException("BATCH execution impossible!", se); + } + + // Remove executed queries from the statement: + try{ + stmt.clearBatch(); + }catch(SQLException se){ + if (logger != null) + logger.logDB(LogLevel.WARNING, this, "EXEC_UPDATE", "CLEAR BATCH impossible!", se); + } + + // Count the updated rows: + int nbRowsUpdated = 0; + for(int i = 0; i < rows.length; i++) + nbRowsUpdated += rows[i]; + + // Check all given rows have been inserted with success: + if (nbRowsUpdated != nbRows){ + if (logger != null) + logger.logDB(LogLevel.ERROR, this, "EXEC_UPDATE", "ROWS not all update (" + nbRows + " to update ; " + nbRowsUpdated + " updated)!", null); + throw new DBException("ROWS not all updated (" + nbRows + " to update ; " + nbRowsUpdated + " updated)!"); + } + } + } + + /** + * Append all items of the iterator inside the given list. + * + * @param lst List to update. + * @param it All items to append inside the list. + */ + private < T > void appendAllInto(final List lst, final Iterator it){ + while(it.hasNext()) + lst.add(it.next()); + } + + /** + *

    Tell whether the given DB name is equal (case sensitively or not, depending on the given parameter) + * to the given name coming from a {@link TAPMetadata} object.

    + * + *

    If at least one of the given name is NULL, false is returned.

    + * + *

    Note: + * The comparison will be done according to the specified case sensitivity BUT ALSO according to the case support of the DBMS and the case in which it stores identifiers. + * For instance, if case insensitivity has been specified and mixed case is not supported for unquoted identifiers, + * the comparison must be done, surprisingly, by considering whether unquoted identifiers are stored in lower or upper case. + * Thus, this special way to evaluate equality should be as close as possible to the identifier storage and research policies of the used DBMS. + *

    + * + * @param dbName Name provided by the database. + * @param metaName Name provided by a {@link TAPMetadata} object. + * @param caseSensitive true if the equality test must be done case sensitively, false otherwise. + * + * @return true if both names are equal, false otherwise. + */ + protected final boolean equals(final String dbName, final String metaName, final boolean caseSensitive){ + if (dbName == null || metaName == null) + return false; + + if (caseSensitive){ + if (supportsMixedCaseQuotedIdentifier || mixedCaseQuoted) + return dbName.equals(metaName); + else if (lowerCaseQuoted) + return dbName.equals(metaName.toLowerCase()); + else if (upperCaseQuoted) + return dbName.equals(metaName.toUpperCase()); + else + return dbName.equalsIgnoreCase(metaName); + }else{ + if (supportsMixedCaseUnquotedIdentifier) + return dbName.equalsIgnoreCase(metaName); + else if (lowerCaseUnquoted) + return dbName.equals(metaName.toLowerCase()); + else if (upperCaseUnquoted) + return dbName.equals(metaName.toUpperCase()); + else + return dbName.equalsIgnoreCase(metaName); + } + } + + @Override + public void setFetchSize(final int size){ + supportsFetchSize = true; + fetchSize = (size > 0) ? size : IGNORE_FETCH_SIZE; } } diff --git a/src/tap/error/DefaultTAPErrorWriter.java b/src/tap/error/DefaultTAPErrorWriter.java index 40534f08d768a3a4a23a3ac052808d79c0e3dde3..6b390cb1f420fc606a203343169276d8581faafd 100644 --- a/src/tap/error/DefaultTAPErrorWriter.java +++ b/src/tap/error/DefaultTAPErrorWriter.java @@ -16,64 +16,308 @@ package tap.error; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . 
* - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ +import java.io.BufferedWriter; +import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.io.OutputStream; +import java.io.OutputStreamWriter; +import java.io.PrintStream; +import java.io.PrintWriter; +import java.sql.SQLException; +import java.util.LinkedHashMap; +import java.util.Map; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import tap.ServiceConnection; +import tap.TAPException; +import tap.formatter.OutputFormat; +import tap.formatter.VOTableFormat; +import tap.log.DefaultTAPLog; +import tap.log.TAPLog; import uws.UWSException; +import uws.UWSToolBox; +import uws.job.ErrorSummary; +import uws.job.ErrorType; +import uws.job.UWSJob; import uws.job.user.JobOwner; -import uws.service.error.AbstractServiceErrorWriter; import uws.service.error.ServiceErrorWriter; -import uws.service.log.UWSLog; /** *

    Default implementation of {@link ServiceErrorWriter} for a TAP service.

    * - *

    All errors are written using the function {@link #formatError(Throwable, boolean, uws.job.ErrorType, int, String, JobOwner, HttpServletResponse)} - * of the abstract implementation of the error writer: {@link AbstractServiceErrorWriter}.

    + *

    + * On the contrary to the UWS standard, all errors must be formatted in VOTable. + * So, all errors given to this {@link ServiceErrorWriter} are formatted in VOTable using the structure defined by the IVOA. + * To do that, this class will use the function {@link VOTableFormat#writeError(String, java.util.Map, java.io.PrintWriter)}. + *

    * - *

    A {@link UWSException} may precise the HTTP error code to apply. That's why, {@link #writeError(Throwable, HttpServletResponse, HttpServletRequest, JobOwner, String)} - * has been overridden: to get this error code and submit it to the {@link #formatError(Throwable, boolean, uws.job.ErrorType, int, String, JobOwner, HttpServletResponse)} - * function. Besides, the stack trace of {@link UWSException}s is not printed (except if the message is NULL or empty). - * And this error will be logged only if its error code is {@link UWSException#INTERNAL_SERVER_ERROR}.

    + *

    + * The {@link VOTableFormat} will be got from the {@link ServiceConnection} using {@link ServiceConnection#getOutputFormat(String)} + * with "votable" as parameter. If the returned formatter is not a direct instance or an extension of {@link VOTableFormat}, + * a default instance of this class will be always used. + *

    * - *

    2 formats are managed by this implementation: HTML (default) and JSON. That means the writer will format and - * write a given error in the best appropriate format. This format is chosen thanks to the "Accept" header of the HTTP request. - * If no request is provided or if there is no known format, the HTML format is chosen by default.

    + *

    + * {@link UWSException}s and {@link TAPException}s may precise the HTTP error code to apply, + * which will be used to set the HTTP status of the response. If it is a different kind of exception, + * the HTTP status 500 (INTERNAL SERVER ERROR) will be used. + *

    * - * @author Grégory Mantelet (CDS) - * @version 06/2012 + *

    + * Besides, all exceptions except {@link UWSException} and {@link TAPException} will be logged as FATAL in the TAP context + * (with no event and no object). Thus the full stack trace is available to the administrator so that the error can + * be understood as easily and quickly as possible. + *

    * - * @see AbstractServiceErrorWriter + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (04/2015) */ -public class DefaultTAPErrorWriter extends AbstractServiceErrorWriter { +public class DefaultTAPErrorWriter implements ServiceErrorWriter { - protected final ServiceConnection service; + /** Description of the TAP service using this {@link ServiceErrorWriter}. */ + protected final ServiceConnection service; - public DefaultTAPErrorWriter(final ServiceConnection service){ + /** Logger to use to report any unexpected error. + * This attribute MUST NEVER be used directly, but only with its getter {@link #getLogger()}. */ + protected TAPLog logger = null; + + /** Object to use to format an error message into VOTable. + * This attribute MUST NEVER be used directly, but only with its getter {@link #getFormatter()}. */ + protected VOTableFormat formatter = null; + + /** + *

    Build an error writer for TAP.

    + * + *

    + * On the contrary to the UWS standard, TAP standard defines a format for error reporting. + * Errors should be reported as VOTable document with a defined structure. This one is well + * managed by {@link VOTableFormat} which is actually called by this class when an error must + * be written. + *

    + * + * @param service Description of the TAP service. + * + * @throws NullPointerException If no service description is provided. + */ + public DefaultTAPErrorWriter(final ServiceConnection service) throws NullPointerException{ + if (service == null) + throw new NullPointerException("Missing description of this TAP service! Can not build a ServiceErrorWriter."); this.service = service; } + /** + *

    Get the {@link VOTableFormat} to use in order to format errors.

    + * + *

    Note: + * If not yet set, the formatter of this {@link ServiceErrorWriter} is set to the formatter of VOTable results returned by the {@link ServiceConnection}. + * However this formatter should be a {@link VOTableFormat} instance or an extension (because the function {@link VOTableFormat#writeError(String, java.util.Map, PrintWriter)} is needed). + * Otherwise a default {@link VOTableFormat} instance will be created and always used by this {@link ServiceErrorWriter}. + *

    + * + * @return A VOTable formatter. + * + * @since 2.0 + */ + protected VOTableFormat getFormatter(){ + if (formatter == null){ + OutputFormat fmt = service.getOutputFormat("votable"); + if (fmt == null || !(fmt instanceof VOTableFormat)) + formatter = new VOTableFormat(service); + else + formatter = (VOTableFormat)fmt; + } + return formatter; + } + + /** + *

	 * Get the logger to use inside this {@link ServiceErrorWriter}.
	 *
	 * <p><i>Note:
	 * 	If not yet set, the logger of this {@link ServiceErrorWriter} is set to the logger used by the {@link ServiceConnection}.
	 * 	If none is returned by the {@link ServiceConnection}, a default {@link TAPLog} instance writing logs in System.err
	 * 	will be created and always used by this {@link ServiceErrorWriter}.
	 * </i></p>
	 *
	 * @return	A logger. <i>Never NULL.</i>
	 *
	 * @since 2.0
	 */
	protected TAPLog getLogger(){
		// Lazy initialization (NOTE(review): not synchronized ; presumably acceptable here — confirm):
		if (logger == null){
			logger = service.getLogger();
			if (logger == null)
				logger = new DefaultTAPLog(System.err);
		}
		return logger;
	}

	@Override
	public boolean writeError(final Throwable t, final HttpServletResponse response, final HttpServletRequest request, final String reqID, final JobOwner user, final String action){
		// Nothing to do without an error or a response to write it into:
		if (t == null || response == null)
			return true;

		boolean written = false;
		// If expected error, just write it in VOTable:
		if (t instanceof UWSException || t instanceof TAPException){
			// get the error type:
			ErrorType type = (t instanceof UWSException) ? ((UWSException)t).getUWSErrorType() : ErrorType.FATAL;
			// get the HTTP error code:
			int httpErrorCode = (t instanceof UWSException) ? ((UWSException)t).getHttpErrorCode() : ((TAPException)t).getHttpErrorCode();
			// write the VOTable error:
			written = writeError(t.getMessage(), type, httpErrorCode, response, request, reqID, user, action);
		}
		// Otherwise, log it and write a message to the user:
		/* NOTE(review): despite the comment above (and the class javadoc, which promises a
		 * FATAL log entry for unexpected errors), no logging call is visible in this branch —
		 * TODO confirm whether a getLogger().log...(t) call is missing here. */
		else
			// write a message to the user:
			written = writeError("INTERNAL SERVER ERROR! Sorry, this error is grave and unexpected. No explanation can be provided for the moment. Details about this error have been reported in the service log files ; you should try again your request later or notify the administrator(s) by yourself (with the following REQ_ID).", ErrorType.FATAL, UWSException.INTERNAL_SERVER_ERROR, response, request, reqID, user, action);
		return written;
	}

	@Override
	public boolean writeError(final String message, final ErrorType type, final int httpErrorCode, final HttpServletResponse response, final HttpServletRequest request, final String reqID, final JobOwner user, final String action){
		// Nothing to do without a message or a response to write it into:
		if (message == null || response == null)
			return true;

		try{
			// Erase anything written previously in the HTTP response:
			response.reset();

			// Set the HTTP status (500 by default when no valid code is provided):
			response.setStatus((httpErrorCode <= 0) ? 500 : httpErrorCode);

			// Set the MIME type of the answer (XML for a VOTable document):
			response.setContentType("application/xml");

			// Set the character encoding:
			response.setCharacterEncoding(UWSToolBox.DEFAULT_CHAR_ENCODING);

		}catch(IllegalStateException ise){
			/* If it is not possible any more to reset the response header and body,
			 * the error is anyway written in order to corrupt the HTTP response.
			 * Thus, it will be obvious that an error occurred and the result is
			 * incomplete and/or wrong.*/
		}

		try{
			// List any additional information useful to report to the user:
			/* NOTE(review): the generic type parameters appear to have been stripped from this
			 * file (raw Map/LinkedHashMap) — presumably Map<String,String>; confirm against upstream. */
			Map addInfos = new LinkedHashMap();
			if (reqID != null)
				addInfos.put("REQ_ID", reqID);
			if (type != null)
				addInfos.put("ERROR_TYPE", type.toString());
			if (user != null)
				addInfos.put("USER", user.getID() + ((user.getPseudo() == null) ? "" : " (" + user.getPseudo() + ")"));
			if (action != null)
				addInfos.put("ACTION", action);

			// Format the error in VOTable and write the document in the given HTTP response:
			PrintWriter writer;
			try{
				writer = response.getWriter();
			}catch(IllegalStateException ise){
				/* This exception may occur just because either the writer or
				 * the output-stream can be used (because already got before).
				 * So, we just have to get the output-stream if getting the writer
				 * throws an error.*/
				writer = new PrintWriter(new BufferedWriter(new OutputStreamWriter(response.getOutputStream())));
			}
			getFormatter().writeError(message, addInfos, writer);

			return true;
		}catch(IllegalStateException ise){
			// The response can no longer be written to ; report the failure to the caller:
			return false;
		}catch(IOException ioe){
			// I/O failure while writing the VOTable ; report the failure to the caller:
			return false;
		}
	}

	@Override
	public void writeError(Throwable t, ErrorSummary error, UWSJob job, OutputStream output) throws IOException{
		// Get the error message (preferring the summary's message over the exception's one):
		String message;
		if (error != null && error.getMessage() != null)
			message = error.getMessage();
		else if (t != null)
			message = (t.getMessage() == null) ? t.getClass().getName() : t.getMessage();
		else
			message = "{NO MESSAGE}";

		// List any additional information useful to report to the user:
		Map addInfos = new LinkedHashMap();
		// error type:
		if (error != null && error.getType() != null)
			addInfos.put("ERROR_TYPE", error.getType().toString());
		// infos about the exception:
		putExceptionInfos(t, addInfos);
		// job ID:
		if (job != null){
			addInfos.put("JOB_ID", job.getJobId());
			if (job.getOwner() != null)
				addInfos.put("USER", job.getOwner().getID() + ((job.getOwner().getPseudo() == null) ?
"" : " (" + job.getOwner().getPseudo() + ")")); + } + // action running while the error occurred (only one is possible here: EXECUTING an ADQL query): + addInfos.put("ACTION", "EXECUTING"); + + // Format the error in VOTable and write the document in the given HTTP response: + getFormatter().writeError(message, addInfos, new PrintWriter(output)); + } + + /** + * Add all interesting additional information about the given exception inside the given map. + * + * @param t Exception whose some details must be added inside the given map. + * @param addInfos Map of all additional information. + * + * @since 2.0 + */ + protected void putExceptionInfos(final Throwable t, final Map addInfos){ + if (t != null){ + // Browse the exception stack in order to list all exceptions' messages and to get the last cause of this error: + StringBuffer causes = new StringBuffer(); + Throwable cause = t.getCause(), lastCause = t; + int nbCauses = 0, nbStackTraces = 1; + while(cause != null){ + // new line: + causes.append('\n'); + // append the message: + causes.append("\t- ").append(cause.getMessage()); + // SQLException case: + if (cause instanceof SQLException){ + SQLException se = (SQLException)cause; + while(se.getNextException() != null){ + se = se.getNextException(); + causes.append("\n\t\t- ").append(se.getMessage()); + } + } + // go to the next message: + lastCause = cause; + cause = cause.getCause(); + nbCauses++; + nbStackTraces++; + } + + // Add the list of all causes' message: + if (causes.length() > 0) + addInfos.put("CAUSES", "\n" + nbCauses + causes.toString()); + + // Add the stack trace of the original exception ONLY IF NOT A TAP NOR A UWS EXCEPTION (only unexpected error should be detailed to the users): + if (!(lastCause instanceof TAPException && lastCause instanceof UWSException)){ + ByteArrayOutputStream stackTrace = new ByteArrayOutputStream(); + lastCause.printStackTrace(new PrintStream(stackTrace)); + addInfos.put("ORIGIN_STACK_TRACE", "\n" + nbStackTraces + "\n" + 
stackTrace.toString()); + } + } } @Override - public void writeError(Throwable t, HttpServletResponse response, HttpServletRequest request, JobOwner user, String action) throws IOException{ - if (t instanceof UWSException){ - UWSException ue = (UWSException)t; - formatError(ue, (ue.getMessage() == null || ue.getMessage().trim().isEmpty()), ue.getUWSErrorType(), ue.getHttpErrorCode(), action, user, response, (request != null) ? request.getHeader("Accept") : null); - if (ue.getHttpErrorCode() == UWSException.INTERNAL_SERVER_ERROR) - getLogger().error(ue); - getLogger().httpRequest(request, user, action, ue.getHttpErrorCode(), ue.getMessage(), ue); - }else - super.writeError(t, response, request, user, action); + public String getErrorDetailsMIMEType(){ + return "application/xml"; } } diff --git a/src/tap/file/LocalTAPFileManager.java b/src/tap/file/LocalTAPFileManager.java deleted file mode 100644 index 2e9beecb5e191f07f0e46b4fd695407287d5171f..0000000000000000000000000000000000000000 --- a/src/tap/file/LocalTAPFileManager.java +++ /dev/null @@ -1,148 +0,0 @@ -package tap.file; - -/* - * This file is part of TAPLibrary. - * - * TAPLibrary is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * TAPLibrary is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with TAPLibrary. If not, see . 
- * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) - */ - -import java.io.File; - -import tap.db.DBConnection; -import uws.UWSException; - -import uws.service.file.DefaultOwnerGroupIdentifier; -import uws.service.file.LocalUWSFileManager; -import uws.service.file.OwnerGroupIdentifier; - -/** - *

    - * Lets creating and managing all files needed in a TAP service. - * These files are: UWS job results and errors, log files, backup files and the upload directory. - *

    - *

    - * All files are written in the local machine, into the given directory. - *

    - * - * @author Grégory Mantelet (CDS) - * @version 06/2012 - * - * @see LocalUWSFileManager - */ -public class LocalTAPFileManager extends LocalUWSFileManager implements TAPFileManager { - - /** Default name of the upload directory. */ - public final static String DEFAULT_UPLOAD_DIRECTORY_NAME = "Upload"; - - /** Default name of the DB activity log file. */ - public final static String DEFAULT_DB_ACTIVITY_LOG_FILE_NAME = "service_db_activity.log"; - - /** Local directory in which all uploaded files will be kept until they are read or ignored (in this case, they will be deleted). */ - private final File uploadDirectory; - - /** - *

    Builds a {@link TAPFileManager} which manages all UWS files in the given directory.

    - *

    - * There will be one directory for each owner ID and owner directories will be grouped - * thanks to {@link DefaultOwnerGroupIdentifier}. - *

    - * - * @param root TAP root directory. - * - * @throws UWSException If the given root directory is null, is not a directory or has not the READ and WRITE permissions. - * - * @see LocalUWSFileManager#LocalUWSFileManager(File) - * @see #getUploadDirectoryName() - */ - public LocalTAPFileManager(File root) throws UWSException{ - super(root); - uploadDirectory = new File(rootDirectory, getUploadDirectoryName()); - } - - /** - *

    Builds a {@link TAPFileManager} which manages all UWS files in the given directory.

    - *

    - * If, according to the third parameter, the owner directories must be grouped, - * the {@link DefaultOwnerGroupIdentifier} will be used. - *

    - * - * @param root TAP root directory. - * @param oneDirectoryForEachUser true to create one directory for each owner ID, false otherwise. - * @param groupUserDirectories true to group user directories, false otherwise. - * note: this value is ignored if the previous parameter is false. - * - * @throws UWSException If the given root directory is null, is not a directory or has not the READ and WRITE permissions. - * - * @see LocalUWSFileManager#LocalUWSFileManager(File, boolean, boolean) - * @see #getUploadDirectoryName() - */ - public LocalTAPFileManager(File root, boolean oneDirectoryForEachUser, boolean groupUserDirectories) throws UWSException{ - super(root, oneDirectoryForEachUser, groupUserDirectories); - uploadDirectory = new File(rootDirectory, getUploadDirectoryName()); - } - - /** - * Builds a {@link TAPFileManager} which manages all UWS files in the given directory. - * - * @param root TAP root directory. - * @param oneDirectoryForEachUser true to create one directory for each owner ID, false otherwise. - * @param groupUserDirectories true to group user directories, false otherwise. - * note: this value is ignored if the previous parameter is false. - * @param ownerGroupIdentifier The "function" to use to identify the group of a job owner. - *
      - *
    • note 1: this value is ignored if one of the two previous parameters is false.
    • - *
    • note 2: if this value is null but the previous parameters are true, - * {@link DefaultOwnerGroupIdentifier} will be chosen as default group identifier.
    • - *
    - * - * @throws UWSException If the given root directory is null, is not a directory or has not the READ and WRITE permissions. - * - * @see LocalUWSFileManager#LocalUWSFileManager(File, boolean, boolean, OwnerGroupIdentifier) - * @see #getUploadDirectoryName() - */ - public LocalTAPFileManager(File root, boolean oneDirectoryForEachUser, boolean groupUserDirectories, OwnerGroupIdentifier ownerGroupIdentifier) throws UWSException{ - super(root, oneDirectoryForEachUser, groupUserDirectories, ownerGroupIdentifier); - uploadDirectory = new File(rootDirectory, getUploadDirectoryName()); - } - - @Override - protected String getLogFileName(final String logTypeGroup){ - if (logTypeGroup != null && logTypeGroup.equals(DBConnection.LOG_TYPE_DB_ACTIVITY.getCustomType())) - return DEFAULT_DB_ACTIVITY_LOG_FILE_NAME; - else - return super.getLogFileName(logTypeGroup); - } - - /** - *

    Gets the name of the directory in which all uploaded files will be saved.

    - * - *

    note 1: this function is called ONLY one time: at the creation.

    - *

    note 2: by default, this function returns: {@link #DEFAULT_UPLOAD_DIRECTORY_NAME}.

    - * - * @return The name of the upload directory. - */ - protected String getUploadDirectoryName(){ - return DEFAULT_UPLOAD_DIRECTORY_NAME; - } - - @Override - public final File getUploadDirectory(){ - if (uploadDirectory != null && !uploadDirectory.exists()) - uploadDirectory.mkdirs(); - return uploadDirectory; - } - -} diff --git a/src/tap/file/TAPFileManager.java b/src/tap/file/TAPFileManager.java deleted file mode 100644 index f15a024693c7c3747438a3cb6e9f653832d37e98..0000000000000000000000000000000000000000 --- a/src/tap/file/TAPFileManager.java +++ /dev/null @@ -1,44 +0,0 @@ -package tap.file; - -/* - * This file is part of TAPLibrary. - * - * TAPLibrary is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * TAPLibrary is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with TAPLibrary. If not, see . - * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) - */ - -import java.io.File; - -import uws.service.file.UWSFileManager; - -/** - * Minimal API of the object which will be used by the TAP service (but more particularly by its UWS resource) - * to create, delete, write and read files needed to the service (i.e. results, errors, logs, backups, upload files). - * - * @author Grégory Mantelet (CDS) - * @version 06/2012 - * - * @see UWSFileManager - */ -public interface TAPFileManager extends UWSFileManager { - - /** - * Local directory in which all uploaded files will be kept until they are read or ignored (in this case, they will be deleted). 
- * - * @return Path of the directory in which uploaded files must be written. - */ - public File getUploadDirectory(); - -} diff --git a/src/tap/formatter/FITSFormat.java b/src/tap/formatter/FITSFormat.java new file mode 100644 index 0000000000000000000000000000000000000000..20c7d928c84661d522ec3b17643db904840ce382 --- /dev/null +++ b/src/tap/formatter/FITSFormat.java @@ -0,0 +1,101 @@ +package tap.formatter; + +/* + * This file is part of TAPLibrary. + * + * TAPLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * TAPLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with TAPLibrary. If not, see . + * + * Copyright 2014-2015 - Astronomisches Rechen Institut (ARI) + */ + +import java.io.IOException; +import java.io.OutputStream; + +import tap.ServiceConnection; +import tap.TAPException; +import tap.TAPExecutionReport; +import tap.data.TableIterator; +import tap.formatter.VOTableFormat.LimitedStarTable; +import uk.ac.starlink.fits.FitsTableWriter; +import uk.ac.starlink.table.ColumnInfo; +import uk.ac.starlink.table.StarTable; +import uk.ac.starlink.table.StoragePolicy; + +/** + * Format any given query (table) result into FITS. + * + * @author Grégory Mantelet (ARI) + * @version 2.0 (04/2015) + * @since 2.0 + */ +public class FITSFormat implements OutputFormat { + + /** The {@link ServiceConnection} to use (for the log and to have some information about the service (particularly: name, description). 
*/ + protected final ServiceConnection service; + + /** + * Creates a FITS formatter. + * + * @param service The service to use (for the log and to have some information about the service (particularly: name, description). + * + * @throws NullPointerException If the given service connection is null. + */ + public FITSFormat(final ServiceConnection service) throws NullPointerException{ + if (service == null) + throw new NullPointerException("The given service connection is NULL !"); + + this.service = service; + } + + @Override + public String getMimeType(){ + return "application/fits"; + } + + @Override + public String getShortMimeType(){ + return "fits"; + } + + @Override + public String getDescription(){ + return null; + } + + @Override + public String getFileExtension(){ + return "fits"; + } + + @Override + public void writeResult(TableIterator result, OutputStream output, TAPExecutionReport execReport, Thread thread) throws TAPException, IOException, InterruptedException{ + // Extract the columns' metadata: + ColumnInfo[] colInfos = VOTableFormat.toColumnInfos(result, execReport, thread); + + // Turns the result set into a table: + LimitedStarTable table = new LimitedStarTable(result, colInfos, execReport.parameters.getMaxRec()); + + // Copy the table on disk (or in memory if the table is short): + StarTable copyTable = StoragePolicy.PREFER_DISK.copyTable(table); + + /* Format the table in FITS (2 passes are needed for that, hence the copy on disk), + * and write it in the given output stream: */ + new FitsTableWriter().writeStarTable(copyTable, output); + + execReport.nbRows = table.getNbReadRows(); + + output.flush(); + } + +} diff --git a/src/tap/formatter/HTMLFormat.java b/src/tap/formatter/HTMLFormat.java new file mode 100644 index 0000000000000000000000000000000000000000..df179fa5763b1e75c0692b9e15619f208de790a8 --- /dev/null +++ b/src/tap/formatter/HTMLFormat.java @@ -0,0 +1,215 @@ +package tap.formatter; + +/* + * This file is part of TAPLibrary. 
+ * + * TAPLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * TAPLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with TAPLibrary. If not, see . + * + * Copyright 2014 - Astronomisches Rechen Institut (ARI) + */ + +import java.io.BufferedWriter; +import java.io.IOException; +import java.io.OutputStream; +import java.io.OutputStreamWriter; + +import tap.ServiceConnection; +import tap.TAPException; +import tap.TAPExecutionReport; +import tap.data.TableIterator; +import uk.ac.starlink.votable.VOSerializer; +import uws.ISO8601Format; +import adql.db.DBColumn; + +/** + * Format any given query (table) result into HTML. + * + * @author Grégory Mantelet (ARI) + * @version 2.0 (10/2014) + * @since 2.0 + */ +public class HTMLFormat implements OutputFormat { + + /** The {@link ServiceConnection} to use (for the log and to have some information about the service (particularly: name, description). */ + protected final ServiceConnection service; + + /** + * Creates an HTML formatter. + * + * @param service Description of the TAP service. + * + * @throws NullPointerException If the given service connection is null. 
+ */ + public HTMLFormat(final ServiceConnection service) throws NullPointerException{ + if (service == null) + throw new NullPointerException("The given service connection is NULL!"); + + this.service = service; + } + + @Override + public String getMimeType(){ + return "text/html"; + } + + @Override + public String getShortMimeType(){ + return "html"; + } + + @Override + public String getDescription(){ + return null; + } + + @Override + public String getFileExtension(){ + return ".html"; + } + + @Override + public void writeResult(TableIterator result, OutputStream output, TAPExecutionReport execReport, Thread thread) throws TAPException, IOException, InterruptedException{ + // Prepare the output stream: + final BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(output)); + writer.write(""); + writer.newLine(); + + // Write header: + DBColumn[] columns = writeHeader(result, writer, execReport, thread); + + if (thread.isInterrupted()) + throw new InterruptedException(); + + // Write data: + writeData(result, columns, writer, execReport, thread); + + writer.write("
    "); + writer.newLine(); + writer.flush(); + } + + /** + * Write the whole header (one row whose columns are just the columns' name). + * + * @param result Result to write later (but it contains also metadata that was extracted from the result itself). + * @param writer Output in which the metadata must be written. + * @param execReport Execution report (which contains the metadata extracted/guessed from the ADQL query). + * @param thread Thread which has asked for this formatting (it must be used in order to test the {@link Thread#isInterrupted()} flag and so interrupt everything if need). + * + * @return All the written metadata. + * + * @throws IOException If there is an error while writing something in the output. + * @throws InterruptedException If the thread has been interrupted. + * @throws TAPException If any other error occurs. + */ + protected DBColumn[] writeHeader(TableIterator result, BufferedWriter writer, TAPExecutionReport execReport, Thread thread) throws IOException, TAPException, InterruptedException{ + // Prepend a description of this result: + writer.write("TAP result"); + if (service.getProviderName() != null) + writer.write(" from " + VOSerializer.formatText(service.getProviderName())); + writer.write(" on " + ISO8601Format.format(System.currentTimeMillis())); + writer.write("
    " + VOSerializer.formatText(execReport.parameters.getQuery()) + ""); + writer.write(""); + writer.newLine(); + + // Get the columns meta: + DBColumn[] selectedColumns = execReport.resultingColumns; + + // If meta are not known, no header will be written: + int nbColumns = (selectedColumns == null) ? -1 : selectedColumns.length; + if (nbColumns > 0){ + writer.write(""); + writer.newLine(); + writer.write(""); + + // Write all columns' name: + for(int i = 0; i < nbColumns; i++){ + writer.write(""); + writer.write(VOSerializer.formatText(selectedColumns[i].getADQLName())); + writer.write(""); + } + + // Go to a new line (in order to prepare the data writing): + writer.write(""); + writer.newLine(); + writer.write(""); + writer.newLine(); + writer.flush(); + } + + // Returns the written columns: + return selectedColumns; + } + + /** + * Write all the data rows. + * + * @param result Result to write. + * @param selectedColumns All columns' metadata. + * @param writer Print writer in which the data must be written. + * @param execReport Execution report (which contains the maximum allowed number of records to output). + * @param thread Thread which has asked for this formatting (it must be used in order to test the {@link Thread#isInterrupted()} flag and so interrupt everything if need). + * + * @throws IOException If there is an error while writing something in the output stream. + * @throws InterruptedException If the thread has been interrupted. + * @throws TAPException If any other error occurs. 
+ */ + protected void writeData(TableIterator result, DBColumn[] selectedColumns, BufferedWriter writer, TAPExecutionReport execReport, Thread thread) throws IOException, TAPException, InterruptedException{ + execReport.nbRows = 0; + + writer.write(""); + writer.newLine(); + + while(result.nextRow()){ + // Stop right now the formatting if the job has been aborted/canceled/interrupted: + if (thread.isInterrupted()) + throw new InterruptedException(); + + // Deal with OVERFLOW, if needed: + if (execReport.parameters.getMaxRec() > 0 && execReport.nbRows >= execReport.parameters.getMaxRec()){ // that's to say: OVERFLOW ! + writer.write("OVERFLOW (more rows were available but have been truncated by the TAP service)"); + writer.newLine(); + break; + } + + writer.write(""); + + while(result.hasNextCol()){ + writer.write(""); + + // Write the column value: + Object colVal = result.nextCol(); + if (colVal != null) + writer.write(VOSerializer.formatText(colVal.toString())); + + writer.write(""); + + if (thread.isInterrupted()) + throw new InterruptedException(); + } + writer.write(""); + writer.newLine(); + execReport.nbRows++; + + // flush the writer every 30 lines: + if (execReport.nbRows % 30 == 0) + writer.flush(); + } + + writer.write(""); + writer.newLine(); + writer.flush(); + } + +} diff --git a/src/tap/formatter/JSONFormat.java b/src/tap/formatter/JSONFormat.java index c15625fcff4fae81a515543080c53995756b6750..4e808896f81f8c0899436981b00480b4d1f23866 100644 --- a/src/tap/formatter/JSONFormat.java +++ b/src/tap/formatter/JSONFormat.java @@ -16,142 +16,275 @@ package tap.formatter; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . 
* - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ +import java.io.BufferedWriter; import java.io.IOException; import java.io.OutputStream; -import java.io.PrintWriter; +import java.io.OutputStreamWriter; import org.json.JSONException; import org.json.JSONWriter; -import cds.savot.writer.SavotWriter; - -import adql.db.DBColumn; - import tap.ServiceConnection; import tap.TAPException; import tap.TAPExecutionReport; +import tap.data.TableIterator; import tap.metadata.TAPColumn; -import tap.metadata.TAPTypes; - -public abstract class JSONFormat< R > implements OutputFormat { +import tap.metadata.VotType; +import adql.db.DBColumn; +import adql.db.DBType; +import adql.db.DBType.DBDatatype; - /** Indicates whether a format report (start and end date/time) must be printed in the log output. */ - private boolean logFormatReport; +/** + * Format any given query (table) result into JSON. + * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (04/2015) + */ +public class JSONFormat implements OutputFormat { /** The {@link ServiceConnection} to use (for the log and to have some information about the service (particularly: name, description). */ - protected final ServiceConnection service; + protected final ServiceConnection service; - public JSONFormat(final ServiceConnection service){ - this(service, false); - } + /** + * Build a JSON formatter. + * + * @param service Description of the TAP service. + * + * @throws NullPointerException If the given service connection is null. 
+ */ + public JSONFormat(final ServiceConnection service) throws NullPointerException{ + if (service == null) + throw new NullPointerException("The given service connection is NULL!"); - public JSONFormat(final ServiceConnection service, final boolean logFormatReport){ this.service = service; - this.logFormatReport = logFormatReport; } + @Override public String getMimeType(){ return "application/json"; } + @Override public String getShortMimeType(){ return "json"; } + @Override public String getDescription(){ return null; } + @Override public String getFileExtension(){ return "json"; } @Override - public void writeResult(R queryResult, OutputStream output, TAPExecutionReport execReport, Thread thread) throws TAPException, InterruptedException{ + public void writeResult(TableIterator result, OutputStream output, TAPExecutionReport execReport, Thread thread) throws TAPException, IOException, InterruptedException{ try{ - long start = System.currentTimeMillis(); - PrintWriter writer = new PrintWriter(output); + // Prepare the output stream for JSON: + BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(output)); JSONWriter out = new JSONWriter(writer); + // { out.object(); + // "metadata": [...] out.key("metadata"); - DBColumn[] columns = writeMetadata(queryResult, out, execReport, thread); + + // Write metadata part: + DBColumn[] columns = writeMetadata(result, out, execReport, thread); writer.flush(); + if (thread.isInterrupted()) + throw new InterruptedException(); + + // "data": [...] 
out.key("data"); - int nbRows = writeData(queryResult, columns, out, execReport, thread); + // Write the data part: + writeData(result, columns, out, execReport, thread); + + // } out.endObject(); writer.flush(); - if (logFormatReport) - service.getLogger().info("JOB " + execReport.jobID + " WRITTEN\tResult formatted (in JSON ; " + nbRows + " rows ; " + columns.length + " columns) in " + (System.currentTimeMillis() - start) + " ms !"); }catch(JSONException je){ - throw new TAPException("Error while writing a query result in JSON !", je); - }catch(IOException ioe){ - throw new TAPException("Error while writing a query result in JSON !", ioe); + throw new TAPException(je.getMessage(), je); } } - protected abstract DBColumn[] writeMetadata(R queryResult, JSONWriter out, TAPExecutionReport execReport, Thread thread) throws IOException, TAPException, InterruptedException, JSONException; + /** + * Write the whole metadata part of the JSON file. + * + * @param result Result to write later (but it contains also metadata that was extracted from the result itself). + * @param out Output stream in which the metadata must be written. + * @param execReport Execution report (which contains the metadata extracted/guessed from the ADQL query). + * @param thread Thread which has asked for this formatting (it must be used in order to test the {@link Thread#isInterrupted()} flag and so interrupt everything if need). + * + * @return All the written metadata. + * + * @throws IOException If there is an error while writing something in the output stream. + * @throws InterruptedException If the thread has been interrupted. + * @throws JSONException If there is an error while formatting something in JSON. + * @throws TAPException If any other error occurs. 
+ * + * @see #getValidColMeta(DBColumn, TAPColumn) + */ + protected DBColumn[] writeMetadata(TableIterator result, JSONWriter out, TAPExecutionReport execReport, Thread thread) throws IOException, TAPException, InterruptedException, JSONException{ + out.array(); + + // Get the metadata extracted/guesses from the ADQL query: + DBColumn[] columnsFromQuery = execReport.resultingColumns; + + // Get the metadata extracted from the result: + TAPColumn[] columnsFromResult = result.getMetadata(); + + int indField = 0; + if (columnsFromQuery != null){ + + // For each column: + for(DBColumn field : columnsFromQuery){ + + // Try to build/get appropriate metadata for this field/column: + TAPColumn colFromResult = (columnsFromResult != null && indField < columnsFromResult.length) ? columnsFromResult[indField] : null; + TAPColumn tapCol = getValidColMeta(field, colFromResult); + + // Ensure these metadata are well returned at the end of this function: + columnsFromQuery[indField] = tapCol; + + // Write the field/column metadata in the JSON output: + writeFieldMeta(tapCol, out); + indField++; + } + } + + out.endArray(); + return columnsFromQuery; + } /** - *

    Formats in a VOTable field and writes the given {@link TAPColumn} in the given Writer.

    + * Try to get or otherwise to build appropriate metadata using those extracted from the ADQL query and those extracted from the result. * - *

    Note: If the VOTable datatype is int, short or long a NULL values is set by adding a node VALUES: <VALUES null="..." />

    + * @param typeFromQuery Metadata extracted/guessed from the ADQL query. + * @param typeFromResult Metadata extracted/guessed from the result. * - * @param col The column metadata to format into a VOTable field. + * @return The most appropriate metadata. + */ + protected TAPColumn getValidColMeta(final DBColumn typeFromQuery, final TAPColumn typeFromResult){ + if (typeFromQuery != null && typeFromQuery instanceof TAPColumn) + return (TAPColumn)typeFromQuery; + else if (typeFromResult != null){ + if (typeFromQuery != null) + return (TAPColumn)typeFromResult.copy(typeFromQuery.getDBName(), typeFromQuery.getADQLName(), null); + else + return (TAPColumn)typeFromResult.copy(); + }else + return new TAPColumn((typeFromQuery != null) ? typeFromQuery.getADQLName() : "?", new DBType(DBDatatype.VARCHAR), "?"); + } + + /** + * Formats in JSON and writes the given {@link TAPColumn} in the given output. + * + * @param tapCol The column metadata to format/write in JSON. * @param out The stream in which the formatted column metadata must be written. * * @throws IOException If there is an error while writing the field metadata. + * @throws JSONException If there is an error while formatting something in JSON format. * @throws TAPException If there is any other error (by default: never happen). */ protected void writeFieldMeta(TAPColumn tapCol, JSONWriter out) throws IOException, TAPException, JSONException{ + // { out.object(); - out.key("name").value(tapCol.getName()); + // "name": "..." + out.key("name").value(tapCol.getADQLName()); + // "description": "..." (if any) if (tapCol.getDescription() != null && tapCol.getDescription().trim().length() > 0) out.key("description").value(tapCol.getDescription()); - out.key("datatype").value(tapCol.getVotType().datatype); + // "datatype": "..." 
+ VotType votType = new VotType(tapCol.getDatatype()); + out.key("datatype").value(votType.datatype); - int arraysize = tapCol.getVotType().arraysize; - if (arraysize == TAPTypes.STAR_SIZE) - out.key("arraysize").value("*"); - else if (arraysize > 0) - out.key("arraysize").value(arraysize); + // "arraysize": "..." (if any) + if (votType.arraysize != null) + out.key("arraysize").value(votType.arraysize); - if (tapCol.getVotType().xtype != null) - out.key("xtype").value(tapCol.getVotType().xtype); + // "xtype": "..." (if any) + if (votType.xtype != null) + out.key("xtype").value(votType.xtype); + // "unit": "..." (if any) if (tapCol.getUnit() != null && tapCol.getUnit().length() > 0) out.key("unit").value(tapCol.getUnit()); + // "ucd": "..." (if any) if (tapCol.getUcd() != null && tapCol.getUcd().length() > 0) out.key("ucd").value(tapCol.getUcd()); + // "utype": "..." (if any) if (tapCol.getUtype() != null && tapCol.getUtype().length() > 0) out.key("utype").value(tapCol.getUtype()); + // } out.endObject(); } - protected abstract int writeData(R queryResult, DBColumn[] selectedColumns, JSONWriter out, TAPExecutionReport execReport, Thread thread) throws IOException, TAPException, InterruptedException, JSONException; + /** + * Write the whole data part of the JSON file. + * + * @param result Result to write. + * @param selectedColumns All columns' metadata. + * @param out Output stream in which the data must be written. + * @param execReport Execution report (which contains the maximum allowed number of records to output). + * @param thread Thread which has asked for this formatting (it must be used in order to test the {@link Thread#isInterrupted()} flag and so interrupt everything if need). + * + * @throws IOException If there is an error while writing something in the output stream. + * @throws InterruptedException If the thread has been interrupted. + * @throws JSONException If there is an error while formatting something in JSON. 
+ * @throws TAPException If any other error occurs. + */ + protected void writeData(TableIterator result, DBColumn[] selectedColumns, JSONWriter out, TAPExecutionReport execReport, Thread thread) throws IOException, TAPException, InterruptedException, JSONException{ + // [ + out.array(); + + execReport.nbRows = 0; + while(result.nextRow()){ + // Stop right now the formatting if the job has been aborted/canceled/interrupted: + if (thread.isInterrupted()) + throw new InterruptedException(); + + // Deal with OVERFLOW, if needed: + if (execReport.parameters.getMaxRec() > 0 && execReport.nbRows >= execReport.parameters.getMaxRec()) + break; + + // [ + out.array(); + int indCol = 0; + while(result.hasNextCol()) + // ... + writeFieldValue(result.nextCol(), selectedColumns[indCol++], out); + // ] + out.endArray(); + execReport.nbRows++; + } + + // ] + out.endArray(); + } /** - *

    Writes the given field value in the given OutputStream.

    + *

    Writes the given field value in JSON and into the given output.

    * - *

    - * The given value will be encoded as an XML element (see {@link SavotWriter#encodeElement(String)}. - * Besides, if the given value is null and if the column datatype is int, - * short or long, the NULL values declared in the field metadata will be written.

    + *

    note: special numeric values NaN and Inf (double or float) will be written as NULL values.

    * * @param value The value to write. * @param column The corresponding column metadata. diff --git a/src/tap/formatter/OutputFormat.java b/src/tap/formatter/OutputFormat.java index 13fa7c22bf77f3efcd09b80fbda9e092bb407e3a..f450c4be68bfb563865e5d4607d186ac92e6d8b9 100644 --- a/src/tap/formatter/OutputFormat.java +++ b/src/tap/formatter/OutputFormat.java @@ -16,26 +16,25 @@ package tap.formatter; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Astronomisches Rechen Institut (ARI) */ +import java.io.IOException; import java.io.OutputStream; import tap.TAPException; import tap.TAPExecutionReport; +import tap.data.TableIterator; /** * Describes an output format and formats a given query result into this format. * - * @author Grégory Mantelet (CDS) - * - * @param The type of raw query result (i.e. {@link java.sql.ResultSet}). + * @author Grégory Mantelet (CDS;ARI) * - * @version 06/2012 - * - * @see VOTableFormat + * @version 2.0 (03/2015) */ -public interface OutputFormat< R > { +public interface OutputFormat { /** * Gets the MIME type corresponding to this format. @@ -66,27 +65,19 @@ public interface OutputFormat< R > { public String getFileExtension(); /** - * Formats the given query result and writes it in the given output stream. - * - * @param queryResult The raw result to format (i.e. a {@link java.sql.ResultSet}). - * @param output The output stream (a ServletOutputStream or a stream on a file) in which the formatted result must be written. - * @param execReport The report of the execution of the TAP query whose the result must be now written. - * @param thread The thread which has asked the result writting. - * - * @throws TAPException If there is an error while formatting/writing the query result. 
- */ - public void writeResult(final R queryResult, final OutputStream output, final TAPExecutionReport execReport, final Thread thread) throws TAPException, InterruptedException; - - /* - * Formats the given query result and writes it in some way accessible through the returned {@link Result}. + *

    Formats the given query result and writes it in the given output stream.

    * - * @param queryResult The raw result to format (i.e. a {@link java.sql.ResultSet}). - * @param job The job which processed the query. + *

    Note: the given output stream should not be closed at the end of this function. It is up to the called to do it.

    * - * @return The {@link Result} which provides an access to the formatted query result. + * @param result The raw (table) result to format. + * @param output The output stream (a ServletOutputStream or a stream on a file) in which the formatted result must be written. + * @param execReport The report of the execution of the TAP query whose the result must be now written. + * @param thread The thread which has asked the result writing. * - * @throws TAPException If there is an error while formatting/writing the query result. - * - public Result writeResult(final R queryResult, final TAPJob job) throws TAPException;*/ + * @throws TAPException If there is an error while formatting the query result. + * @throws IOException If any error occurs while writing into the given stream. + * @throws InterruptedException If the query has been interrupted/aborted. + */ + public void writeResult(final TableIterator result, final OutputStream output, final TAPExecutionReport execReport, final Thread thread) throws TAPException, IOException, InterruptedException; } diff --git a/src/tap/formatter/ResultSet2JsonFormatter.java b/src/tap/formatter/ResultSet2JsonFormatter.java deleted file mode 100644 index d7e47f860da0a97f689eb83d1bdf439e88a56b04..0000000000000000000000000000000000000000 --- a/src/tap/formatter/ResultSet2JsonFormatter.java +++ /dev/null @@ -1,121 +0,0 @@ -package tap.formatter; - -/* - * This file is part of TAPLibrary. - * - * TAPLibrary is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * TAPLibrary is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public License - * along with TAPLibrary. If not, see . - * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) - */ - -import java.io.IOException; - -import java.sql.ResultSet; -import java.sql.ResultSetMetaData; -import java.sql.SQLException; - -import org.json.JSONException; -import org.json.JSONWriter; - -import tap.ServiceConnection; -import tap.TAPException; -import tap.TAPExecutionReport; - -import tap.metadata.TAPColumn; -import tap.metadata.TAPTypes; - -import adql.db.DBColumn; - -public class ResultSet2JsonFormatter extends JSONFormat implements ResultSetFormatter { - - public ResultSet2JsonFormatter(ServiceConnection service, boolean logFormatReport){ - super(service, logFormatReport); - } - - public ResultSet2JsonFormatter(ServiceConnection service){ - super(service); - } - - @Override - protected DBColumn[] writeMetadata(ResultSet queryResult, JSONWriter out, TAPExecutionReport execReport, Thread thread) throws IOException, TAPException, InterruptedException, JSONException{ - out.array(); - DBColumn[] selectedColumns = execReport.resultingColumns; - - try{ - ResultSetMetaData meta = queryResult.getMetaData(); - int indField = 1; - if (selectedColumns != null){ - for(DBColumn field : selectedColumns){ - TAPColumn tapCol = null; - try{ - tapCol = (TAPColumn)field; - }catch(ClassCastException ex){ - tapCol = new TAPColumn(field.getADQLName()); - tapCol.setDatatype(meta.getColumnTypeName(indField), TAPTypes.NO_SIZE); - service.getLogger().warning("Unknown DB datatype for the field \"" + tapCol.getName() + "\" ! 
It is supposed to be \"" + tapCol.getDatatype() + "\" (original value: \"" + meta.getColumnTypeName(indField) + "\")."); - selectedColumns[indField - 1] = tapCol; - } - writeFieldMeta(tapCol, out); - indField++; - - if (thread.isInterrupted()) - throw new InterruptedException(); - } - } - }catch(SQLException e){ - service.getLogger().error("Job N°" + execReport.jobID + " - Impossible to get the metadata of the given ResultSet !", e); - } - - out.endArray(); - return selectedColumns; - } - - @Override - protected int writeData(ResultSet queryResult, DBColumn[] selectedColumns, JSONWriter out, TAPExecutionReport execReport, Thread thread) throws IOException, TAPException, InterruptedException, JSONException{ - out.array(); - int nbRows = 0; - try{ - int nbColumns = queryResult.getMetaData().getColumnCount(); - while(queryResult.next()){ - if (execReport.parameters.getMaxRec() > 0 && nbRows >= execReport.parameters.getMaxRec()) // that's to say: OVERFLOW ! - break; - - out.array(); - Object value; - for(int i = 1; i <= nbColumns; i++){ - value = formatValue(queryResult.getObject(i), selectedColumns[i - 1]); - writeFieldValue(value, selectedColumns[i - 1], out); - if (thread.isInterrupted()) - throw new InterruptedException(); - } - out.endArray(); - nbRows++; - - if (thread.isInterrupted()) - throw new InterruptedException(); - } - }catch(SQLException se){ - throw new TAPException("Job N°" + execReport.jobID + " - Impossible to get the " + (nbRows + 1) + "-th rows from the given ResultSet !", se); - } - - out.endArray(); - return nbRows; - } - - @Override - public Object formatValue(Object value, DBColumn colMeta){ - return value; - } - -} diff --git a/src/tap/formatter/ResultSet2SVFormatter.java b/src/tap/formatter/ResultSet2SVFormatter.java deleted file mode 100644 index 628957071ee609e48f2db64d99bfde961803610d..0000000000000000000000000000000000000000 --- a/src/tap/formatter/ResultSet2SVFormatter.java +++ /dev/null @@ -1,105 +0,0 @@ -package tap.formatter; - -/* - 
* This file is part of TAPLibrary. - * - * TAPLibrary is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * TAPLibrary is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with TAPLibrary. If not, see . - * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) - */ - -import java.io.IOException; -import java.io.PrintWriter; - -import java.sql.ResultSet; -import java.sql.SQLException; - -import adql.db.DBColumn; - -import tap.ServiceConnection; -import tap.TAPException; -import tap.TAPExecutionReport; - -public class ResultSet2SVFormatter extends SVFormat implements ResultSetFormatter { - - public ResultSet2SVFormatter(final ServiceConnection service, char colSeparator, boolean delimitStrings){ - super(service, colSeparator, delimitStrings); - } - - public ResultSet2SVFormatter(final ServiceConnection service, char colSeparator){ - super(service, colSeparator); - } - - public ResultSet2SVFormatter(final ServiceConnection service, String colSeparator, boolean delimitStrings){ - super(service, colSeparator, delimitStrings); - } - - public ResultSet2SVFormatter(final ServiceConnection service, String colSeparator){ - super(service, colSeparator); - } - - @Override - protected DBColumn[] writeMetadata(ResultSet queryResult, PrintWriter writer, TAPExecutionReport execReport, Thread thread) throws IOException, TAPException, InterruptedException{ - DBColumn[] selectedColumns = execReport.resultingColumns; - int nbColumns = (selectedColumns == null) ? 
-1 : selectedColumns.length; - if (nbColumns > 0){ - for(int i = 0; i < nbColumns - 1; i++){ - writer.print(selectedColumns[i].getADQLName()); - writer.print(separator); - } - writer.print(selectedColumns[nbColumns - 1].getADQLName()); - writer.println(); - writer.flush(); - } - return selectedColumns; - } - - @Override - protected int writeData(ResultSet queryResult, DBColumn[] selectedColumns, PrintWriter writer, TAPExecutionReport execReport, Thread thread) throws IOException, TAPException, InterruptedException{ - int nbRows = 0; - try{ - int nbColumns = queryResult.getMetaData().getColumnCount(); - while(queryResult.next()){ - if (execReport.parameters.getMaxRec() > 0 && nbRows >= execReport.parameters.getMaxRec()) // that's to say: OVERFLOW ! - break; - - Object value; - for(int i = 1; i <= nbColumns; i++){ - value = formatValue(queryResult.getObject(i), selectedColumns[i - 1]); - writeFieldValue(value, selectedColumns[i - 1], writer); - if (i != nbColumns) - writer.print(separator); - if (thread.isInterrupted()) - throw new InterruptedException(); - } - writer.println(); - nbRows++; - - if (thread.isInterrupted()) - throw new InterruptedException(); - } - writer.flush(); - }catch(SQLException se){ - throw new TAPException("Job N°" + execReport.jobID + " - Impossible to get the " + (nbRows + 1) + "-th rows from the given ResultSet !", se); - } - - return nbRows; - } - - @Override - public Object formatValue(Object value, DBColumn colMeta){ - return value; - } - -} diff --git a/src/tap/formatter/ResultSet2TextFormatter.java b/src/tap/formatter/ResultSet2TextFormatter.java deleted file mode 100644 index 045bf804ec630856dc7032f3a832fb4de844e1de..0000000000000000000000000000000000000000 --- a/src/tap/formatter/ResultSet2TextFormatter.java +++ /dev/null @@ -1,85 +0,0 @@ -package tap.formatter; - -/* - * This file is part of TAPLibrary. 
- * - * TAPLibrary is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * TAPLibrary is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with TAPLibrary. If not, see . - * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) - */ - -import java.sql.ResultSet; -import java.sql.SQLException; - -import adql.db.DBColumn; - -import cds.util.AsciiTable; - -import tap.ServiceConnection; -import tap.TAPException; -import tap.TAPExecutionReport; - -public class ResultSet2TextFormatter extends TextFormat implements ResultSetFormatter { - - public ResultSet2TextFormatter(ServiceConnection service){ - super(service); - } - - @Override - protected String getHeader(ResultSet queryResult, TAPExecutionReport execReport, Thread thread) throws TAPException{ - DBColumn[] selectedColumns = execReport.resultingColumns; - StringBuffer line = new StringBuffer(); - int nbColumns = (selectedColumns == null) ? 
-1 : selectedColumns.length; - if (nbColumns > 0){ - for(int i = 0; i < nbColumns - 1; i++) - line.append(selectedColumns[i].getADQLName()).append('|'); - line.append(selectedColumns[nbColumns - 1].getADQLName()); - } - return line.toString(); - } - - @Override - protected int writeData(ResultSet queryResult, AsciiTable asciiTable, TAPExecutionReport execReport, Thread thread) throws TAPException{ - int nbRows = 0; - try{ - DBColumn[] selectedColumns = execReport.resultingColumns; - int nbColumns = selectedColumns.length; - StringBuffer line = new StringBuffer(); - while(queryResult.next()){ - if (execReport.parameters.getMaxRec() > 0 && nbRows >= execReport.parameters.getMaxRec()) // that's to say: OVERFLOW ! - break; - - line.delete(0, line.length()); - Object value; - for(int i = 1; i <= nbColumns; i++){ - value = formatValue(queryResult.getObject(i), selectedColumns[i - 1]); - writeFieldValue(value, selectedColumns[i - 1], line); - if (i != nbColumns) - line.append('|'); - } - asciiTable.addLine(line.toString()); - nbRows++; - } - }catch(SQLException se){ - throw new TAPException("Job N°" + execReport.jobID + " - Impossible to get the " + (nbRows + 1) + "-th rows from the given ResultSet !", se); - } - return nbRows; - } - - @Override - public Object formatValue(Object value, DBColumn colMeta){ - return value; - } - -} diff --git a/src/tap/formatter/ResultSet2VotableFormatter.java b/src/tap/formatter/ResultSet2VotableFormatter.java deleted file mode 100644 index cc841bda7d57f4635c187bde4edaffeff0b2cc2c..0000000000000000000000000000000000000000 --- a/src/tap/formatter/ResultSet2VotableFormatter.java +++ /dev/null @@ -1,127 +0,0 @@ -package tap.formatter; - -/* - * This file is part of TAPLibrary. - * - * TAPLibrary is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * TAPLibrary is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with TAPLibrary. If not, see . - * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) - */ - -import java.io.IOException; - -import tap.TAPExecutionReport; -import tap.TAPException; - -import java.io.OutputStream; -import java.io.PrintWriter; - -import java.sql.ResultSet; -import java.sql.ResultSetMetaData; -import java.sql.SQLException; - -import tap.ServiceConnection; -import tap.metadata.TAPColumn; -import tap.metadata.TAPTypes; - -import adql.db.DBColumn; - -/** - * Formats a {@link ResultSet} into a VOTable. - * - * @author Grégory Mantelet (CDS) - * @version 11/2011 - */ -public class ResultSet2VotableFormatter extends VOTableFormat implements ResultSetFormatter { - - public ResultSet2VotableFormatter(final ServiceConnection service) throws NullPointerException{ - super(service); - } - - public ResultSet2VotableFormatter(final ServiceConnection service, final boolean logFormatReport) throws NullPointerException{ - super(service, logFormatReport); - } - - @Override - protected DBColumn[] writeMetadata(final ResultSet queryResult, final PrintWriter output, final TAPExecutionReport execReport, final Thread thread) throws IOException, TAPException, InterruptedException{ - DBColumn[] selectedColumns = execReport.resultingColumns; - try{ - ResultSetMetaData meta = queryResult.getMetaData(); - int indField = 1; - if (selectedColumns != null){ - for(DBColumn field : selectedColumns){ - TAPColumn tapCol = null; - try{ - tapCol = (TAPColumn)field; - }catch(ClassCastException ex){ - tapCol = new TAPColumn(field.getADQLName()); - tapCol.setDatatype(meta.getColumnTypeName(indField), TAPTypes.NO_SIZE); - 
service.getLogger().warning("Unknown DB datatype for the field \"" + tapCol.getName() + "\" ! It is supposed to be \"" + tapCol.getDatatype() + "\" (original value: \"" + meta.getColumnTypeName(indField) + "\")."); - selectedColumns[indField - 1] = tapCol; - } - writeFieldMeta(tapCol, output); - indField++; - - if (thread.isInterrupted()) - throw new InterruptedException(); - } - } - }catch(SQLException e){ - service.getLogger().error("Job N°" + execReport.jobID + " - Impossible to get the metadata of the given ResultSet !", e); - output.println("Error while getting field(s) metadata"); - } - return selectedColumns; - } - - @Override - protected int writeData(final ResultSet queryResult, final DBColumn[] selectedColumns, final OutputStream output, final TAPExecutionReport execReport, final Thread thread) throws IOException, TAPException, InterruptedException{ - int nbRows = 0; - try{ - output.write("\t\t\t\t\n".getBytes()); - int nbColumns = queryResult.getMetaData().getColumnCount(); - while(queryResult.next()){ - if (execReport.parameters.getMaxRec() > 0 && nbRows >= execReport.parameters.getMaxRec()) - break; - - output.write("\t\t\t\t\t\n".getBytes()); - Object value; - for(int i = 1; i <= nbColumns; i++){ - output.write("\t\t\t\t\t\t".getBytes()); - value = formatValue(queryResult.getObject(i), selectedColumns[i - 1]); - writeFieldValue(value, selectedColumns[i - 1], output); - output.write("\n".getBytes()); - - if (thread.isInterrupted()) - throw new InterruptedException(); - } - - output.write("\t\t\t\t\t\n".getBytes()); - nbRows++; - - if (thread.isInterrupted()) - throw new InterruptedException(); - } - output.write("\t\t\t\t\n".getBytes()); - return nbRows; - }catch(SQLException e){ - throw new TAPException("Job N°" + execReport.jobID + " - Impossible to get the " + (nbRows + 1) + "-th rows from the given ResultSet !", e); - } - } - - @Override - public Object formatValue(Object value, DBColumn colMeta){ - return value; - } - -} diff --git 
a/src/tap/formatter/SVFormat.java b/src/tap/formatter/SVFormat.java index 259fff2ef5129a7f1013c079edd7080c4eed363c..5c0c19d7265b559547f51b4aa2ac65ed9c474217 100644 --- a/src/tap/formatter/SVFormat.java +++ b/src/tap/formatter/SVFormat.java @@ -16,71 +16,189 @@ package tap.formatter; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ +import java.io.BufferedWriter; import java.io.IOException; import java.io.OutputStream; -import java.io.PrintWriter; +import java.io.OutputStreamWriter; -import cds.savot.writer.SavotWriter; -import adql.db.DBColumn; import tap.ServiceConnection; import tap.TAPException; import tap.TAPExecutionReport; +import tap.data.TableIterator; +import adql.db.DBColumn; -public abstract class SVFormat< R > implements OutputFormat { - - /** Indicates whether a format report (start and end date/time) must be printed in the log output. */ - private boolean logFormatReport; +/** + * Format any given query (table) result into CSV or TSV (or with custom separator). + * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (04/2015) + */ +public class SVFormat implements OutputFormat { + /** Column separator for CSV format. */ public static final char COMMA_SEPARATOR = ','; + /** Column separator for sCSV format. */ public static final char SEMI_COLON_SEPARATOR = ';'; + /** Column separator for TSV format. */ public static final char TAB_SEPARATOR = '\t'; - protected final ServiceConnection service; + /** The {@link ServiceConnection} to use (for the log and to have some information about the service (particularly: name, description). */ + protected final ServiceConnection service; + /** Column separator to use. 
*/ protected final String separator; + + /** Indicate whether String values must be delimited by double quotes (default) or not. */ protected final boolean delimitStr; - public SVFormat(final ServiceConnection service, char colSeparator){ + /** MIME type associated with this format. + * @since 1.1 */ + protected final String mimeType; + + /** Alias of the MIME type associated with this format. + * @since 1.1 */ + protected final String shortMimeType; + + /** + * Build a SVFormat (in which String values are delimited by double quotes). + * + * @param service Description of the TAP service. + * @param colSeparator Column separator to use. + * + * @throws NullPointerException If the given service connection is null. + */ + public SVFormat(final ServiceConnection service, char colSeparator) throws NullPointerException{ this(service, colSeparator, true); } - public SVFormat(final ServiceConnection service, char colSeparator, boolean delimitStrings){ - this(service, colSeparator, delimitStrings, false); + /** + * Build a SVFormat. + * + * @param service Description of the TAP service. + * @param colSeparator Column separator to use. + * @param delimitStrings true if String values must be delimited by double quotes, false otherwise. + * + * @throws NullPointerException If the given service connection is null. + */ + public SVFormat(final ServiceConnection service, char colSeparator, boolean delimitStrings) throws NullPointerException{ + this(service, colSeparator, delimitStrings, null, null); } - public SVFormat(final ServiceConnection service, char colSeparator, boolean delimitStrings, final boolean logFormatReport){ - separator = "" + colSeparator; - delimitStr = delimitStrings; - this.service = service; - this.logFormatReport = logFormatReport; + /** + * Build a SVFormat. + * + * @param service Description of the TAP service. + * @param colSeparator Column separator to use. 
+ * @param delimitStrings true if String values must be delimited by double quotes, false otherwise. + * @param mime The MIME type to associate with this format. note: this MIME type is then used by a user to specify the result format he wants. + * @param shortMime The alias of the MIME type to associate with this format. note: this short MIME type is then used by a user to specify the result format he wants. + * + * @throws NullPointerException If the given service connection is null. + * + * @since 2.0 + */ + public SVFormat(final ServiceConnection service, char colSeparator, boolean delimitStrings, final String mime, final String shortMime) throws NullPointerException{ + this(service, "" + colSeparator, delimitStrings, mime, shortMime); } - public SVFormat(final ServiceConnection service, String colSeparator){ + /** + * Build a SVFormat (in which String values are delimited by double quotes). + * + * @param service Description of the TAP service. + * @param colSeparator Column separator to use. + * + * @throws NullPointerException If the given service connection is null. + */ + public SVFormat(final ServiceConnection service, String colSeparator) throws NullPointerException{ this(service, colSeparator, true); } - public SVFormat(final ServiceConnection service, String colSeparator, boolean delimitStrings){ - separator = (colSeparator == null) ? ("" + COMMA_SEPARATOR) : colSeparator; + /** + * Build a SVFormat. + * + * @param service Description of the TAP service. + * @param colSeparator Column separator to use. + * @param delimitStrings true if String values must be delimited by double quotes, false otherwise. + * + * @throws NullPointerException If the given service connection is null. + */ + public SVFormat(final ServiceConnection service, String colSeparator, boolean delimitStrings) throws NullPointerException{ + this(service, colSeparator, delimitStrings, null, null); + } + + /** + * Build a SVFormat. + * + * @param service Description of the TAP service. 
+ * @param colSeparator Column separator to use. + * @param delimitStrings true if String values must be delimited by double quotes, false otherwise. + * @param mime The MIME type to associate with this format. note: this MIME type is then used by a user to specify the result format he wants. + * @param shortMime The alias of the MIME type to associate with this format. note: this short MIME type is then used by a user to specify the result format he wants. + * + * @throws NullPointerException If the given service connection is null. + * + * @since 2.0 + */ + public SVFormat(final ServiceConnection service, String colSeparator, boolean delimitStrings, final String mime, final String shortMime) throws NullPointerException{ + if (service == null) + throw new NullPointerException("The given service connection is NULL!"); + + separator = (colSeparator == null || colSeparator.length() <= 0) ? ("" + COMMA_SEPARATOR) : colSeparator; delimitStr = delimitStrings; + mimeType = (mime == null || mime.trim().length() <= 0) ? guessMimeType(separator) : mime; + shortMimeType = (shortMime == null || shortMime.trim().length() <= 0) ? guessShortMimeType(separator) : shortMime; this.service = service; } - public String getMimeType(){ + /** + *

    Try to guess the MIME type to associate with this SV format, in function of the column separator.

    + * + *

    + * By default, only "," or ";" (text/csv) and [TAB] (text/tab-separated-values) are supported. + * If the separator is unknown, "text/plain" will be returned. + *

    + * + *

    Note: In order to automatically guess more MIME types, you should overwrite this function.

    + * + * @param separator Column separator of this SV format. + * + * @return The guessed MIME type. + * + * @since 2.0 + */ + protected String guessMimeType(final String separator){ switch(separator.charAt(0)){ case COMMA_SEPARATOR: case SEMI_COLON_SEPARATOR: return "text/csv"; case TAB_SEPARATOR: - return "text/tsv"; + return "text/tab-separated-values"; default: return "text/plain"; } } - public String getShortMimeType(){ + /** + *

    Try to guess the short MIME type to associate with this SV format, in function of the column separator.

    + * + *

    + * By default, only "," or ";" (csv) and [TAB] (tsv) are supported. + * If the separator is unknown, "text" will be returned. + *

    + * + *

    Note: In order to automatically guess more short MIME types, you should overwrite this function.

    + * + * @param separator Column separator of this SV format. + * + * @return The guessed short MIME type. + * + * @since 2.0 + */ + protected String guessShortMimeType(final String separator){ switch(separator.charAt(0)){ case COMMA_SEPARATOR: case SEMI_COLON_SEPARATOR: @@ -92,10 +210,22 @@ public abstract class SVFormat< R > implements OutputFormat { } } + @Override + public final String getMimeType(){ + return mimeType; + } + + @Override + public final String getShortMimeType(){ + return shortMimeType; + } + + @Override public String getDescription(){ return null; } + @Override public String getFileExtension(){ switch(separator.charAt(0)){ case COMMA_SEPARATOR: @@ -109,55 +239,127 @@ public abstract class SVFormat< R > implements OutputFormat { } @Override - public void writeResult(R queryResult, OutputStream output, TAPExecutionReport execReport, Thread thread) throws TAPException, InterruptedException{ - try{ - final long startTime = System.currentTimeMillis(); + public void writeResult(TableIterator result, OutputStream output, TAPExecutionReport execReport, Thread thread) throws TAPException, IOException, InterruptedException{ + // Prepare the output stream: + final BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(output)); - final PrintWriter writer = new PrintWriter(output); + // Write header: + DBColumn[] columns = writeHeader(result, writer, execReport, thread); - // Write header: - DBColumn[] columns = writeMetadata(queryResult, writer, execReport, thread); + if (thread.isInterrupted()) + throw new InterruptedException(); - // Write data: - int nbRows = writeData(queryResult, columns, writer, execReport, thread); + // Write data: + writeData(result, columns, writer, execReport, thread); - writer.flush(); + writer.flush(); + } - if (logFormatReport) - service.getLogger().info("JOB " + execReport.jobID + " WRITTEN\tResult formatted (in SV[" + delimitStr + "] ; " + nbRows + " rows ; " + columns.length + " columns) in " + 
(System.currentTimeMillis() - startTime) + " ms !"); + /** + * Write the whole header (one row whose columns are just the columns' name). + * + * @param result Result to write later (but it contains also metadata that was extracted from the result itself). + * @param writer Output in which the metadata must be written. + * @param execReport Execution report (which contains the metadata extracted/guessed from the ADQL query). + * @param thread Thread which has asked for this formatting (it must be used in order to test the {@link Thread#isInterrupted()} flag and so interrupt everything if need). + * + * @return All the written metadata. + * + * @throws IOException If there is an error while writing something in the output. + * @throws InterruptedException If the thread has been interrupted. + * @throws TAPException If any other error occurs. + */ + protected DBColumn[] writeHeader(TableIterator result, BufferedWriter writer, TAPExecutionReport execReport, Thread thread) throws IOException, TAPException, InterruptedException{ + // Get the columns meta: + DBColumn[] selectedColumns = execReport.resultingColumns; + + // If meta are not known, no header will be written: + int nbColumns = (selectedColumns == null) ? -1 : selectedColumns.length; + if (nbColumns > 0){ + // Write all columns' name: + for(int i = 0; i < nbColumns - 1; i++){ + writer.write(selectedColumns[i].getADQLName()); + writer.write(separator); + } + writer.write(selectedColumns[nbColumns - 1].getADQLName()); - }catch(Exception ex){ - service.getLogger().error("While formatting in (T/C)SV !", ex); + // Go to a new line (in order to prepare the data writing): + writer.newLine(); + writer.flush(); } + + // Returns the written columns: + return selectedColumns; } - protected abstract DBColumn[] writeMetadata(R queryResult, PrintWriter writer, TAPExecutionReport execReport, Thread thread) throws IOException, TAPException, InterruptedException; + /** + * Write all the data rows. 
+ * + * @param result Result to write. + * @param selectedColumns All columns' metadata. + * @param writer Writer in which the data must be written. + * @param execReport Execution report (which contains the maximum allowed number of records to output). + * @param thread Thread which has asked for this formatting (it must be used in order to test the {@link Thread#isInterrupted()} flag and so interrupt everything if need). + * + * @throws IOException If there is an error while writing something in the given writer. + * @throws InterruptedException If the thread has been interrupted. + * @throws TAPException If any other error occurs. + */ + protected void writeData(TableIterator result, DBColumn[] selectedColumns, BufferedWriter writer, TAPExecutionReport execReport, Thread thread) throws IOException, TAPException, InterruptedException{ + execReport.nbRows = 0; + + while(result.nextRow()){ + // Stop right now the formatting if the job has been aborted/canceled/interrupted: + if (thread.isInterrupted()) + throw new InterruptedException(); + + // Deal with OVERFLOW, if needed: + if (execReport.parameters.getMaxRec() > 0 && execReport.nbRows >= execReport.parameters.getMaxRec()) // that's to say: OVERFLOW ! + break; - protected abstract int writeData(R queryResult, DBColumn[] selectedColumns, PrintWriter writer, TAPExecutionReport execReport, Thread thread) throws IOException, TAPException, InterruptedException; + int indCol = 0; + while(result.hasNextCol()){ + // Write the column value: + writeFieldValue(result.nextCol(), selectedColumns[indCol++], writer); + + // Append the column separator: + if (result.hasNextCol()) + writer.write(separator); + } + writer.newLine(); + + execReport.nbRows++; + + // flush the writer every 30 lines: + if (execReport.nbRows % 30 == 0) + writer.flush(); + } + writer.flush(); + } /** - *

    Writes the given field value in the given OutputStream.

    + *

    Writes the given field value in the given Writer.

    * *

    - * The given value will be encoded as an XML element (see {@link SavotWriter#encodeElement(String)}. - * Besides, if the given value is null and if the column datatype is int, - * short or long, the NULL values declared in the field metadata will be written.

    + * A String value will be delimited if {@link #delimitStr} is true, otherwise this type of value will + * be processed like the other type of values: no delimiter and just transformed into a string. + *

    * * @param value The value to write. * @param column The corresponding column metadata. - * @param out The stream in which the field value must be written. + * @param writer The stream in which the field value must be written. * * @throws IOException If there is an error while writing the given field value in the given stream. * @throws TAPException If there is any other error (by default: never happen). */ - protected void writeFieldValue(final Object value, final DBColumn column, final PrintWriter writer) throws IOException, TAPException{ + protected void writeFieldValue(final Object value, final DBColumn column, final BufferedWriter writer) throws IOException, TAPException{ if (value != null){ if ((delimitStr && value instanceof String) || value.toString().contains(separator)){ - writer.print('"'); - writer.print(value.toString().replaceAll("\"", "'")); - writer.print('"'); + writer.write('"'); + writer.write(value.toString().replaceAll("\"", "'")); + writer.write('"'); }else - writer.print(value.toString()); + writer.write(value.toString()); } } } diff --git a/src/tap/formatter/TextFormat.java b/src/tap/formatter/TextFormat.java index 9c5c73486ea6a506e802647ad805b9381c70dba8..7b6f513b754d3693ab2de0e5e20869938f165933 100644 --- a/src/tap/formatter/TextFormat.java +++ b/src/tap/formatter/TextFormat.java @@ -16,90 +16,214 @@ package tap.formatter; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . 
* - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ +import java.io.BufferedWriter; +import java.io.IOException; import java.io.OutputStream; - -import adql.db.DBColumn; - -import cds.util.AsciiTable; +import java.io.OutputStreamWriter; import tap.ServiceConnection; - import tap.TAPException; import tap.TAPExecutionReport; +import tap.data.TableIterator; +import adql.db.DBColumn; +import cds.util.AsciiTable; -public abstract class TextFormat< R > implements OutputFormat { - - /** Indicates whether a format report (start and end date/time) must be printed in the log output. */ - private boolean logFormatReport; - - protected final ServiceConnection service; - - public TextFormat(final ServiceConnection service){ - this(service, false); - } +/** + * Format any given query (table) result into a simple table ASCII representation + * (columns' width are adjusted so that all columns are well aligned and of the same width). + * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (04/2015) + */ +public class TextFormat implements OutputFormat { + + /** Internal column separator. + * Note: the output separator is however always a |. + * @since 2.0 */ + protected static final char COL_SEP = '\u25c6'; + + /** The {@link ServiceConnection} to use (for the log and to have some information about the service (particularly: name, description). */ + protected final ServiceConnection service; + + /** + * Build a {@link TextFormat}. + * + * @param service Description of the TAP service. + * + * @throws NullPointerException If the given service connection is null. 
+ */ + public TextFormat(final ServiceConnection service) throws NullPointerException{ + if (service == null) + throw new NullPointerException("The given service connection is NULL!"); - public TextFormat(final ServiceConnection service, final boolean logFormatReport){ this.service = service; - this.logFormatReport = logFormatReport; } + @Override public String getMimeType(){ return "text/plain"; } + @Override public String getShortMimeType(){ return "text"; } + @Override public String getDescription(){ return null; } + @Override public String getFileExtension(){ return "txt"; } @Override - public void writeResult(R queryResult, OutputStream output, TAPExecutionReport execReport, Thread thread) throws TAPException, InterruptedException{ - try{ - AsciiTable asciiTable = new AsciiTable('|'); + public void writeResult(TableIterator result, OutputStream output, TAPExecutionReport execReport, Thread thread) throws TAPException, IOException, InterruptedException{ + // Prepare the formatting of the whole output: + AsciiTable asciiTable = new AsciiTable(COL_SEP); + + // Write header: + String headerLine = getHeader(result, execReport, thread); + asciiTable.addHeaderLine(headerLine); + asciiTable.endHeaderLine(); + + if (thread.isInterrupted()) + throw new InterruptedException(); + + // Write data into the AsciiTable object: + boolean overflow = writeData(result, asciiTable, execReport, thread); + + // Finally write the formatted ASCII table (header + data) in the output stream: + BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(output)); + String[] lines = asciiTable.displayAligned(new int[]{AsciiTable.LEFT}, '|'); + execReport.nbRows = 0; + for(String l : lines){ + // stop right now the formatting if the job has been aborted/canceled/interrupted: + if (thread.isInterrupted()) + throw new InterruptedException(); + // write the line: + writer.write(l); + writer.newLine(); + // update the counter of written lines: + execReport.nbRows++; + // flush the writer 
every 30 lines: + if (execReport.nbRows % 30 == 0) + writer.flush(); + } - final long startTime = System.currentTimeMillis(); + // Add a line in case of an OVERFLOW: + if (overflow) + writer.write("\nOVERFLOW (more rows were available but have been truncated by the TAP service)"); + + writer.flush(); + } - // Write header: - String headerLine = getHeader(queryResult, execReport, thread); - asciiTable.addHeaderLine(headerLine); - asciiTable.endHeaderLine(); + /** + * Get the whole header (one row whose columns are just the columns' name). + * + * @param result Result to write later (but it contains also metadata that was extracted from the result itself). + * @param execReport Execution report (which contains the metadata extracted/guessed from the ADQL query). + * @param thread Thread which has asked for this formatting (it must be used in order to test the {@link Thread#isInterrupted()} flag and so interrupt everything if need). + * + * @return All the written metadata. + * + * @throws TAPException If any other error occurs. + */ + protected String getHeader(final TableIterator result, final TAPExecutionReport execReport, final Thread thread) throws TAPException{ + // Get the columns meta: + DBColumn[] selectedColumns = execReport.resultingColumns; + + StringBuffer line = new StringBuffer(); + + // If meta are not known, no header will be written: + int nbColumns = (selectedColumns == null) ? 
-1 : selectedColumns.length; + if (nbColumns > 0){ + + // Write all columns' name: + for(int i = 0; i < nbColumns - 1; i++) + line.append(selectedColumns[i].getADQLName()).append(COL_SEP); + line.append(selectedColumns[nbColumns - 1].getADQLName()); + } - // Write data: - int nbRows = writeData(queryResult, asciiTable, execReport, thread); + // Return the header line: + return line.toString(); + } - // Write all lines in the output stream: - String[] lines = asciiTable.displayAligned(new int[]{AsciiTable.LEFT}); - for(String l : lines){ - output.write(l.getBytes()); - output.write('\n'); + /** + * Write all the data rows into the given {@link AsciiTable} object. + * + * @param queryResult Result to write. + * @param asciiTable Output in which the rows (as string) must be written. + * @param execReport Execution report (which contains the maximum allowed number of records to output). + * @param thread Thread which has asked for this formatting (it must be used in order to test the {@link Thread#isInterrupted()} flag and so interrupt everything if need). + * + * @return true if an overflow (i.e. nbDBRows > MAXREC) is detected, false otherwise. + * + * @throws InterruptedException If the thread has been interrupted. + * @throws TAPException If any other error occurs. 
+ */ + protected boolean writeData(final TableIterator queryResult, final AsciiTable asciiTable, final TAPExecutionReport execReport, final Thread thread) throws TAPException, InterruptedException{ + execReport.nbRows = 0; + boolean overflow = false; + + // Get the list of columns: + DBColumn[] selectedColumns = execReport.resultingColumns; + int nbColumns = selectedColumns.length; + + StringBuffer line = new StringBuffer(); + while(queryResult.nextRow()){ + // Stop right now the formatting if the job has been aborted/canceled/interrupted: + if (thread.isInterrupted()) + throw new InterruptedException(); + + // Deal with OVERFLOW, if needed: + if (execReport.parameters.getMaxRec() > 0 && execReport.nbRows >= execReport.parameters.getMaxRec()){ + overflow = true; + break; } - output.flush(); - if (logFormatReport) - service.getLogger().info("JOB " + execReport.jobID + " WRITTEN\tResult formatted (in text ; " + nbRows + " rows ; " + ((execReport != null && execReport.resultingColumns != null) ? "?" 
: execReport.resultingColumns.length) + " columns) in " + (System.currentTimeMillis() - startTime) + " ms !"); + // Clear the line buffer: + line.delete(0, line.length()); - }catch(Exception ex){ - service.getLogger().error("While formatting in text/plain !", ex); - } - } + int indCol = 0; + while(queryResult.hasNextCol()){ - protected abstract String getHeader(final R queryResult, final TAPExecutionReport execReport, final Thread thread) throws TAPException; + // Write the column value: + writeFieldValue(queryResult.nextCol(), selectedColumns[indCol++], line); - protected abstract int writeData(final R queryResult, final AsciiTable asciiTable, final TAPExecutionReport execReport, final Thread thread) throws TAPException; + // Write the column separator (if needed): + if (indCol != nbColumns) + line.append(COL_SEP); + } + + // Append the line/row in the ASCII table: + asciiTable.addLine(line.toString()); + + execReport.nbRows++; + } + return overflow; + } + + /** + * Writes the given field value in the given buffer. + * + * @param value The value to write. + * @param tapCol The corresponding column metadata. + * @param line The buffer in which the field value must be written. + */ protected void writeFieldValue(final Object value, final DBColumn tapCol, final StringBuffer line){ - Object obj = value; - if (obj != null) - line.append(obj.toString()); + if (value != null){ + if (value instanceof String) + line.append('"').append(value.toString()).append('"'); + else + line.append(value.toString()); + } } } diff --git a/src/tap/formatter/VOTableFormat.java b/src/tap/formatter/VOTableFormat.java index e5b5625f2d281679f9e6efd6b103e8196db39070..f38b17eba48ce2bbc695c8771609d77dc7d77475 100644 --- a/src/tap/formatter/VOTableFormat.java +++ b/src/tap/formatter/VOTableFormat.java @@ -16,327 +16,697 @@ package tap.formatter; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . 
* - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Astronomisches Rechen Institut (ARI) */ +import java.io.BufferedWriter; import java.io.IOException; - -import tap.TAPExecutionReport; -import tap.TAPJob; -import tap.TAPException; - -import uws.job.Result; - import java.io.OutputStream; +import java.io.OutputStreamWriter; import java.io.PrintWriter; - -import cds.savot.writer.SavotWriter; +import java.util.Iterator; +import java.util.Map; import tap.ServiceConnection; +import tap.TAPException; +import tap.TAPExecutionReport; +import tap.data.DataReadException; +import tap.data.TableIterator; +import tap.error.DefaultTAPErrorWriter; import tap.metadata.TAPColumn; import tap.metadata.VotType; +import tap.metadata.VotType.VotDatatype; +import uk.ac.starlink.table.AbstractStarTable; +import uk.ac.starlink.table.ColumnInfo; +import uk.ac.starlink.table.DefaultValueInfo; +import uk.ac.starlink.table.DescribedValue; +import uk.ac.starlink.table.RowSequence; +import uk.ac.starlink.table.StarTable; +import uk.ac.starlink.votable.DataFormat; +import uk.ac.starlink.votable.VOSerializer; +import uk.ac.starlink.votable.VOTableVersion; import adql.db.DBColumn; +import adql.db.DBType; +import adql.db.DBType.DBDatatype; /** - *

    Formats the given type of query result in VOTable.

    - *

    - * This abstract class is only able to format the skeleton of the VOTable. - * However, it also provides useful methods to format field metadata and field value (including NULL values). - *

    + *

    Format any given query (table) result into VOTable.

    + * *

    - * Attributes of the VOTable node are by default set by this class but can be overridden if necessary thanks to the corresponding class attributes: - * {@link #votTableVersion}, {@link #xmlnsXsi}, {@link #xsiNoNamespaceSchemaLocation}, {@link #xsiSchemaLocation} and - * {@link #xmlns}. + * Format and version of the resulting VOTable can be provided in parameters at the construction time. + * This formatter is using STIL. So all formats and versions managed by STIL are also here. + * Basically, you have the following formats: TABLEDATA, BINARY, BINARY2 (only when using VOTable v1.3) and FITS. + * The versions are: 1.0, 1.1, 1.2 and 1.3. *

    + * + *

    Note: The MIME type is automatically set in function of the given VOTable serialization:

    + *
      + *
    • none or unknown: equivalent to BINARY
    • + *
    • BINARY: "application/x-votable+xml" = "votable"
    • + *
    • BINARY2: "application/x-votable+xml;serialization=BINARY2" = "votable/b2"
    • + *
    • TABLEDATA: "application/x-votable+xml;serialization=TABLEDATA" = "votable/td"
    • + *
    • FITS: "application/x-votable+xml;serialization=FITS" = "votable/fits"
    • + *
    + *

    It is however possible to change these default values thanks to {@link #setMimeType(String, String)}.

    + * + *

    In addition of the INFO elements for QUERY_STATUS="OK" and QUERY_STATUS="OVERFLOW", two additional INFO elements are written:

    + *
      + *
    • PROVIDER = {@link ServiceConnection#getProviderName()} and {@link ServiceConnection#getProviderDescription()}
    • + *
    • QUERY = the ADQL query at the origin of this result.
    • + *
    + * *

    - * When overridding this class, you must implement {@link #writeMetadata(Object, PrintWriter, TAPJob)} and - * {@link #writeData(Object, DBColumn[], OutputStream, TAPJob)}. - * Both are called by {@link #writeResult(Object, OutputStream, TAPJob)}. Finally you will also have to implement - * {@link #writeResult(Object, TAPJob)}, which must format the given result into a VOTable saved in some way accessible - * through the returned {@link Result}. + * Furthermore, this formatter provides a function to format an error in VOTable: {@link #writeError(String, Map, PrintWriter)}. + * This is useful for TAP which requires to return in VOTable any error that occurs while any operation. + * See {@link DefaultTAPErrorWriter} for more details. *

    * - * @author Grégory Mantelet (CDS) - * @version 06/2012 - * - * @param Type of the result to format in VOTable (i.e. {@link java.sql.ResultSet}). - * - * @see ResultSet2VotableFormatter + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (04/2015) */ -public abstract class VOTableFormat< R > implements OutputFormat { - - /** Indicates whether a format report (start and end date/time) must be printed in the log output. */ - private boolean logFormatReport; +public class VOTableFormat implements OutputFormat { /** The {@link ServiceConnection} to use (for the log and to have some information about the service (particularly: name, description). */ - protected final ServiceConnection service; + protected final ServiceConnection service; - protected String votTableVersion = "1.2"; - protected String xmlnsXsi = "http://www.w3.org/2001/XMLSchema-instance"; - protected String xsiSchemaLocation = "http://www.ivoa.net/xml/VOTable/v1.2"; - protected String xsiNoNamespaceSchemaLocation = null; - protected String xmlns = "http://www.ivoa.net/xml/VOTable/v1.2"; + /** Format of the VOTable data part in which data must be formatted. Possible values are: TABLEDATA, BINARY, BINARY2 or FITS. By default, it is set to BINARY. */ + protected final DataFormat votFormat; + + /** VOTable version in which table data must be formatted. By default, it is set to v13. */ + protected final VOTableVersion votVersion; + + /** MIME type associated with this format. */ + protected String mimeType; + + /** Short form of the MIME type associated with this format. */ + protected String shortMimeType; /** - * Creates a VOTable formatter without format report. + *

    Creates a VOTable formatter.

    + * + *

    Note: + * The MIME type is automatically set to "application/x-votable+xml" = "votable". + * It is however possible to change this default value thanks to {@link #setMimeType(String, String)}. + *

    * * @param service The service to use (for the log and to have some information about the service (particularly: name, description). * * @throws NullPointerException If the given service connection is null. + */ + public VOTableFormat(final ServiceConnection service) throws NullPointerException{ + this(service, null, null); + } + + /** + *

    Creates a VOTable formatter.

    * - * @see #VOTableFormat(ServiceConnection, boolean) + * Note: The MIME type is automatically set in function of the given VOTable serialization: + *
      + *
    • none or unknown: equivalent to BINARY
    • + *
    • BINARY: "application/x-votable+xml" = "votable"
    • + *
    • BINARY2: "application/x-votable+xml;serialization=BINARY2" = "votable/b2"
    • + *
    • TABLEDATA: "application/x-votable+xml;serialization=TABLEDATA" = "votable/td"
    • + *
    • FITS: "application/x-votable+xml;serialization=FITS" = "votable/fits"
    • + *
    + *

    It is however possible to change these default values thanks to {@link #setMimeType(String, String)}.

    + * + * @param service The service to use (for the log and to have some information about the service (particularly: name, description). + * @param votFormat Serialization of the VOTable data part. (TABLEDATA, BINARY, BINARY2 or FITS). + * + * @throws NullPointerException If the given service connection is null. */ - public VOTableFormat(final ServiceConnection service) throws NullPointerException{ - this(service, false); + public VOTableFormat(final ServiceConnection service, final DataFormat votFormat) throws NullPointerException{ + this(service, votFormat, null); } /** - * Creates a VOTable formatter. + *

    Creates a VOTable formatter.

    + * + * Note: The MIME type is automatically set in function of the given VOTable serialization: + *
      + *
    • none or unknown: equivalent to BINARY
    • + *
    • BINARY: "application/x-votable+xml" = "votable"
    • + *
    • BINARY2: "application/x-votable+xml;serialization=BINARY2" = "votable/b2"
    • + *
    • TABLEDATA: "application/x-votable+xml;serialization=TABLEDATA" = "votable/td"
    • + *
    • FITS: "application/x-votable+xml;serialization=FITS" = "votable/fits"
    • + *
    + *

    It is however possible to change these default values thanks to {@link #setMimeType(String, String)}.

    * * @param service The service to use (for the log and to have some information about the service (particularly: name, description). - * @param logFormatReport true to append a format report (start and end date/time) in the log output, false otherwise. + * @param votFormat Serialization of the VOTable data part. (TABLEDATA, BINARY, BINARY2 or FITS). + * @param votVersion Version of the resulting VOTable. * * @throws NullPointerException If the given service connection is null. */ - public VOTableFormat(final ServiceConnection service, final boolean logFormatReport) throws NullPointerException{ + public VOTableFormat(final ServiceConnection service, final DataFormat votFormat, final VOTableVersion votVersion) throws NullPointerException{ if (service == null) - throw new NullPointerException("The given service connection is NULL !"); + throw new NullPointerException("The given service connection is NULL!"); + this.service = service; - this.logFormatReport = logFormatReport; + + // Set the VOTable serialization and version: + this.votFormat = (votFormat == null) ? DataFormat.BINARY : votFormat; + this.votVersion = (votVersion == null) ? 
VOTableVersion.V13 : votVersion; + + // Deduce automatically the MIME type and its short expression: + if (this.votFormat.equals(DataFormat.BINARY)){ + this.mimeType = "application/x-votable+xml"; + this.shortMimeType = "votable"; + }else if (this.votFormat.equals(DataFormat.BINARY2)){ + this.mimeType = "application/x-votable+xml;serialization=BINARY2"; + this.shortMimeType = "votable/b2"; + }else if (this.votFormat.equals(DataFormat.TABLEDATA)){ + this.mimeType = "application/x-votable+xml;serialization=TABLEDATA"; + this.shortMimeType = "votable/td"; + }else if (this.votFormat.equals(DataFormat.FITS)){ + this.mimeType = "application/x-votable+xml;serialization=FITS"; + this.shortMimeType = "votable/fits"; + }else{ + this.mimeType = "application/x-votable+xml"; + this.shortMimeType = "votable"; + } } + @Override public final String getMimeType(){ - return "text/xml"; + return mimeType; } + @Override public final String getShortMimeType(){ - return "votable"; + return shortMimeType; + } + + /** + *

    Set the MIME type associated with this format.

    + * + *

    Note: NULL means no modification of the current value:

    + * + * @param mimeType Full MIME type of this VOTable format. note: if NULL, the MIME type is not modified. + * @param shortForm Short form of this MIME type. note: if NULL, the short MIME type is not modified. + */ + public final void setMimeType(final String mimeType, final String shortForm){ + if (mimeType != null) + this.mimeType = mimeType; + if (shortForm != null) + this.shortMimeType = shortForm; + } + + /** + * Get the set VOTable data serialization/format (e.g. BINARY, TABLEDATA). + * + * @return The data format. + */ + public final DataFormat getVotSerialization(){ + return votFormat; } + /** + * Get the set VOTable version. + * + * @return The VOTable version. + */ + public final VOTableVersion getVotVersion(){ + return votVersion; + } + + @Override public String getDescription(){ return null; } + @Override public String getFileExtension(){ return "xml"; } /** - *

    The skeleton of the resulting VOTable is written in this method:

    - *
      - *
    • <?xml version="1.0" encoding="UTF-8">
    • - *
    • {@link #writeHeader(PrintWriter, TAPJob)}
    • - *
    • <TABLE>
    • - *
    • <DATA>
    • - *
    • {@link #writeData(Object, DBColumn[], OutputStream, TAPJob)}
    • - *
    • </DATA>
    • - *
    • if (nbRows >= job.getMaxRec()) <INFO name="QUERY_STATUS" value="OVERFLOW" />
    • - *
    • </RESOURCE>
    • - *
    • </VOTABLE>
    • - *
    + *

    Write the given error message as VOTable document.

    + * + *

    Note: + * In the TAP protocol, all errors must be returned as VOTable. The class {@link DefaultTAPErrorWriter} is in charge of the management + * and reporting of all errors. It is calling this function while the error message to display to the user is ready and + * must be written in the HTTP response. + *

    * - * @see tap.formatter.OutputFormat#writeResult(Object, OutputStream, TAPExecutionReport) + *

    Here is the XML format of this VOTable error:

    + *
    +	 * 	<VOTABLE version="..." xmlns="..." >
    +	 * 		<RESOURCE type="results">
    +	 * 			<INFO name="QUERY_STATUS" value="ERROR>
    +	 * 				...
    +	 * 			</INFO>
    +	 * 			<INFO name="PROVIDER" value="...">...</INFO>
    +	 * 			<!-- other optional INFOs (e.g. request parameters) -->
    +	 * 		</RESOURCE>
    +	 * 	</VOTABLE>
    +	 * 
    + * + * @param message Error message to display to the user. + * @param otherInfo List of other additional information to display. optional + * @param writer Stream in which the VOTable error must be written. + * + * @throws IOException If any error occurs while writing in the given output. + * + * @since 2.0 */ - public final void writeResult(final R queryResult, final OutputStream output, final TAPExecutionReport execReport, final Thread thread) throws TAPException, InterruptedException{ - try{ - long start = System.currentTimeMillis(); - - PrintWriter out = new PrintWriter(output); - out.println(""); - writeHeader(out, execReport); - out.println("\t\t"); - DBColumn[] columns = writeMetadata(queryResult, out, execReport, thread); - out.println("\t\t\t"); - out.flush(); - int nbRows = writeData(queryResult, columns, output, execReport, thread); - output.flush(); - out.println("\t\t\t"); - out.println("\t\t
    "); - // OVERFLOW ? - if (execReport.parameters.getMaxRec() > 0 && nbRows >= execReport.parameters.getMaxRec()) - out.println("\t\t"); - out.println("\t"); - out.println(""); - out.flush(); - - if (logFormatReport) - service.getLogger().info("JOB " + execReport.jobID + " WRITTEN\tResult formatted (in VOTable ; " + nbRows + " rows ; " + columns.length + " columns) in " + (System.currentTimeMillis() - start) + " ms !"); - }catch(IOException ioe){ - throw new TAPException("Error while writing a query result in VOTable !", ioe); + public void writeError(final String message, final Map otherInfo, final PrintWriter writer) throws IOException{ + BufferedWriter out = new BufferedWriter(writer); + + // Set the root VOTABLE node: + out.write(""); + out.newLine(); + out.write(""); + out.newLine(); + + // The RESOURCE note MUST have a type "results": [REQUIRED] + out.write(""); + out.newLine(); + + // Indicate that the query has been successfully processed: [REQUIRED] + out.write("" + (message == null ? "" : VOSerializer.formatText(message)) + ""); + out.newLine(); + + // Append the PROVIDER information (if any): [OPTIONAL] + if (service.getProviderName() != null){ + out.write("" + ((service.getProviderDescription() == null) ? "" : VOSerializer.formatText(service.getProviderDescription())) + ""); + out.newLine(); + } + + // Append the ADQL query at the origin of this result: [OPTIONAL] + if (otherInfo != null){ + Iterator> it = otherInfo.entrySet().iterator(); + while(it.hasNext()){ + Map.Entry entry = it.next(); + if (entry.getValue() != null){ + if (entry.getValue().startsWith("\n")){ + int sep = entry.getValue().substring(1).indexOf('\n'); + if (sep < 0) + sep = 0; + else + sep++; + out.write("\n" + entry.getValue().substring(sep + 1) + "\n"); + }else + out.write(""); + out.newLine(); + } + } } + + out.flush(); + + /* Write footer. 
*/ + out.write(""); + out.newLine(); + out.write(""); + out.newLine(); + + out.flush(); + } + + @Override + public final void writeResult(final TableIterator queryResult, final OutputStream output, final TAPExecutionReport execReport, final Thread thread) throws TAPException, IOException, InterruptedException{ + ColumnInfo[] colInfos = toColumnInfos(queryResult, execReport, thread); + + /* Turns the result set into a table. */ + LimitedStarTable table = new LimitedStarTable(queryResult, colInfos, execReport.parameters.getMaxRec()); + + /* Prepares the object that will do the serialization work. */ + VOSerializer voser = VOSerializer.makeSerializer(votFormat, votVersion, table); + BufferedWriter out = new BufferedWriter(new OutputStreamWriter(output)); + + /* Write header. */ + writeHeader(votVersion, execReport, out); + + if (thread.isInterrupted()) + throw new InterruptedException(); + + /* Write table element. */ + voser.writeInlineTableElement(out); + execReport.nbRows = table.getNbReadRows(); + out.flush(); + + /* Check for overflow and write INFO if required. */ + if (table.lastSequenceOverflowed()){ + out.write(""); + out.newLine(); + } + + /* Write footer. */ + out.write(""); + out.newLine(); + out.write(""); + out.newLine(); + + out.flush(); } /** - *

    Writes the root node of the VOTable: <VOTABLE>.

    - *

    - * Attributes of this node are written thanks to their corresponding attributes in this class: - * {@link #votTableVersion}, {@link #xmlnsXsi}, {@link #xsiNoNamespaceSchemaLocation}, {@link #xsiSchemaLocation} and {@link #xmlns}. - * They are written only if different from null. - *

    + *

    Writes the first VOTable nodes/elements preceding the data: VOTABLE, RESOURCE and 3 INFOS (QUERY_STATUS, PROVIDER, QUERY).

    * - * @param output Writer in which the root node must be written. + * @param votVersion Target VOTable version. * @param execReport The report of the query execution. + * @param out Writer in which the root node must be written. * * @throws IOException If there is an error while writing the root node in the given Writer. * @throws TAPException If there is any other error (by default: never happen). */ - protected void writeHeader(final PrintWriter output, final TAPExecutionReport execReport) throws IOException, TAPException{ - StringBuffer strBuf = new StringBuffer("'); - output.println(strBuf); - - output.println("\t"); - - // INFO items: - output.println("\t\t"); - output.println("\t\t" + ((service.getProviderDescription() == null) ? "" : SavotWriter.encodeElement(service.getProviderDescription())) + ""); - output.println("\t\t"); + protected void writeHeader(final VOTableVersion votVersion, final TAPExecutionReport execReport, final BufferedWriter out) throws IOException, TAPException{ + // Set the root VOTABLE node: + out.write(""); + out.newLine(); + out.write(""); + out.newLine(); + + // The RESOURCE note MUST have a type "results": [REQUIRED] + out.write(""); + out.newLine(); + + // Indicate that the query has been successfully processed: [REQUIRED] + out.write(""); + out.newLine(); + + // Append the PROVIDER information (if any): [OPTIONAL] + if (service.getProviderName() != null){ + out.write("" + ((service.getProviderDescription() == null) ? "" : VOSerializer.formatText(service.getProviderDescription())) + ""); + out.newLine(); + } + + // Append the ADQL query at the origin of this result: [OPTIONAL] + String adqlQuery = execReport.parameters.getQuery(); + if (adqlQuery != null){ + out.write(""); + out.newLine(); + } + + /* TODO Add somewhere in the table header the different Coordinate Systems used in this result! 
+ * 2 ways to do so: + * 1/ COOSYS (deprecated from VOTable 1.2, but soon un-deprecated) + * 2/ a GROUP item with the STC expression of the coordinate system. + */ + + out.flush(); } /** - *

    Writes fields' metadata of the given query result in the given Writer.

    - *

    Important: To write write metadata of a given field you can use {@link #writeFieldMeta(TAPColumn, PrintWriter)}.

    + * Writes fields' metadata of the given query result. * - * @param queryResult The query result from whose fields' metadata must be written. - * @param output Writer in which fields' metadata must be written. + * @param result The query result from whose fields' metadata must be written. * @param execReport The report of the query execution. - * @param thread The thread which asked for the result writting. + * @param thread The thread which asked for the result writing. * - * @return Extracted field's metadata. + * @return Extracted field's metadata, or NULL if no metadata have been found (theoretically, it never happens). * - * @throws IOException If there is an error while writing the metadata in the given Writer. + * @throws IOException If there is an error while writing the metadata. * @throws TAPException If there is any other error. * @throws InterruptedException If the given thread has been interrupted. */ - protected abstract DBColumn[] writeMetadata(final R queryResult, final PrintWriter output, final TAPExecutionReport execReport, final Thread thread) throws IOException, TAPException, InterruptedException; + public static final ColumnInfo[] toColumnInfos(final TableIterator result, final TAPExecutionReport execReport, final Thread thread) throws IOException, TAPException, InterruptedException{ + // Get the metadata extracted/guesses from the ADQL query: + DBColumn[] columnsFromQuery = execReport.resultingColumns; + + // Get the metadata extracted from the result: + TAPColumn[] columnsFromResult = result.getMetadata(); + + int indField = 0; + if (columnsFromQuery != null){ + + // Initialize the resulting array: + ColumnInfo[] colInfos = new ColumnInfo[columnsFromQuery.length]; + + // For each column: + for(DBColumn field : columnsFromQuery){ + + // Try to build/get appropriate metadata for this field/column: + TAPColumn colFromResult = (columnsFromResult != null && indField < columnsFromResult.length) ? 
columnsFromResult[indField] : null; + TAPColumn tapCol = getValidColMeta(field, colFromResult); + + // Build the corresponding ColumnInfo object: + colInfos[indField] = getColumnInfo(tapCol); + + indField++; + } + + return colInfos; + }else + return null; + } /** - *

    Formats in a VOTable field and writes the given {@link TAPColumn} in the given Writer.

    + * Try to get or otherwise to build appropriate metadata using those extracted from the ADQL query and those extracted from the result. * - *

    Note: If the VOTable datatype is int, short or long a NULL values is set by adding a node VALUES: <VALUES null="..." />

    + * @param typeFromQuery Metadata extracted/guessed from the ADQL query. + * @param typeFromResult Metadata extracted/guessed from the result. * - * @param col The column metadata to format into a VOTable field. - * @param out The stream in which the formatted column metadata must be written. - * - * @throws IOException If there is an error while writing the field metadata. - * @throws TAPException If there is any other error (by default: never happen). + * @return The most appropriate metadata. */ - protected void writeFieldMeta(TAPColumn col, PrintWriter out) throws IOException, TAPException{ - StringBuffer fieldline = new StringBuffer("\t\t\t"); - - fieldline.append(" 0) - fieldline.append(" ucd=").append('"').append(SavotWriter.encodeAttribute(col.getUcd())).append('"'); + // Set the shape (VOTable arraysize): + colInfo.setShape(getShape(votType.arraysize)); - if (col.getUtype() != null && col.getUtype().length() > 0) - fieldline.append(" utype=").append('"').append(SavotWriter.encodeAttribute(col.getUtype())).append('"'); + // Set this value may be NULL (note: it is not really necessary since STIL set this flag to TRUE by default): + colInfo.setNullable(true); - if (col.getUnit() != null && col.getUnit().length() > 0) - fieldline.append(" unit=").append('"').append(SavotWriter.encodeAttribute(col.getUnit())).append('"'); + // Set the XType (if any): + if (votType.xtype != null) + colInfo.setAuxDatum(new DescribedValue(new DefaultValueInfo("xtype", String.class, "VOTable xtype attribute"), votType.xtype)); - if (col.getDescription() != null && !col.getDescription().trim().isEmpty()) - description = col.getDescription().trim(); - else - description = null; + // Set the additional information: unit, UCD and UType: + colInfo.setUnitString(tapCol.getUnit()); + colInfo.setUCD(tapCol.getUcd()); + colInfo.setUtype(tapCol.getUtype()); - if (nullVal != null || description != null){ - fieldline.append(">\n"); - if (nullVal != null) - fieldline.append("\n"); - if 
(description != null) - fieldline.append("").append(SavotWriter.encodeElement(description)).append("\n"); - fieldline.append(""); - out.println(fieldline); - }else{ - fieldline.append("/>"); - out.println(fieldline); - } + return colInfo; } /** - *

    Writes the data of the given query result in the given OutputStream.

    - *

    Important: To write a field value you can use {@link #writeFieldValue(Object, DBColumn, OutputStream)}.

    - * - * @param queryResult The query result which contains the data to write. - * @param selectedColumns The columns selected by the query. - * @param output The stream in which the data must be written. - * @param execReport The report of the query execution. - * @param thread The thread which asked for the result writting. + * Convert the VOTable datatype string into a corresponding {@link Class} object. * - * @return The number of written rows. (note: if this number is greater than the value of MAXREC: OVERFLOW) + * @param datatype Value of the VOTable attribute "datatype". + * @param arraysize Value of the VOTable attribute "arraysize". * - * @throws IOException If there is an error while writing the data in the given stream. - * @throws TAPException If there is any other error. - * @throws InterruptedException If the given thread has been interrupted. + * @return The corresponding {@link Class} object. */ - protected abstract int writeData(final R queryResult, final DBColumn[] selectedColumns, final OutputStream output, final TAPExecutionReport execReport, final Thread thread) throws IOException, TAPException, InterruptedException; + protected static final Class getDatatypeClass(final VotDatatype datatype, final String arraysize){ + // Determine whether this type is an array or not: + boolean isScalar = arraysize == null || (arraysize.length() == 1 && arraysize.equals("1")); + + // Guess the corresponding Class object (see section "7.1.4 Data Types" of the STIL documentation): + switch(datatype){ + case BIT: + return boolean[].class; + case BOOLEAN: + return isScalar ? Boolean.class : boolean[].class; + case DOUBLE: + return isScalar ? Double.class : double[].class; + case DOUBLECOMPLEX: + return double[].class; + case FLOAT: + return isScalar ? Float.class : float[].class; + case FLOATCOMPLEX: + return float[].class; + case INT: + return isScalar ? Integer.class : int[].class; + case LONG: + return isScalar ? 
Long.class : long[].class; + case SHORT: + return isScalar ? Short.class : short[].class; + case UNSIGNEDBYTE: + return isScalar ? Short.class : short[].class; + case CHAR: + case UNICODECHAR: + default: /* If the type is not know (theoretically, never happens), return char[*] by default. */ + return isScalar ? Character.class : String.class; + } + } /** - *

    Writes the given field value in the given OutputStream.

    - * - *

    - * The given value will be encoded as an XML element (see {@link SavotWriter#encodeElement(String)}. - * Besides, if the given value is null and if the column datatype is int, - * short or long, the NULL values declared in the field metadata will be written.

    + * Convert the given VOTable arraysize into a {@link ColumnInfo} shape. * - * @param value The value to write. - * @param column The corresponding column metadata. - * @param output The stream in which the field value must be written. + * @param arraysize Value of the VOTable attribute "arraysize". * - * @throws IOException If there is an error while writing the given field value in the given stream. - * @throws TAPException If there is any other error (by default: never happen). + * @return The corresponding {@link ColumnInfo} shape. */ - protected void writeFieldValue(final Object value, final DBColumn column, final OutputStream output) throws IOException, TAPException{ - String fieldValue = (value == null) ? null : value.toString(); - if (fieldValue == null && column instanceof TAPColumn) - fieldValue = getNullValue(((TAPColumn)column).getVotType().datatype); - if (fieldValue != null) - output.write(SavotWriter.encodeElement(fieldValue).getBytes()); + protected static final int[] getShape(final String arraysize){ + /* + * Note: multi-dimensional arrays are forbidden in the TAP library, + * so no 'nxm...' is possible. + */ + + // No arraysize => empty array: + if (arraysize == null) + return new int[0]; + + // '*' or 'n*' => {-1}: + else if (arraysize.charAt(arraysize.length() - 1) == '*') + return new int[]{-1}; + + // 'n' => {n}: + else{ + try{ + return new int[]{Integer.parseInt(arraysize)}; + }catch(NumberFormatException nfe){ + // if the given arraysize is incorrect (theoretically, never happens), it is like no arraysize has been provided: + return new int[0]; + } + } } /** - *

    Gets the NULL value corresponding to the given datatype:

    - *
      - *
    • for int: {@link Integer#MIN_VALUE}
    • - *
    • for short: {@link Short#MIN_VALUE}
    • - *
    • for long: {@link Long#MIN_VALUE}
    • - *
    • for anything else, null will be returned.
    • - *
    - * - * @param datatype A VOTable datatype. + *

    + * Special {@link StarTable} able to read a fixed maximum number of rows {@link TableIterator}. + * However, if no limit is provided, all rows are read. + *

    * - * @return The corresponding NULL value, or null if there is none. + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (10/2014) + * @since 2.0 */ - public static final String getNullValue(String datatype){ - if (datatype == null) - return null; + public static class LimitedStarTable extends AbstractStarTable { + + /** Number of columns to read. */ + private final int nbCol; + + /** Information about all columns to read. */ + private final ColumnInfo[] columnInfos; + + /** Iterator over the data to read using this special {@link StarTable} */ + private final TableIterator tableIt; + + /** Limit on the number of rows to read. Over this limit, an "overflow" event occurs and {@link #overflow} is set to TRUE. */ + private final long maxrec; + + /** Indicates whether the maximum allowed number of rows has already been read or not. When true, no more row can be read. */ + private boolean overflow; + + /** Last read row. If NULL, no row has been read or no more row is available. */ + private Object[] row = null; + + /** Number of rows read until now. */ + private int nbRows; + + /** + * Build this special {@link StarTable}. + * + * @param tableIt Data on which to iterate using this special {@link StarTable}. + * @param colInfos Information about all columns. + * @param maxrec Limit on the number of rows to read. (if negative, there will be no limit) + */ + LimitedStarTable(final TableIterator tableIt, final ColumnInfo[] colInfos, final long maxrec){ + this.tableIt = tableIt; + nbCol = colInfos.length; + columnInfos = colInfos; + this.maxrec = maxrec; + overflow = false; + } - datatype = datatype.trim().toLowerCase(); + /** + * Indicates whether the last row sequence dispensed by + * this table's getRowSequence method was truncated at maxrec rows. 
+ * + * @return true if the last row sequence overflowed + */ + public boolean lastSequenceOverflowed(){ + return overflow; + } - if (datatype.equals("short")) - return "" + Short.MIN_VALUE; - else if (datatype.equals("int")) - return "" + Integer.MIN_VALUE; - else if (datatype.equals("long")) - return "" + Long.MIN_VALUE; - else - return null; + /** + * Get the number of rows that have been successfully read until now. + * + * @return Number of all read rows. + */ + public int getNbReadRows(){ + return nbRows; + } + + @Override + public int getColumnCount(){ + return nbCol; + } + + @Override + public ColumnInfo getColumnInfo(final int colInd){ + return columnInfos[colInd]; + } + + @Override + public long getRowCount(){ + return -1; + } + + @Override + public RowSequence getRowSequence() throws IOException{ + overflow = false; + row = new Object[nbCol]; + + return new RowSequence(){ + long irow = -1; + + @Override + public boolean next() throws IOException{ + irow++; + try{ + if (maxrec < 0 || irow < maxrec){ + boolean hasNext = tableIt.nextRow(); + if (hasNext){ + for(int i = 0; i < nbCol && tableIt.hasNextCol(); i++) + row[i] = tableIt.nextCol(); + nbRows++; + }else + row = null; + return hasNext; + }else{ + overflow = tableIt.nextRow(); + row = null; + return false; + } + }catch(DataReadException dre){ + if (dre.getCause() != null && dre.getCause() instanceof IOException) + throw (IOException)(dre.getCause()); + else + throw new IOException(dre); + } + } + + @Override + public Object[] getRow() throws IOException{ + return row; + } + + @Override + public Object getCell(int cellIndex) throws IOException{ + return row[cellIndex]; + } + + @Override + public void close() throws IOException{} + }; + } } } diff --git a/src/tap/log/DefaultTAPLog.java b/src/tap/log/DefaultTAPLog.java index 62b6971bcef008cfa401467d91059598b8cd2be1..2750feda89214748837ab064f6da3ee004674ab8 100644 --- a/src/tap/log/DefaultTAPLog.java +++ b/src/tap/log/DefaultTAPLog.java @@ -16,151 +16,162 
@@ package tap.log; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.OutputStream; import java.io.PrintWriter; +import java.sql.SQLException; +import tap.TAPException; import tap.TAPExecutionReport; +import tap.TAPSyncJob; import tap.db.DBConnection; - -import tap.file.TAPFileManager; - -import tap.metadata.TAPMetadata; -import tap.metadata.TAPTable; - +import tap.parameters.TAPParameters; +import uws.UWSException; +import uws.service.file.UWSFileManager; import uws.service.log.DefaultUWSLog; /** * Default implementation of the {@link TAPLog} interface which lets logging any message about a TAP service. * - * @author Grégory Mantelet (CDS) - * @version 06/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (04/2015) * * @see DefaultUWSLog */ public class DefaultTAPLog extends DefaultUWSLog implements TAPLog { - public DefaultTAPLog(TAPFileManager fm){ + /** + *

    Builds a {@link TAPLog} which will use the given file + * manager to get the log output (see {@link UWSFileManager#getLogOutput(uws.service.log.UWSLog.LogLevel, String)}).

    + * + *

    note 1: This constructor is particularly useful if the way of managing log output may change in the given file manager. + * Indeed, the output may change in function of the type of message to log ({@link uws.service.log.UWSLog.LogLevel}).

    + * + *

    note 2 If no output can be found in the file manager the standard error output ({@link System#err}) + * will be chosen automatically for all log messages.

    + * + * @param fm A TAP file manager. + * + * @see DefaultUWSLog#DefaultUWSLog(UWSFileManager) + */ + public DefaultTAPLog(final UWSFileManager fm){ super(fm); } - public DefaultTAPLog(OutputStream output){ + /** + *

    Builds a {@link TAPLog} which will print all its + * messages into the given stream.

    + * + *

    note: the given output will be used whatever is the type of message to log ({@link uws.service.log.UWSLog.LogLevel}).

    + * + * @param output An output stream. + * + * @see DefaultUWSLog#DefaultUWSLog(OutputStream) + */ + public DefaultTAPLog(final OutputStream output){ super(output); } - public DefaultTAPLog(PrintWriter writer){ + /** + *

    Builds a {@link TAPLog} which will print all its + * messages into the given stream.

    + * + *

    note: the given output will be used whatever is the type of message to log ({@link uws.service.log.UWSLog.LogLevel}).

    + * + * @param writer A print writer. + * + * @see DefaultUWSLog#DefaultUWSLog(PrintWriter) + */ + public DefaultTAPLog(final PrintWriter writer){ super(writer); } - public void queryFinished(final TAPExecutionReport report){ - StringBuffer buffer = new StringBuffer("QUERY END FOR " + report.jobID + ""); - buffer.append(" - success ? ").append(report.success); - buffer.append(" - synchronous ? ").append(report.synchronous); - buffer.append(" - total duration = ").append(report.getTotalDuration()).append("ms"); - buffer.append(" => upload=").append(report.getUploadDuration()).append("ms"); - buffer.append(", parsing=").append(report.getParsingDuration()).append("ms"); - buffer.append(", translating=").append(report.getTranslationDuration()).append("ms"); - buffer.append(", execution=").append(report.getExecutionDuration()).append("ms"); - buffer.append(", formatting[").append(report.parameters.getFormat()).append("]=").append(report.getFormattingDuration()).append("ms"); - info(buffer.toString()); - } - - public void dbActivity(final String message){ - dbActivity(message, null); - } - - public void dbActivity(final String message, final Throwable t){ - String msgType = (t == null) ? "[INFO] " : "[ERROR] "; - log(DBConnection.LOG_TYPE_DB_ACTIVITY, ((message == null) ? 
null : (msgType + message)), t); - } - - public void dbInfo(final String message){ - dbActivity(message); - } - - public void dbError(final String message, final Throwable t){ - dbActivity(message, t); - } - - @Override - public void tapMetadataFetched(TAPMetadata metadata){ - dbActivity("TAP metadata fetched from the database !"); - } - - @Override - public void tapMetadataLoaded(TAPMetadata metadata){ - dbActivity("TAP metadata loaded into the database !"); - } - - @Override - public void connectionOpened(DBConnection connection, String dbName){ - //dbActivity("A connection has been opened to the database \""+dbName+"\" !"); - } - - @Override - public void connectionClosed(DBConnection connection){ - //dbActivity("A database connection has been closed !"); - } - - @Override - public void transactionStarted(final DBConnection connection){ - //dbActivity("A transaction has been started !"); - } - - @Override - public void transactionCancelled(final DBConnection connection){ - //dbActivity("A transaction has been cancelled !"); - } - - @Override - public void transactionEnded(final DBConnection connection){ - //dbActivity("A transaction has been ended/commited !"); - } - - @Override - public void schemaCreated(final DBConnection connection, String schema){ - dbActivity("CREATE SCHEMA \"" + schema + "\"\t" + connection.getID()); - } - - @Override - public void schemaDropped(final DBConnection connection, String schema){ - dbActivity("DROP SCHEMA \"" + schema + "\"\t" + connection.getID()); - } - - protected final String getFullDBName(final TAPTable table){ - return (table.getSchema() != null) ? 
(table.getSchema().getDBName() + ".") : ""; - } - @Override - public void tableCreated(final DBConnection connection, TAPTable table){ - dbActivity("CREATE TABLE \"" + getFullDBName(table) + "\" (ADQL name: \"" + table.getFullName() + "\")\t" + connection.getID()); + protected void printException(Throwable error, final PrintWriter out){ + if (error != null){ + if (error instanceof UWSException || error instanceof TAPException || error.getClass().getPackage().getName().startsWith("adql.")){ + if (error.getCause() != null) + printException(error.getCause(), out); + else{ + out.println("Caused by a " + error.getClass().getName() + " " + getExceptionOrigin(error)); + if (error.getMessage() != null) + out.println("\t" + error.getMessage()); + } + }else if (error instanceof SQLException){ + out.println("Caused by a " + error.getClass().getName() + " " + getExceptionOrigin(error)); + out.print("\t"); + do{ + out.println(error.getMessage()); + error = ((SQLException)error).getNextException(); + if (error != null) + out.print("\t=> "); + }while(error != null); + }else{ + out.print("Caused by a "); + error.printStackTrace(out); + } + } } @Override - public void tableDropped(final DBConnection connection, TAPTable table){ - dbActivity("DROP TABLE \"" + getFullDBName(table) + "\" (ADQL name: \"" + table.getFullName() + "\")\t" + connection.getID()); - } + public void logDB(LogLevel level, final DBConnection connection, final String event, final String message, final Throwable error){ + // If the type is missing: + if (level == null) + level = (error != null) ? LogLevel.ERROR : LogLevel.INFO; - @Override - public void rowsInserted(final DBConnection connection, TAPTable table, int nbInsertedRows){ - dbActivity("INSERT ROWS (" + ((nbInsertedRows > 0) ? nbInsertedRows : "???") + ") into \"" + getFullDBName(table) + "\" (ADQL name: \"" + table.getFullName() + "\")\t" + connection.getID()); - } + // Log or not? 
+ if (!canLog(level)) + return; - @Override - public void sqlQueryExecuting(final DBConnection connection, String sql){ - dbActivity("EXECUTING SQL QUERY \t" + connection.getID() + "\n" + ((sql == null) ? "???" : sql.replaceAll("\n", " ").replaceAll("\t", " ").replaceAll("\r", ""))); - } + // log the main given error: + log(level, "DB", event, (connection != null ? connection.getID() : null), message, null, error); - @Override - public void sqlQueryError(final DBConnection connection, String sql, Throwable t){ - dbActivity("EXECUTION ERROR\t" + connection.getID(), t); + /* Some SQL exceptions (like BatchUpdateException) have a next exception which provides more information. + * Here, the stack trace of the next exception is also logged: + */ + if (error != null && error instanceof SQLException && ((SQLException)error).getNextException() != null){ + PrintWriter out = getOutput(level, "DB"); + out.println("[NEXT EXCEPTION]"); + ((SQLException)error).getNextException().printStackTrace(out); + out.flush(); + } } @Override - public void sqlQueryExecuted(final DBConnection connection, String sql){ - dbActivity("SUCCESSFULL END OF EXECUTION\t" + connection.getID()); + public void logTAP(LogLevel level, final Object obj, final String event, final String message, final Throwable error){ + // If the type is missing: + if (level == null) + level = (error != null) ? LogLevel.ERROR : LogLevel.INFO; + + // Log or not? + if (!canLog(level)) + return; + + // Get more information (when known event and available object): + String jobId = null, msgAppend = null; + try{ + if (event != null && obj != null){ + if (event.equals("SYNC_INIT")) + msgAppend = "QUERY=" + ((TAPParameters)obj).getQuery(); + else if (obj instanceof TAPSyncJob){ + log(level, "JOB", event, ((TAPSyncJob)obj).getID(), message, null, error); + return; + }else if (obj instanceof TAPExecutionReport){ + TAPExecutionReport report = (TAPExecutionReport)obj; + jobId = report.jobID; + msgAppend = (report.synchronous ? 
"SYNC" : "ASYNC") + ",duration=" + report.getTotalDuration() + "ms (upload=" + report.getUploadDuration() + ",parse=" + report.getParsingDuration() + ",exec=" + report.getExecutionDuration() + ",format[" + report.parameters.getFormat() + "]=" + report.getFormattingDuration() + ")"; + }else if (event.equalsIgnoreCase("WRITING_ERROR")) + jobId = obj.toString(); + } + }catch(Throwable t){ + error("Error while preparing a log message in logTAP(...)! The message will be logger but without additional information such as the job ID.", t); + } + + // Log the message: + log(level, "TAP", event, jobId, message, msgAppend, error); } } diff --git a/src/tap/log/TAPLog.java b/src/tap/log/TAPLog.java index 04439aa2297c4bdee7bb9f22e07f8aa8de2f5f86..288eac73ad82bbc1b2e2283dea5592d1e4a61039 100644 --- a/src/tap/log/TAPLog.java +++ b/src/tap/log/TAPLog.java @@ -16,59 +16,93 @@ package tap.log; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import tap.TAPExecutionReport; +import tap.TAPSyncJob; import tap.db.DBConnection; - -import tap.metadata.TAPMetadata; -import tap.metadata.TAPTable; - +import tap.parameters.TAPParameters; import uws.service.log.UWSLog; /** - * Lets logging any kind of message about a TAP service. + * Let log any kind of message about a TAP service. 
* - * @author Grégory Mantelet (CDS) - * @version 06/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (04/2015) */ public interface TAPLog extends UWSLog { - public void queryFinished(final TAPExecutionReport report); - - public void dbInfo(final String message); - - public void dbError(final String message, final Throwable t); - - public void tapMetadataFetched(final TAPMetadata metadata); - - public void tapMetadataLoaded(final TAPMetadata metadata); - - public void connectionOpened(final DBConnection connection, final String dbName); - - public void connectionClosed(final DBConnection connection); - - public void transactionStarted(final DBConnection connection); - - public void transactionCancelled(final DBConnection connection); - - public void transactionEnded(final DBConnection connection); - - public void schemaCreated(final DBConnection connection, final String schema); - - public void schemaDropped(final DBConnection connection, final String schema); - - public void tableCreated(final DBConnection connection, final TAPTable table); - - public void tableDropped(final DBConnection connection, final TAPTable table); - - public void rowsInserted(final DBConnection connection, final TAPTable table, final int nbInsertedRows); - - public void sqlQueryExecuting(final DBConnection connection, final String sql); - - public void sqlQueryError(final DBConnection connection, final String sql, final Throwable t); - - public void sqlQueryExecuted(final DBConnection connection, final String sql); + /** + *

    Log a message and/or an error in the DB (database) context.

    + * + *

    List of all events sent by the library (case sensitive):

    + *
      + *
    • CONNECTION_LACK
    • + *
    • TRANSLATE
    • + *
    • EXECUTE
    • + *
    • RESULT
    • + *
    • LOAD_TAP_SCHEMA
    • + *
    • CLEAN_TAP_SCHEMA
    • + *
    • CREATE_TAP_SCHEMA
    • + *
    • TABLE_EXIST
    • + *
    • COLUMN_EXIST
    • + *
    • EXEC_UPDATE
    • + *
    • ADD_UPLOAD_TABLE
    • + *
    • DROP_UPLOAD_TABLE
    • + *
    • START_TRANSACTION
    • + *
    • COMMIT
    • + *
    • ROLLBACK
    • + *
    • END_TRANSACTION
    • + *
    • CLOSE
    • + *
    + * + * @param level Level of the log (info, warning, error, ...). SHOULD NOT be NULL, but if NULL anyway, the level SHOULD be considered as INFO + * @param connection DB connection from which this log comes. MAY be NULL + * @param event Event at the origin of this log or action executed by the given database connection while this log is sent. MAY be NULL + * @param message Message to log. MAY be NULL + * @param error Error/Exception to log. MAY be NULL + * + * @since 2.0 + */ + public void logDB(final LogLevel level, final DBConnection connection, final String event, final String message, final Throwable error); + + /** + *

    Log a message and/or an error in the general context of TAP.

    + * + *

    + * One of the parameter is of type {@link Object}. This object can be used to provide more information to the log function + * in order to describe as much as possible the state and/or result event. + *

    + * + *

    List of all events sent by the library (case sensitive):

    + *
      + *
    • IDENT_USER (with a NULL "obj")
    • + *
    • SYNC_INIT (with "obj" as an instance of {@link TAPParameters})
    • + *
    • ASYNC_INIT (with a NULL "obj")
    • + *
    • START (with "obj" as an instance of {@link TAPSyncJob})
    • + *
    • UPLOADING (with "obj" as an instance of {@link TAPExecutionReport})
    • + *
    • PARSING (with "obj" as an instance of {@link TAPExecutionReport})
    • + *
    • START_DB_EXECUTION (with "obj" as an instance of {@link TAPExecutionReport})
    • + *
    • WRITING_RESULT (with "obj" as an instance of {@link TAPExecutionReport})
    • + *
    • RESULT_WRITTEN (with "obj" as an instance of {@link TAPExecutionReport})
    • + *
    • START_STEP (with "obj" as an instance of {@link TAPExecutionReport})
    • + *
    • END_EXEC (with "obj" as an instance of {@link TAPExecutionReport})
    • + *
    • END_DB_EXECUTION (with "obj" as an instance of {@link TAPExecutionReport})
    • + *
    • DROP_UPLOAD (with "obj" as an instance of {@link TAPExecutionReport})
    • + *
    • TIME_OUT (with "obj" as an instance of {@link TAPSyncJob})
    • + *
    • END (with "obj" as an instance of {@link TAPSyncJob})
    • + *
    + * + * @param level Level of the log (info, warning, error, ...). SHOULD NOT be NULL, but if NULL anyway, the level SHOULD be considered as INFO + * @param obj Object providing more information about the event/object at the origin of this log. MAY be NULL + * @param event Event at the origin of this log or action currently executed by TAP while this log is sent. MAY be NULL + * @param message Message to log. MAY be NULL + * @param error Error/Exception to log. MAY be NULL + * + * @since 2.0 + */ + public void logTAP(final LogLevel level, final Object obj, final String event, final String message, final Throwable error); } diff --git a/src/tap/metadata/TAPColumn.java b/src/tap/metadata/TAPColumn.java index 94b69da94ddb6547dcac454a1043b132874e5b50..30d4d1d7492838bef1c1d3ea472b88d96bf0bd84 100644 --- a/src/tap/metadata/TAPColumn.java +++ b/src/tap/metadata/TAPColumn.java @@ -16,49 +16,142 @@ package tap.metadata; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ +import java.awt.List; import java.util.ArrayList; import java.util.Iterator; +import java.util.Map; import adql.db.DBColumn; import adql.db.DBTable; +import adql.db.DBType; +import adql.db.DBType.DBDatatype; +/** + *

    Represent a column as described by the IVOA standard in the TAP protocol definition.

    + * + *

    + * This object representation has exactly the same fields as the column of the table TAP_SCHEMA.columns. + * But it also provides a way to add other data. For instance, if information not listed in the standard + * may be stored here, they can be using the function {@link #setOtherData(Object)}. This object can be + * a single value (integer, string, ...), but also a {@link Map}, {@link List}, etc... + *

    + * + *

    Important note: + * A {@link TAPColumn} object MUST always have a DB name. That's why by default, at the creation + * the DB name is the ADQL name. Once created, it is possible to set the DB name with {@link #setDBName(String)}. + * This DB name MUST be UNqualified and without double quotes. If a NULL or empty value is provided, + * nothing is done and the object keeps its former DB name. + *

    + * + *

    Set a table

    + * + *

    + * By default a column is detached (not part of a table). To specify the table in which this column is, + * you must use {@link TAPTable#addColumn(TAPColumn)}. By doing this, the table link inside this column + * will be set automatically and you will be able to get the table with {@link #getTable()}. + *

    + * + *

    Foreign keys

    + * + *

    + * In case this column is linked to one or several of other tables, it will be possible to list all + * foreign keys where the target columns is with {@link #getTargets()}. In the same way, it will be + * possible to list all foreign keys in which this column is a target with {@link #getSources()}. + * However, in order to ensure the consistency between all metadata, these foreign key's links are + * set at the table level by the table itself using {@link #addSource(TAPForeignKey)} and + * {@link #addTarget(TAPForeignKey)}. + *

    + * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (02/2015) + */ public class TAPColumn implements DBColumn { + /** Name that this column MUST have in ADQL queries. */ private final String adqlName; + /** Name that this column have in the database. + * Note: It CAN NOT be NULL. By default, it is the ADQL name. */ private String dbName = null; + /** Table which owns this column. + * Note: It should be NULL only at the construction or for a quick representation of a column. + * Then, this attribute is automatically set by a {@link TAPTable} when adding this column inside it + * with {@link TAPTable#addColumn(TAPColumn)}. */ private DBTable table = null; + /** Description of this column. + * Note: Standard TAP column field ; MAY be NULL. */ private String description = null; + /** Unit of this column's values. + * Note: Standard TAP column field ; MAY be NULL. */ private String unit = null; + /** UCD describing the scientific content of this column. + * Note: Standard TAP column field ; MAY be NULL. */ private String ucd = null; + /** UType associating this column with a data-model. + * Note: Standard TAP column field ; MAY be NULL. */ private String utype = null; - private String datatype = null; - - private int size = TAPTypes.NO_SIZE; - - private VotType votType = null; + /** Type of this column. + * Note: Standard TAP column field ; CAN'T be NULL. */ + private DBType datatype = new DBType(DBDatatype.VARCHAR); + /** Flag indicating whether this column is one of those that should be returned by default. + * Note: Standard TAP column field ; FALSE by default. */ private boolean principal = false; + /** Flag indicating whether this column is indexed in the database. + * Note: Standard TAP column field ; FALSE by default. */ private boolean indexed = false; + /** Flag indicating whether this column can be set to NULL in the database. + * Note: Standard TAP column field ; FALSE by default. 
+ * @since 2.0 */ + private boolean nullable = false; + + /** Flag indicating whether this column is defined by a standard. + * Note: Standard TAP column field ; FALSE by default. */ private boolean std = false; + /** Let add some information in addition of the ones of the TAP protocol. + * Note: This object can be anything: an {@link Integer}, a {@link String}, a {@link Map}, a {@link List}, ... + * Its content is totally free and never used or checked. */ protected Object otherData = null; + /** List all foreign keys in which this column is a source. + *

    CAUTION: For consistency consideration, this attribute SHOULD never be modified! + * It is set by the constructor and filled ONLY by the table.

    */ protected final ArrayList lstTargets; + /** List all foreign keys in which this column is a target. + *

    CAUTION: For consistency consideration, this attribute SHOULD never be modified! + * It is set by the constructor and filled ONLY by the table.

    */ protected final ArrayList lstSources; + /** + *

    Build a VARCHAR {@link TAPColumn} instance with the given ADQL name.

    + * + *

    Note: + * The DB name is set by default with the ADQL name. To set the DB name, + * you MUST call then {@link #setDBName(String)}. + * The datatype is set by default to VARCHAR. + *

    + * + *

    Note: + * If the given ADQL name is prefixed (= it has some text separated by a '.' before the column name), + * this prefix will be removed. Only the part after the '.' character will be kept. + *

    + * + * @param columnName Name that this column MUST have in ADQL queries. CAN'T be NULL ; this name can never be changed after. + */ public TAPColumn(String columnName){ if (columnName == null || columnName.trim().length() == 0) throw new NullPointerException("Missing column name !"); @@ -67,28 +160,199 @@ public class TAPColumn implements DBColumn { dbName = adqlName; lstTargets = new ArrayList(1); lstSources = new ArrayList(1); - setDefaultType(); } - public TAPColumn(String columnName, String description){ + /** + *

    Build a {@link TAPColumn} instance with the given ADQL name and datatype.

    + * + *

    Note: + * The DB name is set by default with the ADQL name. To set the DB name, + * you MUST call then {@link #setDBName(String)}. + *

    + * + *

    Note: + * If the given ADQL name is prefixed (= it has some text separated by a '.' before the column name), + * this prefix will be removed. Only the part after the '.' character will be kept. + *

    + * + *

    Note: + * The datatype is set by calling the function {@link #setDatatype(DBType)} which does not do + * anything if the given datatype is NULL. + *

    + * + * @param columnName Name that this column MUST have in ADQL queries. CAN'T be NULL ; this name can never be changed after. + * @param type Datatype of this column. If NULL, VARCHAR will be the datatype of this column + * + * @see #setDatatype(DBType) + */ + public TAPColumn(String columnName, DBType type){ this(columnName); + setDatatype(type); + } + + /** + *

    Build a VARCHAR {@link TAPColumn} instance with the given ADQL name and description.

    + * + *

    Note: + * The DB name is set by default with the ADQL name. To set the DB name, + * you MUST call then {@link #setDBName(String)}. + *

    + * + *

    Note: + * If the given ADQL name is prefixed (= it has some text separated by a '.' before the column name), + * this prefix will be removed. Only the part after the '.' character will be kept. + *

    + * + * @param columnName Name that this column MUST have in ADQL queries. CAN'T be NULL ; this name can never be changed after. + * @param description Description of the column's content. May be NULL + */ + public TAPColumn(String columnName, String description){ + this(columnName, (DBType)null, description); + } + + /** + *

    Build a {@link TAPColumn} instance with the given ADQL name, datatype and description.

    + * + *

    Note: + * The DB name is set by default with the ADQL name. To set the DB name, + * you MUST call then {@link #setDBName(String)}. + *

    + * + *

    Note: + * If the given ADQL name is prefixed (= it has some text separated by a '.' before the column name), + * this prefix will be removed. Only the part after the '.' character will be kept. + *

    + * + *

    Note: + * The datatype is set by calling the function {@link #setDatatype(DBType)} which does do + * anything if the given datatype is NULL. + *

    + * + * @param columnName Name that this column MUST have in ADQL queries. CAN'T be NULL ; this name can never be changed after. + * @param type Datatype of this column. If NULL, VARCHAR will be the datatype of this column + * @param description Description of the column's content. May be NULL + */ + public TAPColumn(String columnName, DBType type, String description){ + this(columnName, type); this.description = description; } + /** + *

    Build a VARCHAR {@link TAPColumn} instance with the given ADQL name, description and unit.

    + * + *

    Note: + * The DB name is set by default with the ADQL name. To set the DB name, + * you MUST call then {@link #setDBName(String)}. + *

    + * + *

    Note: + * If the given ADQL name is prefixed (= it has some text separated by a '.' before the column name), + * this prefix will be removed. Only the part after the '.' character will be kept. + *

    + * + * @param columnName Name that this column MUST have in ADQL queries. CAN'T be NULL ; this name can never be changed after. + * @param description Description of the column's content. May be NULL + * @param unit Unit of the column's values. May be NULL + */ public TAPColumn(String columnName, String description, String unit){ - this(columnName, description); + this(columnName, null, description, unit); + } + + /** + *

    Build a {@link TAPColumn} instance with the given ADQL name, type, description and unit.

    + * + *

    Note: + * The DB name is set by default with the ADQL name. To set the DB name, + * you MUST call then {@link #setDBName(String)}. + *

    + * + *

    Note: + * If the given ADQL name is prefixed (= it has some text separated by a '.' before the column name), + * this prefix will be removed. Only the part after the '.' character will be kept. + *

    + * + *

    Note: + * The datatype is set by calling the function {@link #setDatatype(DBType)} which does do + * anything if the given datatype is NULL. + *

    + * + * @param columnName Name that this column MUST have in ADQL queries. CAN'T be NULL ; this name can never be changed after. + * @param type Datatype of this column. If NULL, VARCHAR will be the datatype of this column + * @param description Description of the column's content. May be NULL + * @param unit Unit of the column's values. May be NULL + */ + public TAPColumn(String columnName, DBType type, String description, String unit){ + this(columnName, type, description); this.unit = unit; } + /** + *

    Build a VARCHAR {@link TAPColumn} instance with the given fields.

    + * + *

    Note: + * The DB name is set by default with the ADQL name. To set the DB name, + * you MUST call then {@link #setDBName(String)}. + *

    + * + *

    Note: + * If the given ADQL name is prefixed (= it has some text separated by a '.' before the column name), + * this prefix will be removed. Only the part after the '.' character will be kept. + *

    + * + *

    Note: + * The datatype is set by calling the function {@link #setDatatype(DBType)} which does do + * anything if the given datatype is NULL. + *

    + * + * @param columnName Name that this column MUST have in ADQL queries. CAN'T be NULL ; this name can never be changed after. + * @param description Description of the column's content. May be NULL + * @param unit Unit of the column's values. May be NULL + * @param ucd UCD describing the scientific content of this column. + * @param utype UType associating this column with a data-model. + */ public TAPColumn(String columnName, String description, String unit, String ucd, String utype){ - this(columnName, description, unit); + this(columnName, null, description, unit, ucd, utype); + } + + /** + *

    Build a {@link TAPColumn} instance with the given fields.

    + * + *

    Note: + * The DB name is set by default with the ADQL name. To set the DB name, + * you MUST call then {@link #setDBName(String)}. + *

    + * + *

    Note: + * If the given ADQL name is prefixed (= it has some text separated by a '.' before the column name), + * this prefix will be removed. Only the part after the '.' character will be kept. + *

    + * + *

    Note: + * The datatype is set by calling the function {@link #setDatatype(DBType)} which does do + * anything if the given datatype is NULL. + *

    + * + * @param columnName Name that this column MUST have in ADQL queries. CAN'T be NULL ; this name can never be changed after. + * @param type Datatype of this column. If NULL, VARCHAR will be the datatype of this column + * @param description Description of the column's content. May be NULL + * @param unit Unit of the column's values. May be NULL + * @param ucd UCD describing the scientific content of this column. + * @param utype UType associating this column with a data-model. + */ + public TAPColumn(String columnName, DBType type, String description, String unit, String ucd, String utype){ + this(columnName, type, description, unit); this.ucd = ucd; this.utype = utype; } /** - * @return The name. + * Get the ADQL name (the name this column MUST have in ADQL queries). + * + * @return Its ADQL name. + * @see #getADQLName() + * @deprecated Does not do anything special: just call {@link #getADQLName()}. */ + @Deprecated public final String getName(){ return getADQLName(); } @@ -103,255 +367,397 @@ public class TAPColumn implements DBColumn { return dbName; } + /** + *

    Change the name that this column MUST have in the database (i.e. in SQL queries).

    + * + *

    Note: + * If the given value is NULL or an empty string, nothing is done ; the DB name keeps is former value. + *

    + * + * @param name The new database name of this column. + */ public final void setDBName(String name){ name = (name != null) ? name.trim() : name; - dbName = (name == null || name.length() == 0) ? adqlName : name; + if (name != null && name.length() > 0) + dbName = name; } - /** - * @return The table. - */ + @Override public final DBTable getTable(){ return table; } /** - * @param table The table to set. + *

    Set the table in which this column is.

    + * + *

    Warning: + * For consistency reasons, this function SHOULD be called only by the {@link TAPTable} + * that owns this column. + *

    + * + *

    Important note: + * If this column was already linked with another {@link TAPTable} object, the previous link is removed + * here, but also in the table (by calling {@link TAPTable#removeColumn(String)}). + *

    + * + * @param table The table that owns this column. */ - public final void setTable(DBTable table){ + protected final void setTable(final DBTable table){ + if (this.table != null && this.table instanceof TAPTable && (table == null || !table.equals(this.table))) + ((TAPTable)this.table).removeColumn(adqlName); this.table = table; } /** - * @return The description. + * Get the description of this column. + * + * @return Its description. MAY be NULL */ public final String getDescription(){ return description; } /** - * @param description The description to set. + * Set the description of this column. + * + * @param description Its new description. MAY be NULL */ public final void setDescription(String description){ this.description = description; } /** - * @return The unit. + * Get the unit of the column's values. + * + * @return Its unit. MAY be NULL */ public final String getUnit(){ return unit; } /** - * @param unit The unit to set. + * Set the unit of the column's values. + * + * @param unit Its new unit. MAY be NULL */ public final void setUnit(String unit){ this.unit = unit; } /** - * @return The ucd. + * Get the UCD describing the scientific content of this column. + * + * @return Its UCD. MAY be NULL */ public final String getUcd(){ return ucd; } /** - * @param ucd The ucd to set. + * Set the UCD describing the scientific content of this column. + * + * @param ucd Its new UCD. MAY be NULL */ public final void setUcd(String ucd){ this.ucd = ucd; } /** - * @return The utype. + * Get the UType associating this column with a data-model. + * + * @return Its UType. MAY be NULL */ public final String getUtype(){ return utype; } /** - * @param utype The utype to set. + * Set the UType associating this column with a data-model. + * + * @param utype Its new UType. MAY be NULL */ public final void setUtype(String utype){ this.utype = utype; } /** - * @return The datatype. + * Get the type of the column's values. + * + * @return Its datatype. 
CAN'T be NULL */ - public final String getDatatype(){ + @Override + public final DBType getDatatype(){ return datatype; } /** - * @return Array size (>0 or 2 special values: {@link TAPTypes#NO_SIZE} and {@link TAPTypes#STAR_SIZE}). - */ - public final int getArraySize(){ - return size; - } - - /** - *

    Sets the DB datatype, the size and uses these information to set the corresponding VOTable type.

    - * Important: - *
      - *
    • If the given datatype is not known according to {@link TAPTypes#getDBType(String)}, the datatype of this column is set to its default value (see {@link #setDefaultType()}),
    • - *
    • The VOTable type is set automatically thanks to {@link TAPTypes#getVotType(String, int)}.
    • - *
    + *

    Set the type of the column's values.

    * - * @param datatype The datatype to set. - * @param size Array size (>0 or 2 special values: {@link TAPTypes#NO_SIZE} and {@link TAPTypes#STAR_SIZE}). + *

    Note: + * The datatype won't be changed, if the given type is NULL. + *

    * - * @see TAPTypes#getDBType(VotType) - * @see TAPTypes#getVotType(String, int) - * @see #setDefaultType() - */ - public final void setDatatype(String datatype, int size){ - this.datatype = TAPTypes.getDBType(datatype); - this.size = (size <= 0 && size != TAPTypes.STAR_SIZE) ? TAPTypes.NO_SIZE : size; - - if (this.datatype == null) - setDefaultType(); - else - this.votType = TAPTypes.getVotType(this.datatype, this.size); - } - - /** - * @return The VOTable type to use. + * @param type Its new datatype. */ - public final VotType getVotType(){ - return votType; + public final void setDatatype(final DBType type){ + if (type != null) + datatype = type; } /** - *

    Sets the VOTable type and uses it to set the DB datatype and its size.

    - * Important: - *
      - *
    • If the given VOTable type is not known according to {@link TAPTypes#getDBType(VotType)}, the DB datatype of this column and its size are set to the default value (see {@link #setDefaultType()}).
    • - *
    - * - * @param type A full VOTable type (that's to say: datatype, arraysize and xtype). + * Tell whether this column is one of those returned by default. * - * @see TAPTypes#getDBType(VotType) - * @see #setDefaultType() - */ - public final void setVotType(final VotType type){ - this.votType = type; - this.datatype = TAPTypes.getDBType(type); - this.size = type.arraysize; - - if (this.datatype == null) - setDefaultType(); - } - - /** - * Sets the default DB datatype (VARCHAR) and its corresponding VOTable type (char , *). - */ - protected final void setDefaultType(){ - datatype = TAPTypes.VARCHAR; - size = TAPTypes.STAR_SIZE; - votType = TAPTypes.getVotType(datatype, size); - } - - /** - * @return The principal. + * @return true if this column should be returned by default, false otherwise. */ public final boolean isPrincipal(){ return principal; } /** - * @param principal The principal to set. + * Set whether this column should be one of those returned by default. + * + * @param principal true if this column should be returned by default, false otherwise. */ public final void setPrincipal(boolean principal){ this.principal = principal; } /** - * @return The indexed. + * Tell whether this column is indexed. + * + * @return true if this column is indexed, false otherwise. */ public final boolean isIndexed(){ return indexed; } /** - * @param indexed The indexed to set. + * Set whether this column is indexed or not. + * + * @param indexed true if this column is indexed, false otherwise. */ public final void setIndexed(boolean indexed){ this.indexed = indexed; } /** - * @return The std. + * Tell whether this column is nullable. + * + * @return true if this column is nullable, false otherwise. + * + * @since 2.0 + */ + public final boolean isNullable(){ + return nullable; + } + + /** + * Set whether this column is nullable or not. + * + * @param nullable true if this column is nullable, false otherwise. 
+ * + * @since 2.0 + */ + public final void setNullable(boolean nullable){ + this.nullable = nullable; + } + + /** + * Tell whether this column is defined by a standard. + * + * @return true if this column is defined by a standard, false otherwise. */ public final boolean isStd(){ return std; } /** - * @param std The std to set. + * Set whether this column is defined by a standard. + * + * @param std true if this column is defined by a standard, false otherwise. */ public final void setStd(boolean std){ this.std = std; } + /** + *

    Get the other (piece of) information associated with this column.

    + * + *

    Note: + * By default, NULL is returned, but it may be any kind of value ({@link Integer}, + * {@link String}, {@link Map}, {@link List}, ...). + *

    + * + * @return The other (piece of) information. MAY be NULL + */ public Object getOtherData(){ return otherData; } + /** + * Set the other (piece of) information associated with this column. + * + * @param data Another information about this column. MAY be NULL + */ public void setOtherData(Object data){ otherData = data; } + /** + *

    Let add a foreign key in which this column is a source (= which is targeting another column).

    + * + *

    Note: + * Nothing is done if the given value is NULL. + *

    + * + *

    Warning: + * For consistency reasons, this function SHOULD be called only by the {@link TAPTable} + * that owns this column or that is part of the foreign key. + *

    + * + * @param key A foreign key. + */ protected void addTarget(TAPForeignKey key){ if (key != null) lstTargets.add(key); } - protected int getNbTargets(){ + /** + * Get the number of times this column is targeting another column. + * + * @return How many this column is source in a foreign key. + */ + public int getNbTargets(){ return lstTargets.size(); } - protected Iterator getTargets(){ + /** + * Get the list of foreign keys in which this column is a source (= is targeting another column). + * + * @return List of foreign keys in which this column is a source. + */ + public Iterator getTargets(){ return lstTargets.iterator(); } + /** + *

    Remove the fact that this column is a source (= is targeting another column) + * in the given foreign key.

    + * + *

    Note: + * Nothing is done if the given value is NULL. + *

    + * + *

    Warning: + * For consistency reasons, this function SHOULD be called only by the {@link TAPTable} + * that owns this column or that is part of the foreign key. + *

    + * + * @param key Foreign key in which this column was targeting another column. + */ protected void removeTarget(TAPForeignKey key){ - lstTargets.remove(key); + if (key != null) + lstTargets.remove(key); } + /** + *

    Remove the fact that this column is a source (= is targeting another column) + * in any foreign key in which it was.

    + * + *

    Warning: + * For consistency reasons, this function SHOULD be called only by the {@link TAPTable} + * that owns this column or that is part of the foreign key. + *

    + */ protected void removeAllTargets(){ lstTargets.clear(); } + /** + *

    Let add a foreign key in which this column is a target (= which is targeted by another column).

    + * + *

    Note: + * Nothing is done if the given value is NULL. + *

    + * + *

    Warning: + * For consistency reasons, this function SHOULD be called only by the {@link TAPTable} + * that owns this column or that is part of the foreign key. + *

    + * + * @param key A foreign key. + */ protected void addSource(TAPForeignKey key){ if (key != null) lstSources.add(key); } - protected int getNbSources(){ + /** + * Get the number of times this column is targeted by another column. + * + * @return How many this column is target in a foreign key. + */ + public int getNbSources(){ return lstSources.size(); } - protected Iterator getSources(){ + /** + * Get the list of foreign keys in which this column is a target (= is targeted another column). + * + * @return List of foreign keys in which this column is a target. + */ + public Iterator getSources(){ return lstSources.iterator(); } + /** + *

    Remove the fact that this column is a target (= is targeted by another column) + * in the given foreign key.

    + * + *

    Note: + * Nothing is done if the given value is NULL. + *

    + * + *

    Warning: + * For consistency reasons, this function SHOULD be called only by the {@link TAPTable} + * that owns this column or that is part of the foreign key. + *

    + * + * @param key Foreign key in which this column was targeted by another column. + */ protected void removeSource(TAPForeignKey key){ lstSources.remove(key); } + /** + *

    Remove the fact that this column is a target (= is targeted by another column) + * in any foreign key in which it was.

    + * + *

    Warning: + * For consistency reasons, this function SHOULD be called only by the {@link TAPTable} + * that owns this column or that is part of the foreign key. + *

    + */ protected void removeAllSources(){ lstSources.clear(); } + /** + *

    Warning: + * Since the type of the other data is not known, the copy of its value + * can not be done properly. So, this column and its copy will share the same other data object. + * If it is also needed to make a deep copy of this other data object, this function MUST be + * overridden. + * + * + * @see adql.db.DBColumn#copy(java.lang.String, java.lang.String, adql.db.DBTable) + */ + @Override public DBColumn copy(final String dbName, final String adqlName, final DBTable dbTable){ - TAPColumn copy = new TAPColumn((adqlName == null) ? this.adqlName : adqlName, description, unit, ucd, utype); + TAPColumn copy = new TAPColumn((adqlName == null) ? this.adqlName : adqlName, datatype, description, unit, ucd, utype); copy.setDBName((dbName == null) ? this.dbName : dbName); copy.setTable(dbTable); - copy.setDatatype(datatype, size); copy.setIndexed(indexed); copy.setPrincipal(principal); copy.setStd(std); @@ -360,11 +766,22 @@ public class TAPColumn implements DBColumn { return copy; } + /** + *

    Provide a deep copy (included the other data) of this column.

    + * + *

    Warning: + * Since the type of the other data is not known, the copy of its value + * can not be done properly. So, this column and its copy will share the same other data object. + * If it is also needed to make a deep copy of this other data object, this function MUST be + * overridden. + * + * + * @return The deep copy of this column. + */ public DBColumn copy(){ - TAPColumn copy = new TAPColumn(adqlName, description, unit, ucd, utype); + TAPColumn copy = new TAPColumn(adqlName, datatype, description, unit, ucd, utype); copy.setDBName(dbName); copy.setTable(table); - copy.setDatatype(datatype, size); copy.setIndexed(indexed); copy.setPrincipal(principal); copy.setStd(std); @@ -378,7 +795,7 @@ public class TAPColumn implements DBColumn { return false; TAPColumn col = (TAPColumn)obj; - return col.getTable().equals(table) && col.getName().equals(adqlName); + return col.getTable().equals(table) && col.getADQLName().equals(adqlName); } @Override diff --git a/src/tap/metadata/TAPMetadata.java b/src/tap/metadata/TAPMetadata.java index 9dda1b99bbc6bae1d679ca65438c43795dcd49a3..580b966acbfacb1e985dd40daa99d7d960a633bb 100644 --- a/src/tap/metadata/TAPMetadata.java +++ b/src/tap/metadata/TAPMetadata.java @@ -16,47 +16,127 @@ package tap.metadata; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . 
* - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.IOException; import java.io.PrintWriter; - import java.util.ArrayList; -import java.util.HashMap; import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.Map; import java.util.NoSuchElementException; import javax.servlet.ServletConfig; import javax.servlet.ServletException; - import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import adql.db.DBTable; +import tap.metadata.TAPTable.TableType; import tap.resource.Capabilities; import tap.resource.TAPResource; import tap.resource.VOSIResource; +import uk.ac.starlink.votable.VOSerializer; +import uws.ClientAbortException; +import uws.UWSToolBox; +import adql.db.DBTable; +import adql.db.DBType; +import adql.db.DBType.DBDatatype; +/** + *

    Let listing all schemas, tables and columns available in a TAP service. + * This list also corresponds to the TAP resource "/tables".

    + * + *

    + * Only schemas are stored in this object. So that's why only schemas can be added and removed + * from this class. However, {@link TAPSchema} objects are listing tables, whose the object + * representation is listing columns. So to add tables, you must first embed them in a schema. + *

    + * + *

    + * All metadata have two names: one to use in ADQL queries and the other to use when really querying + * the database. This is very useful to hide the real complexity of the database and propose + * a simpler view of the query-able data. It is particularly useful if a schema does not exist in the + * database but has been added in the TAP schema for more logical separation on the user point of view. + * In a such case, the schema would have an ADQL name but no DB name (NULL value ; which is possible only + * with {@link TAPSchema} objects). + *

    + * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (03/2015) + */ public class TAPMetadata implements Iterable, VOSIResource, TAPResource { + /** Resource name of the TAP metadata. This name is also used - in this class - in the TAP URL to identify this resource. + * Here it corresponds to the following URI: ".../tables". */ public static final String RESOURCE_NAME = "tables"; + /** List of all schemas available through the TAP service. */ protected final Map schemas; + + /** Part of the TAP URI which identify this TAP resource. + * By default, it is the resource name ; so here, the corresponding TAP URI would be: "/tables". */ protected String accessURL = getName(); + /** + *

    Build an empty list of metadata.

    + * + *

    Note: + * By default, a TAP service must have at least a TAP_SCHEMA schema which contains a set of 5 tables + * (schemas, tables, columns, keys and key_columns). This schema is not created here by default + * because it can be customized by the service implementor. Besides, the DB name may be different. + * However, you can easily get this schema thanks to the function {@link #getStdSchema(boolean)} + * which returns the standard definition of this schema (including all tables and columns described + * by the standard). For a standard definition of this schema, you can then write the following: + *

    + *
    +	 * TAPMetadata meta = new TAPMetadata();
    +	 * meta.addSchema(TAPMetadata.getStdSchema());
    +	 * 
    + *

    + * Of course, this schema (and its tables and their columns) can be customized after if needed. + * Otherwise, if you want customize just some part of this schema, you can also use the function + * {@link #getStdTable(STDTable)} to get just the standard definition of some of its tables, either + * to customize them or to merely get them and keep them like they are. + *

    + */ public TAPMetadata(){ - schemas = new HashMap(); + schemas = new LinkedHashMap(); } + /** + *

    Add the given schema inside this TAP metadata set.

    + * + *

    Note: + * If the given schema is NULL, nothing will be done. + *

    + * + * @param s The schema to add. + */ public final void addSchema(TAPSchema s){ - if (s != null && s.getName() != null) - schemas.put(s.getName(), s); + if (s != null && s.getADQLName() != null) + schemas.put(s.getADQLName(), s); } + /** + *

    Build a new {@link TAPSchema} object with the given ADQL name. + * Then, add it inside this TAP metadata set.

    + * + *

    Note: + * The built {@link TAPSchema} object is returned, so that being modified afterwards if needed. + *

    + * + * @param schemaName ADQL name of the schema to create and add inside this TAP metadata set. + * + * @return The created and added schema, + * or NULL if the given schema is NULL or an empty string. + * + * @see TAPSchema#TAPSchema(String) + * @see #addSchema(TAPSchema) + */ public TAPSchema addSchema(String schemaName){ - if (schemaName == null) + if (schemaName == null || schemaName.trim().length() <= 0) return null; TAPSchema s = new TAPSchema(schemaName); @@ -64,6 +144,24 @@ public class TAPMetadata implements Iterable, VOSIResource, TAPResour return s; } + /** + *

    Build a new {@link TAPSchema} object with the given ADQL name. + * Then, add it inside this TAP metadata set.

    + * + *

    Note: + * The built {@link TAPSchema} object is returned, so that being modified afterwards if needed. + *

    + * + * @param schemaName ADQL name of the schema to create and add inside this TAP metadata set. + * @param description Description of the new schema. MAY be NULL + * @param utype UType associating the new schema with a data-model. MAY be NULL + * + * @return The created and added schema, + * or NULL if the given schema is NULL or an empty string. + * + * @see TAPSchema#TAPSchema(String, String, String) + * @see #addSchema(TAPSchema) + */ public TAPSchema addSchema(String schemaName, String description, String utype){ if (schemaName == null) return null; @@ -73,6 +171,17 @@ public class TAPMetadata implements Iterable, VOSIResource, TAPResour return s; } + /** + *

    Tell whether there is a schema with the given ADQL name.

    + * + *

    Important note: + * This function is case sensitive! + *

    + * + * @param schemaName ADQL name of the schema whose the existence must be checked. + * + * @return true if a schema with the given ADQL name exists, false otherwise. + */ public final boolean hasSchema(String schemaName){ if (schemaName == null) return false; @@ -80,6 +189,18 @@ public class TAPMetadata implements Iterable, VOSIResource, TAPResour return schemas.containsKey(schemaName); } + /** + *

    Search for a schema having the given ADQL name.

    + * + *

    Important note: + * This function is case sensitive! + *

    + * + * @param schemaName ADQL name of the schema to search. + * + * @return The schema having the given ADQL name, + * or NULL if no such schema can be found. + */ public final TAPSchema getSchema(String schemaName){ if (schemaName == null) return null; @@ -87,14 +208,44 @@ public class TAPMetadata implements Iterable, VOSIResource, TAPResour return schemas.get(schemaName); } + /** + * Get the number of schemas contained in this TAP metadata set. + * + * @return Number of all schemas. + */ public final int getNbSchemas(){ return schemas.size(); } + /** + * Tell whether this TAP metadata set contains no schema. + * + * @return true if this TAP metadata set has no schema, + * false if it contains at least one schema. + */ public final boolean isEmpty(){ return schemas.isEmpty(); } + /** + *

    Remove the schema having the given ADQL name.

    + * + *

    Important note: + * This function is case sensitive! + *

    + * + *

    WARNING: + * If the goal of this function's call is to delete definitely the specified schema + * from the metadata, you SHOULD also call {@link TAPTable#removeAllForeignKeys()} on the + * removed table. Indeed, foreign keys of this table would still link the removed table + * with other tables AND columns of the whole metadata set. + *

    + * + * @param schemaName ADQL name of the schema to remove from this TAP metadata set. + * + * @return The removed schema, + * or NULL if no such schema can be found. + */ public final TAPSchema removeSchema(String schemaName){ if (schemaName == null) return null; @@ -102,6 +253,9 @@ public class TAPMetadata implements Iterable, VOSIResource, TAPResour return schemas.remove(schemaName); } + /** + * Remove all schemas of this metadata set. + */ public final void removeAllSchemas(){ schemas.clear(); } @@ -111,10 +265,27 @@ public class TAPMetadata implements Iterable, VOSIResource, TAPResour return schemas.values().iterator(); } + /** + * Get the list of all tables available in this TAP metadata set. + * + * @return An iterator over the list of all tables contained in this TAP metadata set. + */ public Iterator getTables(){ - return new TableIterator(this); + return new TAPTableIterator(this); } + /** + *

    Tell whether this TAP metadata set contains the specified table.

    + * + *

    Note: + * This function is case sensitive! + *

    + * + * @param schemaName ADQL name of the schema owning the table to search. + * @param tableName ADQL name of the table to search. + * + * @return true if the specified table exists, false otherwise. + */ public boolean hasTable(String schemaName, String tableName){ TAPSchema s = getSchema(schemaName); if (s != null) @@ -123,6 +294,17 @@ public class TAPMetadata implements Iterable, VOSIResource, TAPResour return false; } + /** + *

    Tell whether this TAP metadata set contains a table with the given ADQL name, whatever is its schema.

    + * + *

    Note: + * This function is case sensitive! + *

    + * + * @param tableName ADQL name of the table to search. + * + * @return true if the specified table exists, false otherwise. + */ public boolean hasTable(String tableName){ for(TAPSchema s : this) if (s.hasTable(tableName)) @@ -130,7 +312,19 @@ public class TAPMetadata implements Iterable, VOSIResource, TAPResour return false; } - // @Override + /** + *

    Search for the specified table in this TAP metadata set.

    + * + *

    Note: + * This function is case sensitive! + *

    + * + * @param schemaName ADQL name of the schema owning the table to search. + * @param tableName ADQL name of the table to search. + * + * @return The table which has the given ADQL name and which is inside the specified schema, + * or NULL if no such table can be found. + */ public TAPTable getTable(String schemaName, String tableName){ TAPSchema s = getSchema(schemaName); if (s != null) @@ -139,7 +333,19 @@ public class TAPMetadata implements Iterable, VOSIResource, TAPResour return null; } - // @Override + /** + *

    Search in this TAP metadata set for all tables whose the ADQL name matches the given one, + * whatever is their schema.

    + * + *

    Note: + * This function is case sensitive! + *

    + * + * @param tableName ADQL name of the tables to search. + * + * @return A list of all the tables which have the given ADQL name, + * or an empty list if no such table can be found. + */ public ArrayList getTable(String tableName){ ArrayList tables = new ArrayList(); for(TAPSchema s : this) @@ -148,6 +354,11 @@ public class TAPMetadata implements Iterable, VOSIResource, TAPResour return tables; } + /** + * Get the number of all tables contained in this TAP metadata set. + * + * @return Number of all its tables. + */ public int getNbTables(){ int nbTables = 0; for(TAPSchema s : this) @@ -155,11 +366,17 @@ public class TAPMetadata implements Iterable, VOSIResource, TAPResour return nbTables; } - public static class TableIterator implements Iterator { + /** + * Let iterating over the list of all tables contained in a given {@link TAPMetadata} object. + * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (08/2014) + */ + protected static class TAPTableIterator implements Iterator { private Iterator it; private Iterator itTables; - public TableIterator(TAPMetadata tapSchema){ + public TAPTableIterator(TAPMetadata tapSchema){ it = tapSchema.iterator(); if (it.hasNext()) @@ -231,122 +448,269 @@ public class TAPMetadata implements Iterable, VOSIResource, TAPResour } @Override - public void init(ServletConfig config) throws ServletException{ - ; - } + public void init(ServletConfig config) throws ServletException{} @Override - public void destroy(){ - ; - } + public void destroy(){} @Override - public boolean executeResource(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException{ + public boolean executeResource(HttpServletRequest request, HttpServletResponse response) throws IOException{ response.setContentType("application/xml"); + response.setCharacterEncoding(UWSToolBox.DEFAULT_CHAR_ENCODING); PrintWriter writer = response.getWriter(); + write(writer); + return false; + } + + /** + * Format in XML this whole metadata 
set and write it in the given writer. + * + * @param writer Stream in which the XML representation of this metadata must be written. + * + * @throws IOException If there is any error while writing the XML in the given writer. + * + * @since 2.0 + */ + public void write(final PrintWriter writer) throws IOException{ writer.println(""); - // TODO Change the xsi:schemaLocation attribute with a CDS URL ! - //writer.println(""); - writer.println(""); + /* TODO The XSD schema for VOSITables should be fixed soon! This schema should be changed here before the library is released! + * Note: the XSD schema at http://www.ivoa.net/xml/VOSITables/v1.0 contains an incorrect targetNamespace ("http://www.ivoa.net/xml/VOSICapabilities/v1.0"). + * In order to make this XML document valid, a custom location toward a correct XSD schema is used: http://vo.ari.uni-heidelberg.de/docs/schemata/VOSITables-v1.0.xsd */ + writer.println(""); for(TAPSchema s : schemas.values()) writeSchema(s, writer); - writer.println(""); + writer.println(""); - writer.flush(); - - return false; + UWSToolBox.flush(writer); } + /** + *

    Format in XML the given schema and then write it in the given writer.

    + * + *

    Written lines:

    + *
    +	 * <schema>
    +	 * 	<name>...</name>
    +	 * 	<title>...</title>
    +	 * 	<description>...</description>
    +	 * 	<utype>...</utype>
    +	 * 		// call #writeTable(TAPTable, PrintWriter) for each table
    +	 * </schema>
    +	 * 
    + * + *

    Note: + * When NULL an attribute or a field is not written. Here this rule concerns: description and utype. + *

    + * + * @param s The schema to format and to write in XML. + * @param writer Output in which the XML serialization of the given schema must be written. + * + * @throws IOException If the connection with the HTTP client has been either canceled or closed for another reason. + * + * @see #writeTable(TAPTable, PrintWriter) + */ private void writeSchema(TAPSchema s, PrintWriter writer) throws IOException{ final String prefix = "\t\t"; writer.println("\t"); - writeAtt(prefix, "name", s.getName(), writer); - writeAtt(prefix, "description", s.getDescription(), writer); - writeAtt(prefix, "utype", s.getUtype(), writer); + writeAtt(prefix, "name", s.getADQLName(), false, writer); + writeAtt(prefix, "title", s.getTitle(), true, writer); + writeAtt(prefix, "description", s.getDescription(), true, writer); + writeAtt(prefix, "utype", s.getUtype(), true, writer); + + int nbColumns = 0; + for(TAPTable t : s){ + + // write each table: + nbColumns += writeTable(t, writer); + + // flush the PrintWriter buffer when at least 30 tables have been read: + /* Note: the buffer may have already been flushed before automatically, + * but this manual flush is also checking whether any error has occurred while writing the previous characters. + * If so, a ClientAbortException (extension of IOException) is thrown in order to interrupt the writing of the + * metadata and thus, in order to spare server resources (and particularly memory if the metadata set is large). */ + if (nbColumns / 30 > 1){ + UWSToolBox.flush(writer); + nbColumns = 0; + } - for(TAPTable t : s) - writeTable(t, writer); + } writer.println("\t"); + + if (nbColumns > 0) + UWSToolBox.flush(writer); } - private void writeTable(TAPTable t, PrintWriter writer) throws IOException{ + /** + *

    Format in XML the given table and then write it in the given writer.

    + * + *

    Written lines:

    + *
    +	 * <table type="...">
    +	 * 	<name>...</name>
    +	 * 	<title>...</title>
    +	 * 	<description>...</description>
    +	 * 	<utype>...</utype>
    +	 * 		// call #writeColumn(TAPColumn, PrintWriter) for each column
    +	 * 		// call #writeForeignKey(TAPForeignKey, PrintWriter) for each foreign key
    +	 * </table>
    +	 * 
    + * + *

    Note 1: + * When NULL an attribute or a field is not written. Here this rule concerns: description and utype. + *

    + * + *

    Note 2: + * The PrintWriter buffer is flushed all the 10 columns. At that moment the writer is checked for errors. + * If the error flag is set, a {@link ClientAbortException} is thrown in order to stop the metadata writing. + * This is particularly useful if the metadata data is pretty large. + *

    + * + * @param t The table to format and to write in XML. + * @param writer Output in which the XML serialization of the given table must be written. + * + * @return The total number of written columns. + */ + private int writeTable(TAPTable t, PrintWriter writer){ final String prefix = "\t\t\t"; - writer.print("\t\t"); + writer.print("\t\t"); - writeAtt(prefix, "name", t.getFullName(), writer); - writeAtt(prefix, "description", t.getDescription(), writer); - writeAtt(prefix, "utype", t.getUtype(), writer); + if (t.isInitiallyQualified()) + writeAtt(prefix, "name", t.getADQLSchemaName() + "." + t.getADQLName(), false, writer); + else + writeAtt(prefix, "name", t.getADQLName(), false, writer); + writeAtt(prefix, "title", t.getTitle(), true, writer); + writeAtt(prefix, "description", t.getDescription(), true, writer); + writeAtt(prefix, "utype", t.getUtype(), true, writer); + int nbCol = 0; Iterator itCols = t.getColumns(); - while(itCols.hasNext()) + while(itCols.hasNext()){ writeColumn(itCols.next(), writer); + nbCol++; + } Iterator itFK = t.getForeignKeys(); while(itFK.hasNext()) writeForeignKey(itFK.next(), writer); writer.println("\t\t
    "); + + return nbCol; } - private void writeColumn(TAPColumn c, PrintWriter writer) throws IOException{ + /** + *

    Format in XML the given column and then write it in the given writer.

    + * + *

    Written lines:

    + *
    +	 * <column std="true|false"> // the value of this field is TAPColumn#isStd()
    +	 * 	<name>...</name>
    +	 * 	<description>...</description>
    +	 * 	<unit>...</unit>
    +	 * 	<utype>...</utype>
    +	 * 	<ucd>...</ucd>
    +	 * 	<dataType xsi:type="vod:TAPType" size="...">...</dataType>
    +	 * 	<flag>indexed</flag> // if TAPColumn#isIndexed()
    +	 * 	<flag>primary</flag> // if TAPColumn#isPrincipal()
    +	 * </column>
    +	 * 
    + * + *

    Note: + * When NULL an attribute or a field is not written. Here this rule concerns: description, unit, utype, ucd and flags. + *

    + * + * @param c The column to format and to write in XML. + * @param writer Output in which the XML serialization of the given column must be written. + */ + private void writeColumn(TAPColumn c, PrintWriter writer){ final String prefix = "\t\t\t\t"; - writer.print("\t\t\t"); + writer.print("\t\t\t"); - writeAtt(prefix, "name", c.getName(), writer); - writeAtt(prefix, "description", c.getDescription(), writer); - writeAtt(prefix, "unit", c.getUnit(), writer); - writeAtt(prefix, "utype", c.getUtype(), writer); - writeAtt(prefix, "ucd", c.getUcd(), writer); + writeAtt(prefix, "name", c.getADQLName(), false, writer); + writeAtt(prefix, "description", c.getDescription(), true, writer); + writeAtt(prefix, "unit", c.getUnit(), true, writer); + writeAtt(prefix, "ucd", c.getUcd(), true, writer); + writeAtt(prefix, "utype", c.getUtype(), true, writer); if (c.getDatatype() != null){ writer.print(prefix); writer.print("= 0){ + if (c.getDatatype().length > 0){ writer.print(" size=\""); - writer.print(c.getArraySize()); + writer.print(c.getDatatype().length); writer.print("\""); } writer.print('>'); - writer.print(c.getDatatype().toUpperCase()); + writer.print(VOSerializer.formatText(c.getDatatype().type.toString().toUpperCase())); writer.println(""); } if (c.isIndexed()) - writeAtt(prefix, "flag", "indexed", writer); + writeAtt(prefix, "flag", "indexed", true, writer); if (c.isPrincipal()) - writeAtt(prefix, "flag", "primary", writer); + writeAtt(prefix, "flag", "primary", true, writer); + if (c.isNullable()) + writeAtt(prefix, "flag", "nullable", true, writer); writer.println("\t\t\t"); } - private void writeForeignKey(TAPForeignKey fk, PrintWriter writer) throws IOException{ + /** + *

    Format in XML the given foreign key and then write it in the given writer.

    + * + *

    Written lines:

    + *
    +	 * <foreignKey>
    +	 * 	<targetTable>...</targetTable>
    +	 * 	<description>...</description>
    +	 * 	<utype>...</utype>
    +	 * 	<fkColumn>
    +	 * 		<fromColumn>...</fromColumn>
    +	 * 		<targetColumn>...</targetColumn>
    +	 * 	</fkColumn>
    +	 * 	...
    +	 * </foreignKey>
    +	 * 
    + * + *

    Note: + * When NULL an attribute or a field is not written. Here this rule concerns: description and utype. + *

    + * + * @param fk The foreign key to format and to write in XML. + * @param writer Output in which the XML serialization of the given foreign key must be written. + */ + private void writeForeignKey(TAPForeignKey fk, PrintWriter writer){ final String prefix = "\t\t\t\t"; writer.println("\t\t\t"); - writeAtt(prefix, "targetTable", fk.getTargetTable().getFullName(), writer); - writeAtt(prefix, "description", fk.getDescription(), writer); - writeAtt(prefix, "utype", fk.getUtype(), writer); + writeAtt(prefix, "targetTable", fk.getTargetTable().getFullName(), false, writer); + writeAtt(prefix, "description", fk.getDescription(), true, writer); + writeAtt(prefix, "utype", fk.getUtype(), true, writer); final String prefix2 = prefix + "\t"; for(Map.Entry entry : fk){ writer.print(prefix); writer.println(""); - writeAtt(prefix2, "fromColumn", entry.getKey(), writer); - writeAtt(prefix2, "targetColumn", entry.getValue(), writer); + writeAtt(prefix2, "fromColumn", entry.getKey(), false, writer); + writeAtt(prefix2, "targetColumn", entry.getValue(), false, writer); writer.print(prefix); writer.println(""); } @@ -354,11 +718,205 @@ public class TAPMetadata implements Iterable, VOSIResource, TAPResour writer.println("\t\t\t"); } - private void writeAtt(String prefix, String attributeName, String attributeValue, PrintWriter writer) throws IOException{ - if (attributeValue != null){ + /** + * Write the specified metadata attribute as a simple XML node. + * + * @param prefix Prefix of the XML node. (generally, space characters) + * @param attributeName Name of the metadata attribute to write (= Name of the XML node). + * @param attributeValue Value of the metadata attribute (= Value of the XML node). 
+ * @param isOptionalAttr true if the attribute to write is optional (in this case, if the value is NULL or an empty string, the whole attribute item won't be written), + * false otherwise (here, if the value is NULL or an empty string, the XML item will be written with an empty string as value). + * @param writer Output in which the XML node must be written. + */ + private void writeAtt(String prefix, String attributeName, String attributeValue, boolean isOptionalAttr, PrintWriter writer){ + if (attributeValue != null && attributeValue.trim().length() > 0){ StringBuffer xml = new StringBuffer(prefix); - xml.append('<').append(attributeName).append('>').append(attributeValue).append("'); + xml.append('<').append(attributeName).append('>').append(VOSerializer.formatText(attributeValue)).append("'); writer.println(xml.toString()); + }else if (!isOptionalAttr) + writer.println("<" + attributeName + ">"); + } + + /** + *

    + * Get the definition of the whole standard TAP_SCHEMA. Thus, all standard TAP_SCHEMA tables + * (with all their columns) are also included in this object. + *

    + * + *

    Note: + * This function create the {@link TAPSchema} and all its {@link TAPTable}s objects on the fly. + *

    + * + * @param isSchemaSupported false if the DB name must be prefixed by "TAP_SCHEMA_", true otherwise. + * + * @return The whole TAP_SCHEMA definition. + * + * @see STDSchema#TAPSCHEMA + * @see STDTable + * @see #getStdTable(STDTable) + * + * @since 2.0 + */ + public static final TAPSchema getStdSchema(final boolean isSchemaSupported){ + TAPSchema tap_schema = new TAPSchema(STDSchema.TAPSCHEMA.toString(), "Set of tables listing and describing the schemas, tables and columns published in this TAP service.", null); + if (!isSchemaSupported) + tap_schema.setDBName(null); + for(STDTable t : STDTable.values()){ + TAPTable table = getStdTable(t); + if (!isSchemaSupported) + table.setDBName(STDSchema.TAPSCHEMA.label + "_" + table.getADQLName()); + tap_schema.addTable(table); + } + return tap_schema; + } + + /** + *

    Get the definition of the specified standard TAP table.

    + * + *

    Important note: + * The returned table is not linked at all with a schema, on the contrary of {@link #getStdSchema(boolean)} which returns tables linked with the returned schema. + * So, you may have to linked this table to schema (by using {@link TAPSchema#addTable(TAPTable)}) whose the ADQL name is TAP_SCHEMA after calling this function. + *

    + * + *

    Note: + * This function create the {@link TAPTable} object on the fly. + *

    + * + * @param tableId ID of the TAP table to return. + * + * @return The corresponding table definition (with no schema). + * + * @since 2.0 + */ + public static final TAPTable getStdTable(final STDTable tableId){ + switch(tableId){ + + case SCHEMAS: + TAPTable schemas = new TAPTable(STDTable.SCHEMAS.toString(), TableType.table, "List of schemas published in this TAP service.", null); + schemas.setInitiallyQualifed(true); + schemas.addColumn("schema_name", new DBType(DBDatatype.VARCHAR), "schema name, possibly qualified", null, null, null, true, true, true); + schemas.addColumn("description", new DBType(DBDatatype.VARCHAR), "brief description of schema", null, null, null, false, false, true); + schemas.addColumn("utype", new DBType(DBDatatype.VARCHAR), "UTYPE if schema corresponds to a data model", null, null, null, false, false, true); + return schemas; + + case TABLES: + TAPTable tables = new TAPTable(STDTable.TABLES.toString(), TableType.table, "List of tables published in this TAP service.", null); + tables.setInitiallyQualifed(true); + tables.addColumn("schema_name", new DBType(DBDatatype.VARCHAR), "the schema name from TAP_SCHEMA.schemas", null, null, null, true, true, true); + tables.addColumn("table_name", new DBType(DBDatatype.VARCHAR), "table name as it should be used in queries", null, null, null, true, true, true); + tables.addColumn("table_type", new DBType(DBDatatype.VARCHAR), "one of: table, view", null, null, null, false, false, true); + tables.addColumn("description", new DBType(DBDatatype.VARCHAR), "brief description of table", null, null, null, false, false, true); + tables.addColumn("utype", new DBType(DBDatatype.VARCHAR), "UTYPE if table corresponds to a data model", null, null, null, false, false, true); + return tables; + + case COLUMNS: + TAPTable columns = new TAPTable(STDTable.COLUMNS.toString(), TableType.table, "List of columns of all tables listed in TAP_SCHEMA.TABLES and published in this TAP service.", null); + 
columns.setInitiallyQualifed(true); + columns.addColumn("table_name", new DBType(DBDatatype.VARCHAR), "table name from TAP_SCHEMA.tables", null, null, null, true, true, true); + columns.addColumn("column_name", new DBType(DBDatatype.VARCHAR), "column name", null, null, null, true, true, true); + columns.addColumn("description", new DBType(DBDatatype.VARCHAR), "brief description of column", null, null, null, false, false, true); + columns.addColumn("unit", new DBType(DBDatatype.VARCHAR), "unit in VO standard format", null, null, null, false, false, true); + columns.addColumn("ucd", new DBType(DBDatatype.VARCHAR), "UCD of column if any", null, null, null, false, false, true); + columns.addColumn("utype", new DBType(DBDatatype.VARCHAR), "UTYPE of column if any", null, null, null, false, false, true); + columns.addColumn("datatype", new DBType(DBDatatype.VARCHAR), "ADQL datatype as in section 2.5", null, null, null, false, false, true); + columns.addColumn("size", new DBType(DBDatatype.INTEGER), "length of variable length datatypes", null, null, null, false, false, true); + columns.addColumn("principal", new DBType(DBDatatype.INTEGER), "a principal column; 1 means true, 0 means false", null, null, null, false, false, true); + columns.addColumn("indexed", new DBType(DBDatatype.INTEGER), "an indexed column; 1 means true, 0 means false", null, null, null, false, false, true); + columns.addColumn("std", new DBType(DBDatatype.INTEGER), "a standard column; 1 means true, 0 means false", null, null, null, false, false, true); + return columns; + + case KEYS: + TAPTable keys = new TAPTable(STDTable.KEYS.toString(), TableType.table, "List all foreign keys but provides just the tables linked by the foreign key. 
To know which columns of these tables are linked, see in TAP_SCHEMA.key_columns using the key_id.", null); + keys.setInitiallyQualifed(true); + keys.addColumn("key_id", new DBType(DBDatatype.VARCHAR), "unique key identifier", null, null, null, true, true, true); + keys.addColumn("from_table", new DBType(DBDatatype.VARCHAR), "fully qualified table name", null, null, null, false, false, true); + keys.addColumn("target_table", new DBType(DBDatatype.VARCHAR), "fully qualified table name", null, null, null, false, false, true); + keys.addColumn("description", new DBType(DBDatatype.VARCHAR), "description of this key", null, null, null, false, false, true); + keys.addColumn("utype", new DBType(DBDatatype.VARCHAR), "utype of this key", null, null, null, false, false, true); + return keys; + + case KEY_COLUMNS: + TAPTable key_columns = new TAPTable(STDTable.KEY_COLUMNS.toString(), TableType.table, "List all foreign keys but provides just the columns linked by the foreign key. To know the table of these columns, see in TAP_SCHEMA.keys using the key_id.", null); + key_columns.setInitiallyQualifed(true); + key_columns.addColumn("key_id", new DBType(DBDatatype.VARCHAR), "unique key identifier", null, null, null, true, true, true); + key_columns.addColumn("from_column", new DBType(DBDatatype.VARCHAR), "key column name in the from_table", null, null, null, false, false, true); + key_columns.addColumn("target_column", new DBType(DBDatatype.VARCHAR), "key column name in the target_table", null, null, null, false, false, true); + return key_columns; + + default: + return null; + } + } + + /** + *

    Tell whether the given table name is a standard TAP table.

    + * + *

    Note: + * This function is case sensitive. Indeed TAP_SCHEMA tables are defined by the TAP standard by a given case. + * Thus, this case is expected here. + *

    + * + * @param tableName Unqualified table name. + * + * @return The corresponding {@link STDTable} or NULL if the given table is not part of the TAP standard. + * + * @since 2.0 + */ + public static final STDTable resolveStdTable(String tableName){ + if (tableName == null || tableName.trim().length() == 0) + return null; + + for(STDTable t : STDTable.values()){ + if (t.label.equals(tableName)) + return t; + } + + return null; + } + + /** + * Enumeration of all schemas defined in the TAP standard. + * + * @author Grégory Mantelet (ARI) + * @version 2.0 (07/2014) + * @since 2.0 + */ + public enum STDSchema{ + TAPSCHEMA("TAP_SCHEMA"), UPLOADSCHEMA("TAP_UPLOAD"); + + /** Real name of the schema. */ + public final String label; + + private STDSchema(final String name){ + this.label = name; + } + + @Override + public String toString(){ + return label; + } + } + + /** + * Enumeration of all tables of TAP_SCHEMA. + * + * @author Grégory Mantelet (ARI) + * @version 2.0 (07/2014) + * @since 2.0 + */ + public enum STDTable{ + SCHEMAS("schemas"), TABLES("tables"), COLUMNS("columns"), KEYS("keys"), KEY_COLUMNS("key_columns"); + + /** Real name of the table. */ + public final String label; + + private STDTable(final String name){ + this.label = name; + } + + @Override + public String toString(){ + return label; } } diff --git a/src/tap/metadata/TAPSchema.java b/src/tap/metadata/TAPSchema.java index faced37e9c861593c1888c473f06c631743dc2c3..182c421cf71a92f22adb39f972b2f79d0079b6db 100644 --- a/src/tap/metadata/TAPSchema.java +++ b/src/tap/metadata/TAPSchema.java @@ -16,37 +16,127 @@ package tap.metadata; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . 
* - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Astronomisches Rechen Institut (ARI) */ -import java.util.HashMap; +import java.awt.List; import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.Map; +import tap.metadata.TAPTable.TableType; + +/** + *

    Represent a schema as described by the IVOA standard in the TAP protocol definition.

    + * + *

    + * This object representation has exactly the same fields as the column of the table TAP_SCHEMA.schemas. + * But it also provides a way to add other data. For instance, if information not listed in the standard + * may be stored here, they can be using the function {@link #setOtherData(Object)}. This object can be + * a single value (integer, string, ...), but also a {@link Map}, {@link List}, etc... + *

    + * + *

    Note: + * On the contrary to {@link TAPColumn} and {@link TAPTable}, a {@link TAPSchema} object MAY have no DB name. + * But by default, at the creation the DB name is the ADQL name. Once created, it is possible to set the DB + * name with {@link #setDBName(String)}. This DB name MAY be qualified, BUT MUST BE without double quotes. + *

    + * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (02/2015) + */ public class TAPSchema implements Iterable { + /** Name that this schema MUST have in ADQL queries. */ private final String adqlName; + /** Name that this schema have in the database. + * Note: It MAY be NULL. By default, it is the ADQL name. */ private String dbName = null; + /** Descriptive, human-interpretable name of the schema. + * Note: Standard TAP schema field ; MAY be NULL. + * @since 2.0 */ + private String title = null; + + /** Description of this schema. + * Note: Standard TAP schema field ; MAY be NULL. */ private String description = null; + /** UType describing the scientific content of this schema. + * Note: Standard TAP schema field ; MAY be NULL. */ private String utype = null; + /** Let add some information in addition of the ones of the TAP protocol. + * Note: This object can be anything: an {@link Integer}, a {@link String}, a {@link Map}, a {@link List}, ... + * Its content is totally free and never used or checked. */ protected Object otherData = null; + /** List all tables contained inside this schema. */ protected final Map tables; + /** + *

    Build a {@link TAPSchema} instance with the given ADQL name.

    + * + *

    Note: + * The DB name is set by default with the ADQL name. To set the DB name, + * you MUST call then {@link #setDBName(String)}. + *

    + * + *

    Note: + * If the given ADQL name is prefixed (= it has some text separated by a '.' before the schema name), + * this prefix will be removed. Only the part after the '.' character will be kept. + *

    + * + * @param schemaName Name that this schema MUST have in ADQL queries. CAN'T be NULL ; this name can never be changed after. + */ public TAPSchema(String schemaName){ - adqlName = schemaName; + if (schemaName == null || schemaName.trim().length() == 0) + throw new NullPointerException("Missing schema name!"); + int indPrefix = schemaName.lastIndexOf('.'); + adqlName = (indPrefix >= 0) ? schemaName.substring(indPrefix + 1).trim() : schemaName.trim(); dbName = adqlName; - tables = new HashMap(); + tables = new LinkedHashMap(); } + /** + *

    Build a {@link TAPSchema} instance with the given ADQL name and description.

    + * + *

    Note: + * The DB name is set by default with the ADQL name. To set the DB name, + * you MUST call then {@link #setDBName(String)}. + *

    + * + *

    Note: + * If the given ADQL name is prefixed (= it has some text separated by a '.' before the schema name), + * this prefix will be removed. Only the part after the '.' character will be kept. + *

    + * + * @param schemaName Name that this schema MUST have in ADQL queries. CAN'T be NULL ; this name can never be changed after. + * @param description Description of this schema. MAY be NULL + */ public TAPSchema(String schemaName, String description){ this(schemaName, description, null); } + /** + *

    Build a {@link TAPSchema} instance with the given ADQL name, description and UType.

    + * + *

    Note: + * The DB name is set by default with the ADQL name. To set the DB name, + * you MUST call then {@link #setDBName(String)}. + *

    + * + *

    Note: + * If the given ADQL name is prefixed (= it has some text separated by a '.' before the schema name), + * this prefix will be removed. Only the part after the '.' character will be kept. + *

    + * + * @param schemaName Name that this schema MUST have in ADQL queries. CAN'T be NULL ; this name can never be changed after. + * @param description Description of this schema. MAY be NULL + * @param utype UType associating this schema with a data-model. MAY be NULL + */ public TAPSchema(String schemaName, String description, String utype){ this(schemaName); this.description = description; @@ -54,68 +144,165 @@ public class TAPSchema implements Iterable { } /** - * @return The name. + * Get the ADQL name (the name this schema MUST have in ADQL queries). + * + * @return Its ADQL name. + * @see #getADQLName() + * @deprecated Does not do anything special: just call {@link #getADQLName()}. */ + @Deprecated public final String getName(){ return getADQLName(); } + /** + * Get the name this schema MUST have in ADQL queries. + * + * @return Its ADQL name. CAN'T be NULL + */ public final String getADQLName(){ return adqlName; } + /** + * Get the name this schema MUST have in the database. + * + * @return Its DB name. MAY be NULL + */ public final String getDBName(){ return dbName; } + /** + * Set the name this schema MUST have in the database. + * + * @param name Its new DB name. MAY be NULL + */ public final void setDBName(String name){ name = (name != null) ? name.trim() : name; - dbName = (name == null || name.length() == 0) ? adqlName : name; + dbName = name; + } + + /** + * Get the title of this schema. + * + * @return Its title. MAY be NULL + * + * @since 2.0 + */ + public final String getTitle(){ + return title; + } + + /** + * Set the title of this schema. + * + * @param title Its new title. MAY be NULL + * + * @since 2.0 + */ + public final void setTitle(final String title){ + this.title = title; } /** - * @return The description. + * Get the description of this schema. + * + * @return Its description. MAY be NULL */ public final String getDescription(){ return description; } /** - * @param description The description to set. 
+ * Set the description of this schema. + * + * @param description Its new description. MAY be NULL */ public final void setDescription(String description){ this.description = description; } /** - * @return The utype. + * Get the UType associating this schema with a data-model. + * + * @return Its UType. MAY be NULL */ public final String getUtype(){ return utype; } /** - * @param utype The utype to set. + * Set the UType associating this schema with a data-model. + * + * @param utype Its new UType. MAY be NULL */ public final void setUtype(String utype){ this.utype = utype; } + /** + *

    Get the other (piece of) information associated with this schema.

    + * + *

    Note: + * By default, NULL is returned, but it may be any kind of value ({@link Integer}, + * {@link String}, {@link Map}, {@link List}, ...). + *

    + * + * @return The other (piece of) information. MAY be NULL + */ public Object getOtherData(){ return otherData; } + /** + * Set the other (piece of) information associated with this schema. + * + * @param data Another information about this schema. MAY be NULL + */ public void setOtherData(Object data){ otherData = data; } + /** + *

    Add the given table inside this schema.

    + * + *

    Note: + * If the given table is NULL, nothing will be done. + *

    + * + *

    Important note: + * By adding the given table inside this schema, it + * will be linked with this schema using {@link TAPTable#setSchema(TAPSchema)}. + * In this function, if the table was already linked with another {@link TAPSchema}, + * the former link is removed using {@link TAPSchema#removeTable(String)}. + *

    + * + * @param newTable Table to add inside this schema. + */ public final void addTable(TAPTable newTable){ - if (newTable != null && newTable.getName() != null){ - tables.put(newTable.getName(), newTable); + if (newTable != null && newTable.getADQLName() != null){ + tables.put(newTable.getADQLName(), newTable); newTable.setSchema(this); } } + /** + *

    Build a {@link TAPTable} object whose the ADQL and DB name will the given one. + * Then, add this table inside this schema.

    + * + *

    Note: + * The built {@link TAPTable} object is returned, so that being modified afterwards if needed. + *

    + * + * @param tableName ADQL name (and indirectly also the DB name) of the table to create and add. + * + * @return The created and added {@link TAPTable} object, + * or NULL if the given name is NULL or an empty string. + * + * @see TAPTable#TAPTable(String) + * @see #addTable(TAPTable) + */ public TAPTable addTable(String tableName){ if (tableName == null) return null; @@ -125,15 +312,46 @@ public class TAPSchema implements Iterable { return t; } - public TAPTable addTable(String tableName, String tableType, String description, String utype){ + /** + *

    Build a {@link TAPTable} object whose the ADQL and DB name will the given one. + * Then, add this table inside this schema.

    + * + *

    Note: + * The built {@link TAPTable} object is returned, so that being modified afterwards if needed. + *

    + * + * @param tableName ADQL name (and indirectly also the DB name) of the table to create and add. + * @param tableType Type of the new table. If NULL, "table" will be the type of the created table. + * @param description Description of the new table. MAY be NULL + * @param utype UType associating the new column with a data-model. MAY be NULL + * + * @return The created and added {@link TAPTable} object, + * or NULL if the given name is NULL or an empty string. + * + * @see TAPTable#TAPTable(String, TableType, String, String) + * @see #addTable(TAPTable) + */ + public TAPTable addTable(String tableName, TableType tableType, String description, String utype){ if (tableName == null) return null; TAPTable t = new TAPTable(tableName, tableType, description, utype); addTable(t); + return t; } + /** + *

    Tell whether this schema contains a table having the given ADQL name.

    + * + *

    Important note: + * This function is case sensitive! + *

    + * + * @param tableName Name of the table whose the existence in this schema must be checked. + * + * @return true if a table with the given ADQL name exists, false otherwise. + */ public final boolean hasTable(String tableName){ if (tableName == null) return false; @@ -141,6 +359,18 @@ public class TAPSchema implements Iterable { return tables.containsKey(tableName); } + /** + *

    Search for a table having the given ADQL name.

    + * + *

    Important note: + * This function is case sensitive! + *

    + * + * @param tableName ADQL name of the table to search. + * + * @return The table having the given ADQL name, + * or NULL if no such table can be found. + */ public final TAPTable getTable(String tableName){ if (tableName == null) return null; @@ -148,33 +378,79 @@ public class TAPSchema implements Iterable { return tables.get(tableName); } + /** + * Get the number of all tables contained inside this schema. + * + * @return Number of its tables. + */ public final int getNbTables(){ return tables.size(); } + /** + * Tell whether this schema contains no table. + * + * @return true if this schema contains no table, + * false if it has at least one table. + */ public final boolean isEmpty(){ return tables.isEmpty(); } + /** + *

    Remove the table having the given ADQL name.

    + * + *

    Important note: + * This function is case sensitive! + *

    + * + *

    Note: + * If the specified table is removed, its schema link is also deleted. + *

    + * + *

    WARNING: + * If the goal of this function's call is to delete definitely the specified table + * from the metadata, you SHOULD also call {@link TAPTable#removeAllForeignKeys()}. + * Indeed, foreign keys of the table would still link the removed table with other tables + * AND columns of the whole metadata set. + *

    + * + * @param tableName ADQL name of the table to remove from this schema. + * + * @return The removed table, + * or NULL if no table with the given ADQL name can be found. + */ public final TAPTable removeTable(String tableName){ if (tableName == null) return null; TAPTable removedTable = tables.remove(tableName); - if (removedTable != null){ + if (removedTable != null) removedTable.setSchema(null); - removedTable.removeAllForeignKeys(); - } return removedTable; } + /** + *

    Remove all the tables contained inside this schema.

    + * + *

    Note: + * When a table is removed, its schema link is also deleted. + *

    + * + *

    CAUTION: + * If the goal of this function's call is to delete definitely all the tables of this schema + * from the metadata, you SHOULD also call {@link TAPTable#removeAllForeignKeys()} + * on all tables before calling this function. + * Indeed, foreign keys of the tables would still link the removed tables with other tables + * AND columns of the whole metadata set. + *

    + */ public final void removeAllTables(){ Iterator> it = tables.entrySet().iterator(); while(it.hasNext()){ Map.Entry entry = it.next(); it.remove(); entry.getValue().setSchema(null); - entry.getValue().removeAllForeignKeys(); } } diff --git a/src/tap/metadata/TAPTable.java b/src/tap/metadata/TAPTable.java index 92fd803c37dc8cfb8630355c59110830e43c5d56..f3030d0aea914e9b6340d2fec5407db949a5cf21 100644 --- a/src/tap/metadata/TAPTable.java +++ b/src/tap/metadata/TAPTable.java @@ -16,70 +16,209 @@ package tap.metadata; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Astronomisches Rechen Institut (ARI) */ +import java.awt.List; import java.util.ArrayList; import java.util.Collection; -import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; +import tap.TAPException; import adql.db.DBColumn; import adql.db.DBTable; +import adql.db.DBType; +/** + *

    Represent a table as described by the IVOA standard in the TAP protocol definition.

    + * + *

    + * This object representation has exactly the same fields as the column of the table TAP_SCHEMA.tables. + * But it also provides a way to add other data. For instance, if information not listed in the standard + * may be stored here, they can be using the function {@link #setOtherData(Object)}. This object can be + * a single value (integer, string, ...), but also a {@link Map}, {@link List}, etc... + *

    + * + *

    Important note: + * A {@link TAPTable} object MUST always have a DB name. That's why by default, at the creation + * the DB name is the ADQL name. Once created, it is possible to set the DB name with {@link #setDBName(String)}. + * This DB name MUST be UNqualified and without double quotes. If a NULL or empty value is provided, + * nothing is done and the object keeps its former DB name. + *

    + * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (02/2015) + */ public class TAPTable implements DBTable { + /** + * Different types of table according to the TAP protocol. + * The default one should be "table". + * + * @author Grégory Mantelet (ARI) + * @version 2.0 (08/2014) + * + * @since 2.0 + */ + public enum TableType{ + output, table, view; + } + + /** Name that this table MUST have in ADQL queries. */ private final String adqlName; + /**

    Indicate whether the ADQL name has been given at creation with a schema prefix or not.

    + *

    Note: This information is used only when writing TAP_SCHEMA.tables or when writing the output of the resource /tables.

    + * @since 2.0 */ + private boolean isInitiallyQualified; + + /** Name that this table have in the database. + * Note: It CAN'T be NULL. By default, it is the ADQL name. */ private String dbName = null; + /** The schema which owns this table. + * Note: It is NULL only at the construction. + * Then, this attribute is automatically set by a {@link TAPSchema} when adding this table inside it + * with {@link TAPSchema#addTable(TAPTable)}. */ private TAPSchema schema = null; - private String type = "table"; + /** Type of this table. + * Note: Standard TAP table field ; CAN NOT be NULL ; by default, it is "table". */ + private TableType type = TableType.table; + /** Descriptive, human-interpretable name of the table. + * Note: Standard TAP table field ; MAY be NULL. + * @since 2.0 */ + private String title = null; + + /** Description of this table. + * Note: Standard TAP table field ; MAY be NULL. */ private String description = null; + /** UType associating this table with a data-model. + * Note: Standard TAP table field ; MAY be NULL. */ private String utype = null; + /** List of columns composing this table. + * Note: all columns of this list are linked to this table from the moment they are added inside it. */ protected final Map columns; + /** List of all foreign keys linking this table to others. */ protected final ArrayList foreignKeys; + /** Let add some information in addition of the ones of the TAP protocol. + * Note: This object can be anything: an {@link Integer}, a {@link String}, a {@link Map}, a {@link List}, ... + * Its content is totally free and never used or checked. */ protected Object otherData = null; + /** + *

    Build a {@link TAPTable} instance with the given ADQL name.

    + * + *

    Note: + * The DB name is set by default with the ADQL name. To set the DB name, + * you MUST call then {@link #setDBName(String)}. + * The table type is set by default to "table". + *

    + * + *

    Note: + * If the given ADQL name is prefixed (= it has some text separated by a '.' before the table name), + * this prefix will be removed. Only the part after the '.' character will be kept. + *

    + * + * @param tableName Name that this table MUST have in ADQL queries. CAN'T be NULL ; this name can never be changed after. + */ public TAPTable(String tableName){ if (tableName == null || tableName.trim().length() == 0) throw new NullPointerException("Missing table name !"); int indPrefix = tableName.lastIndexOf('.'); adqlName = (indPrefix >= 0) ? tableName.substring(indPrefix + 1).trim() : tableName.trim(); + isInitiallyQualified = (indPrefix >= 0); dbName = adqlName; columns = new LinkedHashMap(); foreignKeys = new ArrayList(); } - public TAPTable(String tableName, String tableType){ + /** + *

    Build a {@link TAPTable} instance with the given ADQL name and table type.

    + * + *

    Note: + * The DB name is set by default with the ADQL name. To set the DB name, + * you MUST call then {@link #setDBName(String)}. + *

    + * + *

    Note: + * The table type is set by calling the function {@link #setType(TableType)} which does not do + * anything if the given table type is NULL. + *

    + * + * @param tableName Name that this table MUST have in ADQL queries. CAN'T be NULL ; this name can never be changed after. + * @param tableType Type of this table. If NULL, "table" will be the type of this table. + * + * @see #setType(TableType) + */ + public TAPTable(String tableName, TableType tableType){ this(tableName); - type = tableType; + setType(tableType); } - public TAPTable(String tableName, String tableType, String description, String utype){ + /** + *

    Build a {@link TAPTable} instance with the given ADQL name, table type, description and UType.

    + * + *

    Note: + * The DB name is set by default with the ADQL name. To set the DB name, + * you MUST call then {@link #setDBName(String)}. + *

    + * + *

    Note: + * The table type is set by calling the function {@link #setType(TableType)} which does not do + * anything if the given table type is NULL. + *

    + * + * @param tableName Name that this table MUST have in ADQL queries. CAN'T be NULL ; this name can never be changed after. + * @param tableType Type of this table. If NULL, "table" will be the type of this table. + * @param description Description of this table. MAY be NULL. + * @param utype UType associating this table with a data-model. MAY be NULL + * + * @see #setType(TableType) + */ + public TAPTable(String tableName, TableType tableType, String description, String utype){ this(tableName, tableType); this.description = description; this.utype = utype; } + /** + *

    Get the qualified name of this table.

    + * + *

    Warning: + * The part of the returned full name won't be double quoted! + *

    + * + *

    Note: + * If this table is not attached to a schema, this function will just return + * the ADQL name of this table. + *

    + * + * @return Qualified ADQL name of this table. + */ public final String getFullName(){ if (schema != null) - return schema.getName() + "." + adqlName; + return schema.getADQLName() + "." + adqlName; else return adqlName; } /** - * @return The name. + * Get the ADQL name (the name this table MUST have in ADQL queries). + * + * @return Its ADQL name. + * @see #getADQLName() + * @deprecated Does not do anything special: just call {@link #getADQLName()}. */ + @Deprecated public final String getName(){ return getADQLName(); } @@ -89,14 +228,56 @@ public class TAPTable implements DBTable { return adqlName; } + /** + *

    Tells whether the ADQL name of this table must be qualified in the "table_name" column of TAP_SCHEMA.tables + * and in the /schema/table/name field of the resource /tables.

    + * + *

    Note: this value is set automatically by the constructor: "true" if the table name was qualified, + * "false" otherwise. It can be changed with the function {@link #setInitiallyQualifed(boolean)}, BUT by doing so + * you may generate a mismatch between the table name of TAP_SCHEMA.tables and the one of /tables.

    + * + * @return true if the table name must be qualified in TAP_SCHEMA.tables and in /tables, false otherwise. + * + * @since 2.0 + */ + public final boolean isInitiallyQualified(){ + return isInitiallyQualified; + } + + /** + *

    Let specifying whether the table name must be qualified in TAP_SCHEMA.tables and in the resource /tables.

    + * + *

    WARNING: Calling this function may generate a mismatch between the table name of TAP_SCHEMA.tables and + * the one of the resource /tables. So, be sure to change this flag before setting the content of TAP_SCHEMA.tables + * using {@link tap.db.JDBCConnection#setTAPSchema(TAPMetadata)}.

    + * + * @param mustBeQualified true if the table name in TAP_SCHEMA.tables and in the resource /tables must be qualified by the schema name, + * false otherwise. + * + * @since 2.0 + */ + public final void setInitiallyQualifed(final boolean mustBeQualified){ + isInitiallyQualified = mustBeQualified; + } + @Override public final String getDBName(){ return dbName; } + /** + *

    Change the name that this table MUST have in the database (i.e. in SQL queries).

    + * + *

    Note: + * If the given value is NULL or an empty string, nothing is done ; the DB name keeps is former value. + *

    + * + * @param name The new database name of this table. + */ public final void setDBName(String name){ name = (name != null) ? name.trim() : name; - dbName = (name == null || name.length() == 0) ? adqlName : name; + if (name != null && name.length() > 0) + dbName = name; } @Override @@ -111,87 +292,189 @@ public class TAPTable implements DBTable { @Override public final String getADQLSchemaName(){ - return schema.getADQLName(); + return schema == null ? null : schema.getADQLName(); } @Override public final String getDBSchemaName(){ - return schema.getDBName(); + return schema == null ? null : schema.getDBName(); } /** - * @return The schema. + * Get the schema that owns this table. + * + * @return Its schema. MAY be NULL */ public final TAPSchema getSchema(){ return schema; } /** - * @param schema The schema to set. + *

    Set the schema in which this schema is.

    + * + *

    Warning: + * For consistency reasons, this function SHOULD be called only by the {@link TAPSchema} + * that owns this table. + *

    + * + *

    Important note: + * If this table was already linked with another {@link TAPSchema} object, the previous link is removed + * here, but also in the schema (by calling {@link TAPSchema#removeTable(String)}). + *

    + * + * @param schema The schema that owns this table. */ - protected final void setSchema(TAPSchema schema){ + protected final void setSchema(final TAPSchema schema){ + if (this.schema != null && (schema == null || !schema.equals(this.schema))) + this.schema.removeTable(adqlName); this.schema = schema; } /** - * @return The type. + * Get the type of this table. + * + * @return Its type. */ - public final String getType(){ + public final TableType getType(){ return type; } /** - * @param type The type to set. + *

    Set the type of this table.

    + * + *

    Note: + * If the given type is NULL, nothing will be done ; the type of this table won't be changed. + *

    + * + * @param type Its new type. */ - public final void setType(String type){ - this.type = type; + public final void setType(TableType type){ + if (type != null) + this.type = type; } /** - * @return The description. + * Get the title of this table. + * + * @return Its title. MAY be NULL + * + * @since 2.0 + */ + public final String getTitle(){ + return title; + } + + /** + * Set the title of this table. + * + * @param title Its new title. MAY be NULL + * + * @since 2.0 + */ + public final void setTitle(final String title){ + this.title = title; + } + + /** + * Get the description of this table. + * + * @return Its description. MAY be NULL */ public final String getDescription(){ return description; } /** - * @param description The description to set. + * Set the description of this table. + * + * @param description Its new description. MAY be NULL */ public final void setDescription(String description){ this.description = description; } /** - * @return The utype. + * Get the UType associating this table with a data-model. + * + * @return Its UType. MAY be NULL */ public final String getUtype(){ return utype; } /** - * @param utype The utype to set. + * Set the UType associating this table with a data-model. + * + * @param utype Its new UType. MAY be NULL */ public final void setUtype(String utype){ this.utype = utype; } + /** + *

    Get the other (piece of) information associated with this table.

    + * + *

    Note: + * By default, NULL is returned, but it may be any kind of value ({@link Integer}, + * {@link String}, {@link Map}, {@link List}, ...). + *

    + * + * @return The other (piece of) information. MAY be NULL + */ public Object getOtherData(){ return otherData; } + /** + * Set the other (piece of) information associated with this table. + * + * @param data Another information about this table. MAY be NULL + */ public void setOtherData(Object data){ otherData = data; } - public final void addColumn(TAPColumn newColumn){ - if (newColumn != null && newColumn.getName() != null){ - columns.put(newColumn.getName(), newColumn); + /** + *

    Add a column to this table.

    + * + *

    Note: + * If the given column is NULL, nothing will be done. + *

    + * + *

    Important note: + * By adding the given column inside this table, it + * will be linked with this table using {@link TAPColumn#setTable(DBTable)}. + * In this function, if the column was already linked with another {@link TAPTable}, + * the former link is removed using {@link TAPTable#removeColumn(String)}. + *

    + * + * @param newColumn Column to add inside this table. + */ + public final void addColumn(final TAPColumn newColumn){ + if (newColumn != null && newColumn.getADQLName() != null){ + columns.put(newColumn.getADQLName(), newColumn); newColumn.setTable(this); } } + /** + *

    Build a {@link TAPColumn} object whose the ADQL and DB name will the given one. + * Then, add this column inside this table.

    + * + *

    Note: + * The built {@link TAPColumn} object is returned, so that being modified afterwards if needed. + *

    + * + * @param columnName ADQL name (and indirectly also the DB name) of the column to create and add. + * + * @return The created and added {@link TAPColumn} object, + * or NULL if the given name is NULL or an empty string. + * + * @see TAPColumn#TAPColumn(String) + * @see #addColumn(TAPColumn) + */ public final TAPColumn addColumn(String columnName){ - if (columnName == null) + if (columnName == null || columnName.trim().length() <= 0) return null; TAPColumn c = new TAPColumn(columnName); @@ -199,34 +482,68 @@ public class TAPTable implements DBTable { return c; } - public TAPColumn addColumn(String columnName, String description, String unit, String ucd, String utype){ - if (columnName == null) - return null; - - TAPColumn c = new TAPColumn(columnName, description, unit, ucd, utype); - addColumn(c); - return c; - } - - public TAPColumn addColumn(String columnName, String description, String unit, String ucd, String utype, String datatype, int size, boolean principal, boolean indexed, boolean std){ - if (columnName == null) + /** + *

    Build a {@link TAPColumn} object whose the ADQL and DB name will the given one. + * Then, add this column inside this table.

    + * + *

    Note: + * The built {@link TAPColumn} object is returned, so that being modified afterwards if needed. + *

    + * + * @param columnName ADQL name (and indirectly also the DB name) of the column to create and add. + * @param datatype Type of the new column's values. If NULL, VARCHAR will be the type of the created column. + * @param description Description of the new column. MAY be NULL + * @param unit Unit of the new column's values. MAY be NULL + * @param ucd UCD describing the scientific content of the new column. MAY be NULL + * @param utype UType associating the new column with a data-model. MAY be NULL + * + * @return The created and added {@link TAPColumn} object, + * or NULL if the given name is NULL or an empty string. + * + * @see TAPColumn#TAPColumn(String, DBType, String, String, String, String) + * @see #addColumn(TAPColumn) + */ + public TAPColumn addColumn(String columnName, DBType datatype, String description, String unit, String ucd, String utype){ + if (columnName == null || columnName.trim().length() <= 0) return null; - TAPColumn c = new TAPColumn(columnName, description, unit, ucd, utype); - c.setDatatype(datatype, size); - c.setPrincipal(principal); - c.setIndexed(indexed); - c.setStd(std); + TAPColumn c = new TAPColumn(columnName, datatype, description, unit, ucd, utype); addColumn(c); return c; } - public TAPColumn addColumn(String columnName, String description, String unit, String ucd, String utype, VotType votType, boolean principal, boolean indexed, boolean std){ - if (columnName == null) + /** + *

    Build a {@link TAPColumn} object whose the ADQL and DB name will the given one. + * Then, add this column inside this table.

    + * + *

    Note: + * The built {@link TAPColumn} object is returned, so that being modified afterwards if needed. + *

    + * + * @param columnName ADQL name (and indirectly also the DB name) of the column to create and add. + * @param datatype Type of the new column's values. If NULL, VARCHAR will be the type of the created column. + * @param description Description of the new column. MAY be NULL + * @param unit Unit of the new column's values. MAY be NULL + * @param ucd UCD describing the scientific content of the new column. MAY be NULL + * @param utype UType associating the new column with a data-model. MAY be NULL + * @param principal true if the new column should be returned by default, false otherwise. + * @param indexed true if the new column is indexed, false otherwise. + * @param std true if the new column is defined by a standard, false otherwise. + * + * @return The created and added {@link TAPColumn} object, + * or NULL if the given name is NULL or an empty string. + * + * @see TAPColumn#TAPColumn(String, DBType, String, String, String, String) + * @see TAPColumn#setPrincipal(boolean) + * @see TAPColumn#setIndexed(boolean) + * @see TAPColumn#setStd(boolean) + * @see #addColumn(TAPColumn) + */ + public TAPColumn addColumn(String columnName, DBType datatype, String description, String unit, String ucd, String utype, boolean principal, boolean indexed, boolean std){ + if (columnName == null || columnName.trim().length() <= 0) return null; - TAPColumn c = new TAPColumn(columnName, description, unit, ucd, utype); - c.setVotType(votType); + TAPColumn c = new TAPColumn(columnName, datatype, description, unit, ucd, utype); c.setPrincipal(principal); c.setIndexed(indexed); c.setStd(std); @@ -234,6 +551,17 @@ public class TAPTable implements DBTable { return c; } + /** + *

    Tell whether this table contains a column with the given ADQL name.

    + * + *

    Important note: + * This function is case sensitive. + *

    + * + * @param columnName ADQL name (case sensitive) of the column whose the existence must be checked. + * + * @return true if a column having the given ADQL name exists in this table, false otherwise. + */ public final boolean hasColumn(String columnName){ if (columnName == null) return false; @@ -241,6 +569,11 @@ public class TAPTable implements DBTable { return columns.containsKey(columnName); } + /** + * Get the list of all columns contained in this table. + * + * @return An iterator over the list of this table's columns. + */ public Iterator getColumns(){ return columns.values().iterator(); } @@ -253,7 +586,7 @@ public class TAPTable implements DBTable { if (colName != null && colName.length() > 0){ Collection collColumns = columns.values(); for(TAPColumn column : collColumns){ - if (column.getDBName().equalsIgnoreCase(colName)) + if (column.getDBName().equals(colName)) return column; } } @@ -261,6 +594,18 @@ public class TAPTable implements DBTable { } } + /** + *

    Search a column inside this table having the given ADQL name.

    + * + *

    Important note: + * This function is case sensitive. + *

    + * + * @param columnName ADQL name of the column to search. + * + * @return The matching column, + * or NULL if no column with this ADQL name has been found. + */ public final TAPColumn getColumn(String columnName){ if (columnName == null) return null; @@ -268,18 +613,64 @@ public class TAPTable implements DBTable { return columns.get(columnName); } + /** + *

    Tell whether this table contains a column with the given ADQL or DB name.

    + * + *

    Note: + * This functions is just calling {@link #getColumn(String, boolean)} and compare its result + * with NULL in order to check the existence of the specified column. + *

    + * + * @param colName ADQL or DB name that the column to search must have. + * @param byAdqlName true to search the column by ADQL name, false to search by DB name. + * + * @return true if a column has been found inside this table with the given ADQL or DB name, + * false otherwise. + * + * @see #getColumn(String, boolean) + */ public boolean hasColumn(String colName, boolean byAdqlName){ return (getColumn(colName, byAdqlName) != null); } + /** + * Get the number of columns composing this table. + * + * @return Number of its columns. + */ public final int getNbColumns(){ return columns.size(); } + /** + * Tell whether this table contains no column. + * + * @return true if this table is empty (no column), + * false if it contains at least one column. + */ public final boolean isEmpty(){ return columns.isEmpty(); } + /** + *

    Remove the specified column.

    + * + *

    Important note: + * This function is case sensitive! + *

    + * + *

    Note: + * If some foreign keys were associating the column to remove, + * they will be also deleted. + *

    + * + * @param columnName ADQL name of the column to remove. + * + * @return The removed column, + * or NULL if no column with the given ADQL name has been found. + * + * @see #deleteColumnRelations(TAPColumn) + */ public final TAPColumn removeColumn(String columnName){ if (columnName == null) return null; @@ -287,9 +678,15 @@ public class TAPTable implements DBTable { TAPColumn removedColumn = columns.remove(columnName); if (removedColumn != null) deleteColumnRelations(removedColumn); + return removedColumn; } + /** + * Delete all foreign keys having the given column in the sources or the targets list. + * + * @param col A column. + */ protected final void deleteColumnRelations(TAPColumn col){ // Remove the relation between the column and this table: col.setTable(null); @@ -306,6 +703,10 @@ public class TAPTable implements DBTable { } } + /** + * Remove all columns composing this table. + * Foreign keys will also be deleted. + */ public final void removeAllColumns(){ Iterator> it = columns.entrySet().iterator(); while(it.hasNext()){ @@ -315,7 +716,31 @@ public class TAPTable implements DBTable { } } - public final void addForeignKey(TAPForeignKey key) throws Exception{ + /** + *

    Add the given foreign key to this table.

    + * + *

    Note: + * This function will do nothing if the given foreign key is NULL. + *

    + * + *

    WARNING: + * The source table ({@link TAPForeignKey#getFromTable()}) of the given foreign key MUST be this table + * and the foreign key MUST be completely defined. + * If not, an exception will be thrown and the key won't be added. + *

    + * + *

    Note: + * If the given foreign key is added to this table, all the columns of this key will be + * linked to the foreign key using either {@link TAPColumn#addSource(TAPForeignKey)} or + * {@link TAPColumn#addTarget(TAPForeignKey)}. + *

    + * + * @param key Foreign key (whose the FROM table is this table) to add inside this table. + * + * @throws TAPException If the source table of the given foreign key is not this table + * or if the given key is not completely defined. + */ + public final void addForeignKey(TAPForeignKey key) throws TAPException{ if (key == null) return; @@ -323,57 +748,120 @@ public class TAPTable implements DBTable { final String errorMsgPrefix = "Impossible to add the foreign key \"" + keyId + "\" because "; if (key.getFromTable() == null) - throw new Exception(errorMsgPrefix + "no source table is specified !"); + throw new TAPException(errorMsgPrefix + "no source table is specified !"); if (!this.equals(key.getFromTable())) - throw new Exception(errorMsgPrefix + "the source table is not \"" + getName() + "\""); + throw new TAPException(errorMsgPrefix + "the source table is not \"" + getADQLName() + "\""); if (key.getTargetTable() == null) - throw new Exception(errorMsgPrefix + "no target table is specified !"); + throw new TAPException(errorMsgPrefix + "no target table is specified !"); if (key.isEmpty()) - throw new Exception(errorMsgPrefix + "it defines no relation !"); + throw new TAPException(errorMsgPrefix + "it defines no relation !"); if (foreignKeys.add(key)){ try{ TAPTable targetTable = key.getTargetTable(); for(Map.Entry relation : key){ if (!hasColumn(relation.getKey())) - throw new Exception(errorMsgPrefix + "the source column \"" + relation.getKey() + "\" doesn't exist in \"" + getName() + "\" !"); + throw new TAPException(errorMsgPrefix + "the source column \"" + relation.getKey() + "\" doesn't exist in \"" + getName() + "\" !"); else if (!targetTable.hasColumn(relation.getValue())) - throw new Exception(errorMsgPrefix + "the target column \"" + relation.getValue() + "\" doesn't exist in \"" + targetTable.getName() + "\" !"); + throw new TAPException(errorMsgPrefix + "the target column \"" + relation.getValue() + "\" doesn't exist in \"" + 
targetTable.getName() + "\" !"); else{ getColumn(relation.getKey()).addTarget(key); targetTable.getColumn(relation.getValue()).addSource(key); } } - }catch(Exception ex){ + }catch(TAPException ex){ foreignKeys.remove(key); throw ex; } } } - public TAPForeignKey addForeignKey(String keyId, TAPTable targetTable, Map columns) throws Exception{ + /** + *

    Build a foreign key using the ID, the target table and the given list of columns. + * Then, add the created foreign key to this table.

    + * + *

    Note: + * The source table of the created foreign key ({@link TAPForeignKey#getFromTable()}) will be this table. + *

    + * + *

    Note: + * If the given foreign key is added to this table, all the columns of this key will be + * linked to the foreign key using either {@link TAPColumn#addSource(TAPForeignKey)} or + * {@link TAPColumn#addTarget(TAPForeignKey)}. + *

    + * + * @return The created and added foreign key. + * + * @throws TAPException If the specified key is not completely or correctly defined. + * + * @see TAPForeignKey#TAPForeignKey(String, TAPTable, TAPTable, Map) + */ + public TAPForeignKey addForeignKey(String keyId, TAPTable targetTable, Map columns) throws TAPException{ TAPForeignKey key = new TAPForeignKey(keyId, this, targetTable, columns); addForeignKey(key); return key; } - public TAPForeignKey addForeignKey(String keyId, TAPTable targetTable, Map columns, String description, String utype) throws Exception{ + /** + *

    Build a foreign key using the ID, the target table, the given list of columns, the given description and the given UType. + * Then, add the created foreign key to this table.

    + * + *

    Note: + * The source table of the created foreign key ({@link TAPForeignKey#getFromTable()}) will be this table. + *

    + * + *

    Note: + * If the given foreign key is added to this table, all the columns of this key will be + * linked to the foreign key using either {@link TAPColumn#addSource(TAPForeignKey)} or + * {@link TAPColumn#addTarget(TAPForeignKey)}. + *

    + * + * @return The created and added foreign key. + * + * @throws TAPException If the specified key is not completely or correctly defined. + * + * @see TAPForeignKey#TAPForeignKey(String, TAPTable, TAPTable, Map, String, String) + */ + public TAPForeignKey addForeignKey(String keyId, TAPTable targetTable, Map columns, String description, String utype) throws TAPException{ TAPForeignKey key = new TAPForeignKey(keyId, this, targetTable, columns, description, utype); addForeignKey(key); return key; } + /** + * Get the list of all foreign keys associated whose the source is this table. + * + * @return An iterator over all its foreign keys. + */ public final Iterator getForeignKeys(){ return foreignKeys.iterator(); } + /** + * Get the number of all foreign keys whose the source is this table + * + * @return Number of all its foreign keys. + */ public final int getNbForeignKeys(){ return foreignKeys.size(); } + /** + *

    Remove the given foreign key from this table.

    + * + *

    Note: + * This function will also delete the link between the columns of the foreign key + * and the foreign key, using {@link #deleteRelations(TAPForeignKey)}. + *

    + * + * @param keyToRemove Foreign key to removed from this table. + * + * @return true if the key has been successfully removed, + * false otherwise. + */ public final boolean removeForeignKey(TAPForeignKey keyToRemove){ if (foreignKeys.remove(keyToRemove)){ deleteRelations(keyToRemove); @@ -382,6 +870,14 @@ public class TAPTable implements DBTable { return false; } + /** + *

    Remove all the foreign keys whose the source is this table.

    + * + *

    Note: + * This function will also delete the link between the columns of all the removed foreign keys + * and the foreign keys, using {@link #deleteRelations(TAPForeignKey)}. + *

    + */ public final void removeAllForeignKeys(){ Iterator it = foreignKeys.iterator(); while(it.hasNext()){ @@ -390,6 +886,13 @@ public class TAPTable implements DBTable { } } + /** + * Delete the link between all columns of the given foreign key + * and this foreign key. Thus, these columns won't be anymore source or target + * of this foreign key. + * + * @param key A foreign key whose links with its columns must be deleted. + */ protected final void deleteRelations(TAPForeignKey key){ for(Map.Entry relation : key){ TAPColumn col = key.getFromTable().getColumn(relation.getKey()); @@ -426,69 +929,17 @@ public class TAPTable implements DBTable { @Override public String toString(){ - return ((schema != null) ? (schema.getName() + ".") : "") + adqlName; - } - - public static void main(String[] args) throws Exception{ - TAPSchema schema1 = new TAPSchema("monSchema1"); - TAPSchema schema2 = new TAPSchema("monSchema2"); - - TAPTable tRef = schema1.addTable("ToRef"); - tRef.addColumn("monMachin"); - - TAPTable t = schema2.addTable("Test"); - t.addColumn("machin"); - t.addColumn("truc"); - HashMap mapCols = new HashMap(); - mapCols.put("machin", "monMachin"); - TAPForeignKey key = new TAPForeignKey("KeyID", t, tRef, mapCols); - t.addForeignKey(key); - mapCols = new HashMap(); - mapCols.put("truc", "monMachin"); - key = new TAPForeignKey("2ndKey", t, tRef, mapCols); - t.addForeignKey(key); - - printSchema(schema1); - printSchema(schema2); - - System.out.println(); - - schema2.removeTable("Test"); - printSchema(schema1); - printSchema(schema2); - } - - public static void printSchema(TAPSchema schema){ - System.out.println("*** SCHEMA \"" + schema.getName() + "\" ***"); - for(TAPTable t : schema) - printTable(t); - } - - public static void printTable(TAPTable t){ - System.out.println("TABLE: " + t + "\nNb Columns: " + t.getNbColumns() + "\nNb Relations: " + t.getNbForeignKeys()); - Iterator it = t.getColumns(); - while(it.hasNext()){ - TAPColumn col = it.next(); - 
System.out.print("\t- " + col + "( "); - Iterator keys = col.getTargets(); - while(keys.hasNext()) - for(Map.Entry relation : keys.next()) - System.out.print(">" + relation.getKey() + "/" + relation.getValue() + " "); - keys = col.getSources(); - while(keys.hasNext()) - for(Map.Entry relation : keys.next()) - System.out.print("<" + relation.getKey() + "/" + relation.getValue() + " "); - System.out.println(")"); - } + return ((schema != null) ? (schema.getADQLName() + ".") : "") + adqlName; } + @Override public DBTable copy(final String dbName, final String adqlName){ TAPTable copy = new TAPTable((adqlName == null) ? this.adqlName : adqlName); copy.setDBName((dbName == null) ? this.dbName : dbName); copy.setSchema(schema); Collection collColumns = columns.values(); for(TAPColumn col : collColumns) - copy.addColumn((TAPColumn)col.copy()); + copy.addColumn((TAPColumn)col.copy(col.getDBName(), col.getADQLName(), null)); copy.setDescription(description); copy.setOtherData(otherData); copy.setType(type); diff --git a/src/tap/metadata/TAPTypes.java b/src/tap/metadata/TAPTypes.java deleted file mode 100644 index 423b854ab6ca437107c7c4981312882ec5d52978..0000000000000000000000000000000000000000 --- a/src/tap/metadata/TAPTypes.java +++ /dev/null @@ -1,359 +0,0 @@ -package tap.metadata; - -/* - * This file is part of TAPLibrary. - * - * TAPLibrary is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * TAPLibrary is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with TAPLibrary. If not, see . 
- * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) - */ - -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; -import java.util.Map.Entry; - -/** - *

    - * Gathers all types used by a TAP service and described in the IVOA document for TAP. - * This class lets "translating" a DB type into a VOTable field type and vice-versa. - * You can also add some DB type aliases, that's to say other other names for the existing DB types: - * smallint, integer, bigint, real, double, binary, varbinary, char, varchar, blob, clob, timestamp, point, region. - * For instance: TEXT <-> VARCHAR. - *

    - * - * @author Grégory Mantelet (CDS) - * @version 11/2011 - * - * @see VotType - */ -public final class TAPTypes { - - private static final Map dbTypes; - private static final Map dbTypeAliases; - private static final Map votTypes; - - public static final String SMALLINT = "SMALLINT"; - public static final String INTEGER = "INTEGER"; - public static final String BIGINT = "BIGINT"; - public static final String REAL = "REAL"; - public static final String DOUBLE = "DOUBLE"; - public static final String BINARY = "BINARY"; - public static final String VARBINARY = "VARBINARY"; - public static final String CHAR = "CHAR"; - public static final String VARCHAR = "VARCHAR"; - public static final String BLOB = "BLOB"; - public static final String CLOB = "CLOB"; - public static final String TIMESTAMP = "TIMESTAMP"; - public static final String POINT = "POINT"; - public static final String REGION = "REGION"; - - /** No array size. */ - public static final int NO_SIZE = -1; - - /** Means '*' (i.e. char(*)). 
*/ - public static final int STAR_SIZE = -12345; - - static{ - dbTypes = new HashMap(14); - votTypes = new HashMap(7); - - VotType type = new VotType("short", 1, null); - dbTypes.put(SMALLINT, type); - votTypes.put(type, SMALLINT); - - type = new VotType("int", 1, null); - dbTypes.put(INTEGER, type); - votTypes.put(type, INTEGER); - - type = new VotType("long", 1, null); - dbTypes.put(BIGINT, type); - votTypes.put(type, BIGINT); - - type = new VotType("float", 1, null); - dbTypes.put(REAL, type); - votTypes.put(type, REAL); - - type = new VotType("double", 1, null); - dbTypes.put(DOUBLE, type); - votTypes.put(type, DOUBLE); - - dbTypes.put(BINARY, new VotType("unsignedByte", 1, null)); - - type = new VotType("unsignedByte", STAR_SIZE, null); - dbTypes.put(VARBINARY, type); - votTypes.put(type, VARBINARY); - - dbTypes.put(CHAR, new VotType("char", 1, null)); - - type = new VotType("char", STAR_SIZE, null); - dbTypes.put(VARCHAR, type); - votTypes.put(type, VARCHAR); - - type = new VotType("unsignedByte", STAR_SIZE, "adql:BLOB"); - dbTypes.put(BLOB, type); - votTypes.put(type, BLOB); - - type = new VotType("char", STAR_SIZE, "adql:CLOB"); - dbTypes.put(CLOB, type); - votTypes.put(type, CLOB); - - type = new VotType("char", STAR_SIZE, "adql:TIMESTAMP"); - dbTypes.put(TIMESTAMP, type); - votTypes.put(type, TIMESTAMP); - - type = new VotType("char", STAR_SIZE, "adql:POINT"); - dbTypes.put(POINT, type); - votTypes.put(type, POINT); - - type = new VotType("char", STAR_SIZE, "adql:REGION"); - dbTypes.put(REGION, type); - votTypes.put(type, REGION); - - dbTypeAliases = new HashMap(8); - // PostgreSQL data types: - dbTypeAliases.put("INT2", SMALLINT); - dbTypeAliases.put("INT", INTEGER); - dbTypeAliases.put("INT4", INTEGER); - dbTypeAliases.put("INT8", BIGINT); - dbTypeAliases.put("FLOAT4", REAL); - dbTypeAliases.put("FLOAT8", DOUBLE); - dbTypeAliases.put("TEXT", VARCHAR); - dbTypeAliases.put("SPOINT", POINT); - } - - /** - * Gets all DB types. 
- * @return An iterator on DB type name. - */ - public static final Iterator getDBTypes(){ - return dbTypes.keySet().iterator(); - } - - /** - * Gets all DB type aliases. - * @return An iterator on Entry<String,String> whose the key is the alias and the value is its corresponding DB type. - */ - public static final Iterator> getDBTypeAliases(){ - return dbTypeAliases.entrySet().iterator(); - } - - /** - * Gets all VOTable types. - * @return An iterator on {@link VotType}. - */ - public static final Iterator getVotTypes(){ - return votTypes.keySet().iterator(); - } - - /** - *

    Gets the VOTable type corresponding to the given DB type (or a DB type alias).

    - * Important: - *
      - *
    • Spaces before and after the DB type are automatically removed,
    • - *
    • The DB type is automatically formatted in UPPER-CASE,
    • - *
    • Nothing is done if the given DB type is null or empty.
    • - *
    - * - * @param dbType A DB type (ex: SMALLINT, INTEGER, VARCHAR, POINT, ...) - * - * @return The corresponding VOTable type or null if not found. - */ - public static final VotType getVotType(String dbType){ - if (dbType == null) - return null; - - // Normalize the type name (upper case and with no leading and trailing spaces): - dbType = dbType.trim().toUpperCase(); - if (dbType.length() == 0) - return null; - - // Search the corresponding VOTable type: - VotType votType = dbTypes.get(dbType); - // If no match, try again considering the given type as an alias: - if (votType == null) - votType = dbTypes.get(dbTypeAliases.get(dbType)); - - return votType; - } - - /** - *

    Gets the VOTable type (with the given arraysize) corresponding to the given DB type (or a DB type alias).

    - * Important: - *
      - *
    • Spaces before and after the DB type are automatically removed,
    • - *
    • The DB type is automatically formatted in UPPER-CASE,
    • - *
    • Nothing is done if the given DB type is null or empty,
    • - *
    • The given arraysize is used only if the found VOTable type is not special (that's to say: xtype is null).
    • - *
    - * - * @param dbType A DB type (ex: SMALLINT, INTEGER, VARCHAR, POINT, ...) - * @param arraysize Arraysize to set in the found VOTable type. - * - * @return The corresponding VOTable type or null if not found. - */ - public static final VotType getVotType(String dbType, int arraysize){ - VotType votType = getVotType(dbType); - - // If there is a match, set the arraysize: - if (votType != null && votType.xtype == null && arraysize > 0) - votType = new VotType(votType.datatype, arraysize, null); - - return votType; - } - - /** - * - *

    Gets the DB type corresponding to the given DB type alias.

    - * Important: - *
      - *
    • Spaces before and after the DB type are automatically removed,
    • - *
    • The DB type is automatically formatted in UPPER-CASE,
    • - *
    • If the given DB type is not alias but directly a DB type, it is immediately return.
    • - *
    - * - * @param dbTypeAlias A DB type alias. - * - * @return The corresponding DB type or null if not found. - */ - public static final String getDBType(String dbTypeAlias){ - if (dbTypeAlias == null) - return null; - - // Normalize the type name: - dbTypeAlias = dbTypeAlias.trim().toUpperCase(); - if (dbTypeAlias.length() == 0) - return null; - - // Get the corresponding DB type: - if (dbTypes.containsKey(dbTypeAlias)) - return dbTypeAlias; - else - return dbTypeAliases.get(dbTypeAlias); - } - - /** - * - *

    Gets the DB type corresponding to the given VOTable field type.

    - * Important: - *
      - *
    • The research is made only on the following fields: datatype and xtype,
    • - *
    • Case insensitive research.
    • - *
    - * - * @param type A VOTable type. - * - * @return The corresponding DB type or null if not found. - */ - public static final String getDBType(final VotType type){ - if (type == null) - return null; - return votTypes.get(type); - } - - /** - *

    Adds, replaces or removes a DB type alias.

    - * Important: - *
      - *
    • Spaces before and after the DB type are automatically removed,
    • - *
    • The DB type is automatically formatted in UPPER-CASE,
    • - *
    • The same "normalizations" are done on the given alias (so the case sensitivity is ignored),
    • - *
    • Nothing is done if the given alias is null or empty,
    • - *
    • If the given DB type is null, the given alias is removed,
    • - *
    • Nothing is done if the given DB type (!= null) does not match with a known DB type.
    • - *
    - * - * @param alias A DB type alias (ex: spoint) - * @param dbType A DB type (ex: POINT). - * - * @return true if the association has been updated, false otherwise. - */ - public static final boolean putDBTypeAlias(String alias, String dbType){ - if (alias == null) - return false; - - // Normalize the given alias: - alias = alias.trim().toUpperCase(); - if (alias.length() == 0) - return false; - - // Check the existence of the given DB type: - if (dbType != null){ - dbType = dbType.trim().toUpperCase(); - if (dbType.length() == 0) - return false; - else if (!dbTypes.containsKey(dbType)) - return false; - } - - // Update the map of aliases: - if (dbType == null) - dbTypeAliases.remove(alias); - else - dbTypeAliases.put(alias, dbType); - - return true; - } - - /** SELF TEST */ - public final static void main(final String[] args) throws Exception{ - System.out.println("***** DB TYPES *****"); - Iterator itDB = TAPTypes.getDBTypes(); - while(itDB.hasNext()) - System.out.println("\t- " + itDB.next()); - - System.out.println("\n***** DB TYPE ALIASES *****"); - Iterator> itAliases = TAPTypes.getDBTypeAliases(); - while(itAliases.hasNext()){ - Entry e = itAliases.next(); - System.out.println("\t- " + e.getKey() + " = " + e.getValue()); - } - - System.out.println("\n***** VOTABLE TYPES *****"); - Iterator itVot = TAPTypes.getVotTypes(); - while(itVot.hasNext()) - System.out.println("\t- " + itVot.next()); - - byte[] buffer = new byte[1024]; - int nbRead = 0; - String type = null; - - System.out.print("\nDB Type ? "); - nbRead = System.in.read(buffer); - type = new String(buffer, 0, nbRead); - System.out.println(TAPTypes.getVotType(type)); - - int arraysize = 1; - String xtype = null; - VotType votType = null; - System.out.print("\nVOTable datatype ? "); - nbRead = System.in.read(buffer); - type = (new String(buffer, 0, nbRead)).trim(); - System.out.print("VOTable arraysize ? 
"); - nbRead = System.in.read(buffer); - try{ - arraysize = Integer.parseInt((new String(buffer, 0, nbRead)).trim()); - }catch(NumberFormatException nfe){ - arraysize = STAR_SIZE; - } - System.out.print("VOTable xtype ? "); - nbRead = System.in.read(buffer); - xtype = (new String(buffer, 0, nbRead)).trim(); - if (xtype != null && xtype.length() == 0) - xtype = null; - votType = new VotType(type, arraysize, xtype); - System.out.println(TAPTypes.getDBType(votType)); - } - -} diff --git a/src/tap/metadata/TableSetParser.java b/src/tap/metadata/TableSetParser.java new file mode 100644 index 0000000000000000000000000000000000000000..0d6e1a5fdc49b5fee46c38233dabe5cd1ea7abba --- /dev/null +++ b/src/tap/metadata/TableSetParser.java @@ -0,0 +1,905 @@ +package tap.metadata; + +/* + * This file is part of TAPLibrary. + * + * TAPLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * TAPLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with TAPLibrary. If not, see . 
+ * + * Copyright 2015 - Astronomisches Rechen Institut (ARI) + */ + +import java.io.BufferedInputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import javax.xml.stream.XMLInputFactory; +import javax.xml.stream.XMLStreamConstants; +import javax.xml.stream.XMLStreamException; +import javax.xml.stream.XMLStreamReader; + +import org.xml.sax.helpers.DefaultHandler; + +import tap.TAPException; +import tap.data.VOTableIterator; +import tap.metadata.TAPTable.TableType; +import adql.db.DBType; +import adql.db.DBType.DBDatatype; + +/** + *

    Let parse an XML document representing a table set, and return the corresponding {@link TAPMetadata} instance.

    + * + *

    Note 1: the table set must follow the syntax specified by the XML Schema http://www.ivoa.net/xml/VODataService.

    + *

    Note 2: only tags specified by VODataService are checked. If there is any other tag, they are merely ignored.

    + * + *

    Exceptions

    + * + *

    A {@link TAPException} is thrown in the following cases:

    + *
      + *
    • the root node is not "tableset"
    • + *
    • table name syntax ([schema.]table) is incorrect
    • + *
    • a single table name (just "table" without schema prefix) is ambiguous (that's to say, the same name is used for tables of different schemas)
    • + *
    • "name" node is missing in nodes "schema", "table" and "column"
    • + *
    • "targetTable" is missing in node "foreignKey"
    • + *
    • "fromColumn" or "targetColumn" is missing in node "fkColumn"
    • + *
    • "name" node is duplicated in the same node
    • + *
    • missing "xsi:type" as attribute in a "dataType" node
    • + *
    • unknown column datatype
    • + *
    + * + *

    Note: catalog prefixes are not supported in this parser.

    + * + *

    Datatype

    + * + *

    + * A column datatype may be specified either as a TAP or a VOTable datatype. Thus, the type of specification must be given with the attribute xsi:type of the + * node "dataType". For instance: + *

    + *
      + *
    • <dataType xsi:type="vs:VOTableType" arraysize="1">float</dataType> for a VOTable datatype
    • + *
    • <dataType xsi:type="vod:TAPType">VARCHAR</dataType> for a TAP datatype
    • + *
    + * + * @author Grégory Mantelet (ARI) + * @version 2.0 (02/2015) + * @since 2.0 + */ +public class TableSetParser extends DefaultHandler { + + /** XML namespace for the XML schema XMLSchema-instance. */ + protected final static String XSI_NAMESPACE = "http://www.w3.org/2001/XMLSchema-instance"; + + /** XML namespace for the XML schema VODataService. */ + protected final static String VODATASERVICE_NAMESPACE = "http://www.ivoa.net/xml/VODataService"; + + /** + *

    Intermediary representation of a Foreign Key.

    + * + *

    + * An instance of this class lets save all information provided in the XML document and needed to create the corresponding TAP metadata ({@link TAPForeignKey}) + * at the end of XML document parsing, once all available tables are listed. + *

    + * + * @author Grégory Mantelet (ARI) + * @version 2.0 (02/2015) + * @since 2.0 + * + * @see TableSetParser#parseFKey(XMLStreamReader) + * @see TableSetParser#parse(InputStream) + */ + protected static class ForeignKey { + /** Foreign key description */ + public String description = null; + /** UType associated with this foreign key. */ + public String utype = null; + /** Source table of the foreign key. + * Note: In the XML document, the foreign key is described inside its table ; + * hence the type of this attribute: TAPTable (it is indeed already known). */ + public TAPTable fromTable = null; + /** Target table of the foreign key. */ + public String targetTable = null; + /** Position of the "targetTable" node inside the XML document. + * Note: this attribute may be used only in case of error. */ + public String targetTablePosition = ""; + /** Columns associations. + * Keys are columns of the source table, whereas values are columns of the target table to associate with. */ + public Map keyColumns = new HashMap(); + } + + /** + * Parse the XML TableSet stored in the specified file. + * + * @param file The regular file containing the TableSet to parse. + * + * @return The corresponding TAP metadata. + * + * @throws IOException If any error occurs while reading the given file. + * @throws TAPException If any error occurs in the XML parsing or in the TAP metadata creation. + * + * @since {@link #parse(InputStream)} + */ + public TAPMetadata parse(final File file) throws IOException, TAPException{ + InputStream input = null; + try{ + input = new BufferedInputStream(new FileInputStream(file)); + return parse(input); + }finally{ + if (input != null){ + try{ + input.close(); + }catch(IOException ioe2){} + } + } + } + + /** + * Parse the XML TableSet stored in the given stream. + * + * @param input The stream containing the TableSet to parse. + * + * @return The corresponding TAP metadata. 
+ * + * @throws IOException If any error occurs while reading the given stream. + * @throws TAPException If any error occurs in the XML parsing or in the TAP metadata creation. + * + * @see #parseSchema(XMLStreamReader, List) + */ + public TAPMetadata parse(final InputStream input) throws IOException, TAPException{ + TAPMetadata meta = null; + + XMLInputFactory factory = XMLInputFactory.newInstance(); + XMLStreamReader reader = null; + + try{ + // Create the XML streaming reader: + reader = factory.createXMLStreamReader(input); + + // Read the first XML tag => MUST BE : + int event = nextTag(reader); + if (event == XMLStreamConstants.START_ELEMENT && reader.getLocalName().equalsIgnoreCase("tableset")){ + + // Build the metadata object: + meta = new TAPMetadata(); + + // Prepare the listing of all foreign keys for a later resolution: + ArrayList allForeignKeys = new ArrayList(20); + + // Read the next XML tag => MUST BE : + while(reader.hasNext() && (event = nextTag(reader)) == XMLStreamConstants.START_ELEMENT){ + if (reader.getLocalName().equalsIgnoreCase("schema")){ + // fetch the schema description and content: + meta.addSchema(parseSchema(reader, allForeignKeys)); + } + } + + // Read the final XML tag => MUST BE : + if (event != XMLStreamConstants.END_ELEMENT || !reader.getLocalName().equalsIgnoreCase("tableset")){ + // throw an error if the tag is not the expected one: + throw new TAPException(getPosition(reader) + " XML tag mismatch: <" + (event == XMLStreamConstants.END_ELEMENT ? "/" : "") + reader.getLocalName() + ">! 
Expected: ."); + } + + // Resolve all ForeignKey objects into TAPForeignKeys and add them into the dedicated TAPTable: + long keyId = 0; + for(ForeignKey fk : allForeignKeys){ + // search for the target table: + TAPTable targetTable = searchTable(fk.targetTable, meta, fk.targetTablePosition); + // build and add the foreign key: + fk.fromTable.addForeignKey("" + (++keyId), targetTable, fk.keyColumns, fk.description, fk.utype); + } + + }else + throw new TAPException(getPosition(reader) + " Missing root tag: \"tableset\"!"); + + }catch(XMLStreamException xse){ + throw new TAPException(getPosition(reader) + " XML ERROR: " + xse.getMessage() + "!", xse); + } + + return meta; + } + + /* **************************** */ + /* INDIVIDUAL PARSING FUNCTIONS */ + /* **************************** */ + + /** + *

    Parse the XML representation of a TAP schema.

    + * + *

    Important: This function MUST be called just after the start element "schema" has been read!

    + * + *

    Attributes

    + * + *

    No attribute is expected in the start element "schema".

    + * + *

    Children

    + * + * Only the following nodes are taken into account ; the others are ignored: + *
      + *
    • name REQUIRED
    • + *
    • description {0..1}
    • + *
    • title {0..1}
    • + *
    • utype {0..1}
    • + *
    • table {*}
    • + *
    + * + * @param reader XML reader. + * @param allForeignKeys List to fill with all encountered foreign keys. + * note: these keys are not the final TAP meta, but a collection of all information found in the XML document. + * The final TAP meta will be created later, once all available tables and columns are available. + * @throws IllegalStateException If this function is called while the reader has not just read the START ELEMENT tag of "table". + * + * @return The corresponding TAP schema. + * + * @throws XMLStreamException If there is an error processing the underlying XML source. + * @throws TAPException If several "name" nodes are found, or if none such node is found ; exactly one "name" node must be found. + * + * @see #parseTable(XMLStreamReader, List) + */ + protected TAPSchema parseSchema(final XMLStreamReader reader, final List allForeignKeys) throws XMLStreamException, TAPException{ + // Ensure the reader has just read the START ELEMENT of schema: + if (reader.getEventType() != XMLStreamConstants.START_ELEMENT || reader.getLocalName() == null || !reader.getLocalName().equalsIgnoreCase("schema")) + throw new IllegalStateException(getPosition(reader) + " Illegal usage of TableSetParser.parseSchema(XMLStreamParser)! This function can be called only when the reader has just read the START ELEMENT tag \"schema\"."); + + TAPSchema schema = null; + String tag = null, name = null, description = null, title = null, utype = null; + ArrayList tables = new ArrayList(10); + + while(nextTag(reader) == XMLStreamConstants.START_ELEMENT){ + // Get the tag name: + tag = reader.getLocalName(); + + // Identify the current tag: + if (tag.equalsIgnoreCase("name")){ + if (name != null) + throw new TAPException(getPosition(reader) + " Only one \"name\" element can exist in a /tableset/schema!"); + name = getText(reader); + }else if (tag.equalsIgnoreCase("description")) + description = ((description != null) ? 
(description + "\n") : "") + getText(reader); + else if (tag.equalsIgnoreCase("table")){ + ArrayList keys = new ArrayList(2); + tables.add(parseTable(reader, keys)); + allForeignKeys.addAll(keys); + }else if (tag.equalsIgnoreCase("title")) + title = ((title != null) ? (title + "\n") : "") + getText(reader); + else if (tag.equalsIgnoreCase("utype")) + utype = getText(reader); + } + + // Only one info is required: the schema name! + if (name == null) + throw new TAPException(getPosition(reader) + " Missing schema \"name\"!"); + + // Build the schema: + schema = new TAPSchema(name, description, utype); + schema.setTitle(title); + for(TAPTable t : tables) + schema.addTable(t); + tables = null; + + return schema; + } + + /** + *

    Parse the XML representation of a TAP table.

    + * + *

    Important: This function MUST be called just after the start element "table" has been read!

    + * + *

    Attributes

    + * + * The attribute "type" may be provided in the start element "table". One of the following value is expected: + *
      + *
    • base_table or table
    • + *
    • output
    • + *
    • view
    • + *
    + * + *

    Children

    + * + * Only the following nodes are taken into account ; the others are ignored: + *
      + *
    • name REQUIRED
    • + *
    • description {0..1}
    • + *
    • title {0..1}
    • + *
    • utype {0..1}
    • + *
    • column {*}
    • + *
    • foreignKey {*}
    • + *
    + * + * @param reader XML reader. + * @param keys List to fill with all encountered foreign keys. + * note: these keys are not the final TAP meta, but a collection of all information found in the XML document. + * The final TAP meta will be created later, once all available tables and columns are available. + * + * @return The corresponding TAP table. + * + * @throws XMLStreamException If there is an error processing the underlying XML source. + * @throws TAPException If several "name" nodes are found, or if none such node is found ; exactly one "name" node must be found. + * @throws IllegalStateException If this function is called while the reader has not just read the START ELEMENT tag of "table". + * + * @see #parseColumn(XMLStreamReader) + * @see #parseFKey(XMLStreamReader) + */ + protected TAPTable parseTable(final XMLStreamReader reader, final List keys) throws XMLStreamException, TAPException{ + // Ensure the reader has just read the START ELEMENT of table: + if (reader.getEventType() != XMLStreamConstants.START_ELEMENT || reader.getLocalName() == null || !reader.getLocalName().equalsIgnoreCase("table")) + throw new IllegalStateException(getPosition(reader) + " Illegal usage of TableSetParser.parseTable(XMLStreamParser)! 
This function can be called only when the reader has just read the START ELEMENT tag \"table\"."); + + TAPTable table = null; + TableType type = TableType.table; + String tag = null, name = null, description = null, title = null, utype = null; + ArrayList columns = new ArrayList(10); + + // Get the table type (attribute "type") [OPTIONAL] : + if (reader.getAttributeCount() > 0){ + int indType = 0; + while(indType < reader.getAttributeCount() && !reader.getAttributeLocalName(indType).equalsIgnoreCase("type")) + indType++; + if (indType < reader.getAttributeCount() && reader.getAttributeLocalName(indType).equalsIgnoreCase("type")){ + String typeTxt = reader.getAttributeValue(indType); + if (typeTxt != null && typeTxt.trim().length() > 0){ + typeTxt = typeTxt.trim().toLowerCase(); + try{ + if (typeTxt.equals("base_table")) + type = TableType.table; + else + type = TableType.valueOf(typeTxt); + }catch(IllegalArgumentException iae){ + /* Note: If type unknown, the given value is ignored and the default type - TableType.table - is kept. */ + } + } + } + } + + // Fetch the other information (tags): + while(nextTag(reader) == XMLStreamConstants.START_ELEMENT){ + // Get the tag name: + tag = reader.getLocalName(); + + // Identify the current tag: + if (tag.equalsIgnoreCase("name")){ + if (name != null) + throw new TAPException(getPosition(reader) + " Only one \"name\" element can exist in a /tableset/schema/table!"); + name = getText(reader); + }else if (tag.equalsIgnoreCase("description")) + description = ((description != null) ? (description + "\n") : "") + getText(reader); + else if (tag.equalsIgnoreCase("column")){ + columns.add(parseColumn(reader)); + }else if (tag.equalsIgnoreCase("foreignKey")) + keys.add(parseFKey(reader)); + else if (tag.equalsIgnoreCase("title")) + title = ((title != null) ? (title + "\n") : "") + getText(reader); + else if (tag.equalsIgnoreCase("utype")) + utype = getText(reader); + } + + // Only one info is required: the table name! 
+ if (name == null) + throw new TAPException(getPosition(reader) + " Missing table \"name\"!"); + + // Build the table: + table = new TAPTable(name, type, description, utype); + table.setTitle(title); + for(TAPColumn c : columns) + table.addColumn(c); + for(ForeignKey k : keys) + k.fromTable = table; + + return table; + } + + /** + *

    Parse the XML representation of a TAP column.

    + * + *

    Important: This function MUST be called just after the start element "column" has been read!

    + * + *

    Attributes

    + * + * The attribute "std" may be provided in the start element "column". One of the following value is expected: + *
      + *
    • false (default value if the attribute is omitted)
    • + *
    • true
    • + *
    + * + *

    Children

    + * + * Only the following nodes are taken into account ; the others are ignored: + *
      + *
    • name REQUIRED
    • + *
    • description {0..1}
    • + *
    • ucd {0..1}
    • + *
    • unit {0..1}
    • + *
    • utype {0..1}
    • + *
    • dataType {0..1}
    • + *
    • flag {*}, but only the values 'nullable', 'indexed' and 'primary' are currently supported by the library)
    • + *
    + * + * @param reader XML reader. + * + * @return The corresponding TAP column. + * + * @throws XMLStreamException If there is an error processing the underlying XML source. + * @throws TAPException If several "name" nodes are found, or if none such node is found ; exactly one "name" node must be found. + * @throws IllegalStateException If this function is called while the reader has not just read the START ELEMENT tag of "column". + * + * @see #parseDataType(XMLStreamReader) + */ + protected TAPColumn parseColumn(final XMLStreamReader reader) throws XMLStreamException, TAPException{ + // Ensure the reader has just read the START ELEMENT of column: + if (reader.getEventType() != XMLStreamConstants.START_ELEMENT || reader.getLocalName() == null || !reader.getLocalName().equalsIgnoreCase("column")) + throw new IllegalStateException(getPosition(reader) + " Illegal usage of TableSetParser.parseColumn(XMLStreamParser)! This function can be called only when the reader has just read the START ELEMENT tag \"column\"."); + + TAPColumn column = null; + boolean std = false, indexed = false, primary = false, nullable = false; + String tag = null, name = null, description = null, unit = null, ucd = null, utype = null; + DBType type = null; + + // Get the column STD flag (attribute "std") [OPTIONAL] : + if (reader.getAttributeCount() > 0){ + int indType = 0; + while(indType < reader.getAttributeCount() && !reader.getAttributeLocalName(indType).equalsIgnoreCase("std")) + indType++; + if (indType < reader.getAttributeCount() && reader.getAttributeLocalName(indType).equalsIgnoreCase("std")){ + String stdTxt = reader.getAttributeValue(indType); + if (stdTxt != null) + std = Boolean.parseBoolean(stdTxt.trim().toLowerCase()); + } + } + + // Fetch the other information (tags): + while(nextTag(reader) == XMLStreamConstants.START_ELEMENT){ + // Get the tag name: + tag = reader.getLocalName(); + + // Identify the current tag: + if (tag.equalsIgnoreCase("name")){ + if (name != null) + 
throw new TAPException(getPosition(reader) + " Only one \"name\" element can exist in a /tableset/schema/table/column!"); + name = getText(reader); + }else if (tag.equalsIgnoreCase("description")) + description = ((description != null) ? (description + "\n") : "") + getText(reader); + else if (tag.equalsIgnoreCase("dataType")) + type = parseDataType(reader); + else if (tag.equalsIgnoreCase("unit")) + unit = getText(reader); + else if (tag.equalsIgnoreCase("ucd")) + ucd = getText(reader); + else if (tag.equalsIgnoreCase("utype")) + utype = getText(reader); + else if (tag.equalsIgnoreCase("flag")){ + String txt = getText(reader); + if (txt != null){ + if (txt.equalsIgnoreCase("indexed")) + indexed = true; + else if (txt.equalsIgnoreCase("primary")) + primary = true; + else if (txt.equalsIgnoreCase("nullable")) + nullable = true; + } + } + } + + // Only one info is required: the table name! + if (name == null) + throw new TAPException(getPosition(reader) + " Missing column \"name\"!"); + + // Build the column: + column = new TAPColumn(name, type, description, unit, ucd, utype); + column.setStd(std); + column.setIndexed(indexed); + column.setPrincipal(primary); + column.setNullable(nullable); + + return column; + } + + /** + *

    Parse the XML representation of a column datatype.

    + * + *

    Important: This function MUST be called just after the start element "dataType" has been read!

    + * + *

    Attributes

    + * + * The attribute "xsi:type" (where xsi = http://www.w3.org/2001/XMLSchema-instance) MUST be provided. Only the following values are supported and accepted + * (below, vs = http://www.ivoa.net/xml/VODataService): + *
      + *
    • vs:VOTableType, and the following attributes may be also provided: + *
        + *
      • arraysize
      • + *
      • xtype
      • + *
    • + *
    • vs:TAPType, and the attribute "size" may be also provided
    • + *
    + * + *

    Children

    + * + * No child, but a text MUST be provided. Its value depends of the attribute "xsi:type": a VOTable datatype (e.g. char, float, short) if "xsi:type=vs:VOTableType", + * or a TAP type (e.g. VARCHAR, REAL, SMALLINT) if "xsi:type=vs:TAPType". Any other value will be rejected. + * + *

    IMPORTANT: All VOTable datatypes will be converted into TAPType automatically by the library.

    + * + * @param reader XML reader. + * + * @return The corresponding column datatype. + * + * @throws XMLStreamException If there is an error processing the underlying XML source. + * @throws TAPException If the attribute "xsi:type" is missing or incorrect, + * or if the datatype is unknown or not supported. + * @throws IllegalStateException If this function is called while the reader has not just read the START ELEMENT tag of "dataType". + * + * @see VOTableIterator#resolveVotType(String, String, String) + * @see DBType#DBType(DBDatatype, int) + */ + protected DBType parseDataType(final XMLStreamReader reader) throws XMLStreamException, TAPException{ + // Ensure the reader has just read the START ELEMENT of dataType: + if (reader.getEventType() != XMLStreamConstants.START_ELEMENT || reader.getLocalName() == null || !reader.getLocalName().equalsIgnoreCase("dataType")) + throw new IllegalStateException(getPosition(reader) + " Illegal usage of TableSetParser.parseDataType(XMLStreamParser)! This function can be called only when the reader has just read the START ELEMENT tag \"dataType\"."); + + String typeOfType = null, datatype = null, size = null, xtype = null, arraysize = null; + + /* Note: + * The 1st parameter of XMLStreamReader.getAttributeValue(String, String) should be the namespace of the attribute. + * If this value is NULL, the namespace condition is ignored. + * If it is an empty string - "" - an attribute without namespace will be searched. */ + + // Get the type of datatype : + typeOfType = reader.getAttributeValue(XSI_NAMESPACE, "type"); + + // Resolve the datatype: + if (typeOfType == null || typeOfType.trim().length() == 0) + throw new TAPException(getPosition(reader) + " Missing attribute \"xsi:type\" (where xsi = \"" + XSI_NAMESPACE + "\")! 
Expected attribute value: vs:VOTableType or vs:TAPType, where vs = " + VODATASERVICE_NAMESPACE + "."); + + // Separate the namespace and type parts: + String[] split = typeOfType.split(":"); + + // Ensure the number of parts is 2: + if (split.length != 2) + throw new TAPException(getPosition(reader) + " Unresolved type: \"" + typeOfType + "\"! Missing namespace prefix."); + // ...and ensure the namespace is the expected value: + else{ + String datatypeNamespace = reader.getNamespaceURI(split[0]); + if (datatypeNamespace == null) + throw new TAPException(getPosition(reader) + " Unresolved type: \"" + typeOfType + "\"! Unknown namespace."); + else if (!datatypeNamespace.startsWith(VODATASERVICE_NAMESPACE)) + throw new TAPException(getPosition(reader) + " Unsupported type: \"" + typeOfType + "\"! Expected: vs:VOTableType or vs:TAPType, where vs = " + VODATASERVICE_NAMESPACE + "."); + } + + // Get the other attributes: + size = reader.getAttributeValue("", "size"); + xtype = reader.getAttributeValue("", "xtype"); + arraysize = reader.getAttributeValue("", "arraysize"); + + // Get the datatype: + datatype = getText(reader); + if (datatype == null || datatype.trim().length() == 0) + throw new TAPException(getPosition(reader) + " Missing column datatype!"); + datatype = datatype.trim(); + + // Resolve the datatype in function of the value of xsi:type: + // CASE: VOTable + if (split[1].equalsIgnoreCase("VOTableType")) + return VOTableIterator.resolveVotType(datatype, arraysize, xtype).toTAPType(); + + // CASE: TAP type + else if (split[1].equalsIgnoreCase("TAPType")){ + // normalize the size attribute: + int colSize = -1; + if (size != null && size.trim().length() > 0){ + try{ + colSize = Integer.parseInt(size); + }catch(NumberFormatException nfe){} + } + // build and return the corresponding type: + try{ + return new DBType(DBDatatype.valueOf(datatype.toUpperCase()), colSize); + }catch(IllegalArgumentException iae){ + throw new TAPException(getPosition(reader) + " Unknown 
TAPType: \"" + datatype + "\"!"); + } + } + // DEFAULT => Throw an exception! + else + throw new TAPException(getPosition(reader) + " Unsupported type: \"" + typeOfType + "\"! Expected: vs:VOTableType or vs:TAPType, where vs = " + VODATASERVICE_NAMESPACE + "."); + } + + /** + *

	/**
	 * <p>Parse the XML representation of a TAP foreign key.</p>
	 *
	 * <p><b>Important: This function MUST be called just after the start element "foreignKey" has been read!</b></p>
	 *
	 * <h3>Attributes</h3>
	 * <p>No attribute is expected in the start element "foreignKey".</p>
	 *
	 * <h3>Children</h3>
	 * <p>Only the following nodes are taken into account ; the others are ignored:</p>
	 * <ul>
	 * 	<li>targetTable <b>REQUIRED</b></li>
	 * 	<li>description {0..1}</li>
	 * 	<li>utype {0..1}</li>
	 * 	<li>fkColumn {1..*}
	 * 		<ul>
	 * 			<li>fromColumn <b>REQUIRED</b></li>
	 * 			<li>targetColumn <b>REQUIRED</b></li>
	 * 		</ul>
	 * 	</li>
	 * </ul>
	 *
	 * @param reader	XML reader.
	 *
	 * @return	An object containing all information found in the XML node about the foreign key.
	 *
	 * @throws XMLStreamException		If there is an error processing the underlying XML source.
	 * @throws TAPException				If "targetTable" node is missing,
	 *               					or if no "fkColumn" is provided.
	 * @throws IllegalStateException	If this function is called while the reader has not just read the START ELEMENT tag of "foreignKey".
	 */
	protected ForeignKey parseFKey(final XMLStreamReader reader) throws XMLStreamException, TAPException{
		// Ensure the reader has just read the START ELEMENT of foreignKey:
		if (reader.getEventType() != XMLStreamConstants.START_ELEMENT || reader.getLocalName() == null || !reader.getLocalName().equalsIgnoreCase("foreignKey"))
			throw new IllegalStateException(getPosition(reader) + " Illegal usage of TableSetParser.parseFKey(XMLStreamParser)! This function can be called only when the reader has just read the START ELEMENT tag \"foreignKey\".");

		String tag;
		ForeignKey fk = new ForeignKey();

		// Fetch the other information (tags):
		while(nextTag(reader) == XMLStreamConstants.START_ELEMENT){
			// Get the tag name:
			tag = reader.getLocalName();

			// Identify the current tag:
			if (tag.equalsIgnoreCase("targetTable")){
				if (fk.targetTable != null)
					throw new TAPException(getPosition(reader) + " Only one \"targetTable\" element can exist in a /tableset/schema/table/foreignKey!");
				fk.targetTable = getText(reader);
				// the position is kept so that a useful error can be reported later, when the target table is resolved:
				fk.targetTablePosition = getPosition(reader);
			}else if (tag.equalsIgnoreCase("description"))
				fk.description = getText(reader);
			else if (tag.equalsIgnoreCase("utype"))
				fk.utype = getText(reader);
			else if (tag.equalsIgnoreCase("fkColumn")){
				// parse the inner fkColumn node ; exactly one fromColumn and one targetColumn are expected:
				String innerTag, fromCol = null, targetCol = null;
				while(nextTag(reader) == XMLStreamConstants.START_ELEMENT){
					innerTag = reader.getLocalName();
					if (innerTag.equalsIgnoreCase("fromColumn")){
						if (fromCol != null)
							throw new TAPException(getPosition(reader) + " Only one \"fromColumn\" element can exist in a /tableset/schema/table/foreignKey/fkColumn !");
						fromCol = getText(reader);
					}else if (innerTag.equalsIgnoreCase("targetColumn")){
						if (targetCol != null)
							throw new TAPException(getPosition(reader) + " Only one \"targetColumn\" element can exist in a /tableset/schema/table/foreignKey/fkColumn !");
						targetCol = getText(reader);
					}else
						// unknown children of fkColumn are entirely skipped:
						goToEndTag(reader, reader.getLocalName());
				}
				// Only two info are required: the source and the target columns!
				if (fromCol == null)
					throw new TAPException(getPosition(reader) + " Missing \"fromColumn\"!");
				else if (targetCol == null)
					throw new TAPException(getPosition(reader) + " Missing \"targetColumn\"!");
				else
					fk.keyColumns.put(fromCol, targetCol);
			}else
				// unknown children of foreignKey are entirely skipped:
				goToEndTag(reader, tag);
		}

		// Check the last read tag is the END ELEMENT of a foreignKey node:
		if (reader.getEventType() != XMLStreamConstants.END_ELEMENT)
			throw new TAPException(getPosition(reader) + " Unexpected tag! An END ELEMENT tag for foreignKey was expected.");
		else if (!reader.getLocalName().equalsIgnoreCase("foreignKey"))
			// NOTE(review): this message looks truncated in this view — it probably originally embedded the unexpected tag name ; confirm against upstream.
			throw new TAPException(getPosition(reader) + " Unexpected node end tag: ! An END ELEMENT tag for foreignKey was expected.");

		// The target table name is required!
		if (fk.targetTable == null)
			throw new TAPException(getPosition(reader) + " Missing \"targetTable\"!");
		// At least one columns association is expected!
		else if (fk.keyColumns.size() == 0)
			throw new TAPException(getPosition(reader) + " Missing at least one \"fkColumn\"!");

		return fk;
	}

	/* ***************** */
	/* UTILITY FUNCTIONS */
	/* ***************** */

    Get the current position of the given reader.

    + * + *

    + * This position is returned as a string having the following syntax: "[l.x,c.y]" + * (where x is the line number and y the column number ; x and y start at 1 ; x and y + * are both -1 if the end of the XML document has been reached). + *

    + * + *

    Note: + * The column position is generally just after the read element (node start/end tag, characters). + * However, with CHARACTERS items, this column position may be 2 characters after the real end. + *

    + * + * @param reader XML reader whose the current position must be returned. + * + * @return A string representing the current reader position. + */ + protected final String getPosition(final XMLStreamReader reader){ + return "[l." + reader.getLocation().getLineNumber() + ",c." + reader.getLocation().getColumnNumber() + "]"; + } + + /** + * Skip every elements until a START ELEMENT or an END ELEMENT is reached. + * + * @param reader XML reader. + * + * @return The event of the last read tag. Here, either {@link XMLStreamConstants#START_ELEMENT} or {@link XMLStreamConstants#END_ELEMENT}. + * + * @throws XMLStreamException If there is an error processing the underlying XML source. + */ + protected final int nextTag(final XMLStreamReader reader) throws XMLStreamException{ + int event = -1; + do{ + event = reader.next(); + }while(event != XMLStreamConstants.START_ELEMENT && event != XMLStreamConstants.END_ELEMENT); + return event; + } + + /** + *

	/**
	 * <p>Skip all tags from the current position to the end of the specified node.</p>
	 *
	 * <p><b>IMPORTANT:
	 * This function MUST be called ONLY IF the reader is inside the node whose the end tag is searched.
	 * It may be in a child of this node or not, but the most important is to be inside it.
	 * </b></p>
	 *
	 * <p><i>Note:
	 * No tag will be read if the given startNode is NULL or an empty string.
	 * </i></p>
	 *
	 * @param reader	XML reader.
	 * @param startNode	Name of the node whose the end must be reached.
	 *
	 * @throws XMLStreamException	If there is an error processing the underlying XML source.
	 * @throws TAPException			If the name of the only corresponding end element does not match the given one,
	 *                 				or if the END ELEMENT can not be found (2 possible reasons for that:
	 *                 				1/ malformed XML document, 2/ this function has been called before the START ELEMENT has been read).
	 */
	protected final void goToEndTag(final XMLStreamReader reader, final String startNode) throws XMLStreamException, TAPException{
		// nothing to do if no node name is given:
		if (startNode == null || startNode.trim().length() <= 0)
			return;
		// nothing to do either if the reader is already on the searched END ELEMENT:
		else if (reader.getEventType() == XMLStreamConstants.END_ELEMENT && reader.getLocalName().equalsIgnoreCase(startNode))
			return;

		/* "level" counts how deep the reader has descended relative to its starting
		 * position: each START ELEMENT increments it, each END ELEMENT decrements it.
		 * The searched END ELEMENT is accepted only when level <= 0 and the name matches. */
		int level = 0, event;
		while(reader.hasNext()){
			event = reader.next();
			switch(event){
				case XMLStreamConstants.START_ELEMENT:
					level++;
					break;
				case XMLStreamConstants.END_ELEMENT:
					if (level <= 0 && reader.getLocalName().equalsIgnoreCase(startNode)) // "level <= 0" because the reader may be inside a child of the node whose the end is searched.
						return;
					else
						level--;
			}
		}

		/* If no matching END ELEMENT, then either the XML document is malformed
		 * or #goToEndTag(...) has been called before the corresponding START ELEMENT has been read: */
		throw new TAPException(getPosition(reader) + " Malformed XML document: missing an END TAG !");
	}

    Get the text of the current node.

    + * + *

    + * This function iterates while the next tags are of type CHARACTERS. + * Consequently, the next tag (start or end element) is already read when returning this function. + *

    + * + *

    + * All CHARACTERS elements are concatenated. + * All leading and trailing space characters (\r \n \t and ' ') of every lines are deleted ; only the last or the first \n or \r are kept. + *

    + * + *

    Note: + * This function is also skipping all COMMENT elements. This is particularly useful if a COMMENT is splitting a node text content ; + * in such case, the comment is ignored and both divided text are concatenated. + *

    + * + * @param reader XML reader. + * + * @return The whole text content of the current node. + * + * @throws XMLStreamException If there is an error processing the underlying XML source. + */ + protected final String getText(final XMLStreamReader reader) throws XMLStreamException{ + StringBuffer txt = new StringBuffer(); + while(reader.next() == XMLStreamConstants.CHARACTERS || reader.getEventType() == XMLStreamConstants.COMMENT){ + if (reader.getEventType() == XMLStreamConstants.CHARACTERS){ + if (reader.getText() != null) + txt.append(reader.getText().replaceAll("[ \\t]+([\\n\\r]+)", "$1").replaceAll("([\\n\\r]+)[ \\t]+", "$1")); + } + }; + return txt.toString().trim(); + } + + /** + *

    Search for the specified table in the given TAP metadata.

    + * + *

    Note: This function is not case sensitive.

    + * + * @param tableName Name of the table to search. The table name MAY be prefixed by a schema name (e.g. "mySchema.myTable"). + * @param meta All fetched TAP metadata. + * @param position Position of the table name in the XML document. This parameter is ONLY used in case of error. + * + * @return The corresponding TAP table. + * + * @throws TAPException If the table name syntax ([schema.]table) is incorrect, + * or if several tables match to the specified table name (which is not prefixed by a schema name), + * or if no match can be found. + */ + protected final TAPTable searchTable(final String tableName, final TAPMetadata meta, final String position) throws TAPException{ + // Extract the schema name and normalize the table name: + String schema = null, table = tableName.trim(); + if (tableName.indexOf('.') >= 0){ + // get the schema name: + schema = tableName.substring(0, tableName.indexOf('.')).trim(); + // test that the schema name is not null: + if (schema.length() == 0) + throw new TAPException(position + " Incorrect full table name - \"" + tableName + "\": empty schema name!"); + // test that the remaining table name is not null: + else if (tableName.substring(schema.length() + 1).trim().length() == 0) + throw new TAPException(position + " Incorrect full table name - \"" + tableName + "\": empty table name!"); + // test there is no more '.' 
separator in the remaining table name: + else if (tableName.indexOf('.', schema.length() + 1) >= 0) + throw new TAPException(position + " Incorrect full table name - \"" + tableName + "\": only a schema and a table name can be specified (expected syntax: \"schema.table\")\"!"); + // get the table name: + table = tableName.substring(schema.length() + 1).trim(); + } + + // Find all matching tables: + ArrayList founds = new ArrayList(1); + StringBuffer foundsAsTxt = new StringBuffer(); + TAPTable t; + Iterator allTables = meta.getTables(); + while(allTables.hasNext()){ + // get the table to test: + t = allTables.next(); + if (t == null) + continue; + // store it if the schema and table names match: + if ((schema == null || t.getADQLSchemaName().equalsIgnoreCase(schema)) && t.getADQLName().equalsIgnoreCase(table)){ + // update the result array: + founds.add(t); + // update the text list: + if (foundsAsTxt.length() > 0) + foundsAsTxt.append(", "); + foundsAsTxt.append(t.getADQLSchemaName()).append('.').append(t.getADQLName()); + } + } + + if (founds.size() == 0) + throw new TAPException(position + " Unknown table: \"" + tableName + "\"!"); + else if (founds.size() > 1) + throw new TAPException(position + " Unresolved table: \"" + tableName + "\"! Several tables have the same name but in different schemas (here: " + foundsAsTxt.toString() + "). You must prefix this table name by a schema name (expected syntax: \"schema.table\")."); + else + return founds.get(0); + } + +} diff --git a/src/tap/metadata/VotType.java b/src/tap/metadata/VotType.java index 27daedea467e74a23c21191131225448154eac4f..3f27046f0f3b228cff38d61485bf2ea666a83dfd 100644 --- a/src/tap/metadata/VotType.java +++ b/src/tap/metadata/VotType.java @@ -16,10 +16,14 @@ package tap.metadata; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . 
* - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ -import cds.savot.writer.SavotWriter; +import tap.TAPException; +import uk.ac.starlink.votable.VOSerializer; +import adql.db.DBType; +import adql.db.DBType.DBDatatype; /** *

/**
 * <p>Describes a full VOTable type. Thus it includes the following field attributes:</p>
 * <ul>
 * 	<li>datatype,</li>
 * 	<li>arraysize,</li>
 * 	<li>xtype.</li>
 * </ul>
 *
 * @author Gr&eacute;gory Mantelet (CDS;ARI)
 * @version 2.0 (02/2015)
 */
public final class VotType {
	/**
	 * All possible values for a VOTable datatype (i.e. boolean, short, char, ...).
	 *
	 * @author Gr&eacute;gory Mantelet (ARI) - gmantele@ari.uni-heidelberg.de
	 * @version 2.0 (01/2015)
	 * @since 2.0
	 */
	public static enum VotDatatype{
		BOOLEAN("boolean"), BIT("bit"), UNSIGNEDBYTE("unsignedByte"), SHORT("short"), INT("int"), LONG("long"), CHAR("char"), UNICODECHAR("unicodeChar"), FLOAT("float"), DOUBLE("double"), FLOATCOMPLEX("floatComplex"), DOUBLECOMPLEX("doubleComplex");

		/** Exact serialized form of this datatype in a VOTable document (e.g. "unsignedByte", not "UNSIGNEDBYTE"). */
		private final String strExpr;

		private VotDatatype(final String str){
			// fall back on the enum constant name if no explicit string form is given:
			strExpr = (str == null || str.trim().length() == 0) ? name() : str;
		}

		@Override
		public String toString(){
			return strExpr;
		}
	}

	/** Special VOTable type (XType) for TAP/DB type BLOB.
	 * @since 2.0 */
	public final static String XTYPE_BLOB = "adql:BLOB";
	/** Special VOTable type (XType) for TAP/DB type CLOB.
	 * @since 2.0 */
	public final static String XTYPE_CLOB = "adql:CLOB";
	/** Special VOTable type (XType) for TAP/DB type TIMESTAMP.
	 * @since 2.0 */
	public final static String XTYPE_TIMESTAMP = "adql:TIMESTAMP";
	/** Special VOTable type (XType) for TAP/DB type POINT.
	 * @since 2.0 */
	public final static String XTYPE_POINT = "adql:POINT";
	/** Special VOTable type (XType) for TAP/DB type REGION.
	 * @since 2.0 */
	public final static String XTYPE_REGION = "adql:REGION";

	/** VOTable datatype.
	 * @since 2.0 */
	public final VotDatatype datatype;
	/** Arraysize string of a VOTable field element. May be NULL. */
	public final String arraysize;
*/ public final String xtype; /** - * @param datatype A datatype (ex: char, int, long, ...). Null value forbidden - * @param arraysize A non-null positive integer. (any value ≤ 0 will be considered as an undetermined arraysize). - * @param xtype A special type (ex: adql:POINT, adql:TIMESTAMP, ...). Null value allowed. + * Build a VOTable field type. + * + * @param datatype A datatype. Null value forbidden + * @param arraysize VOTable arraysize string (may be NULL). + */ + public VotType(final VotDatatype datatype, final String arraysize){ + this(datatype, arraysize, null); + } + + /** + * Build a VOTable field type. + * + * @param datatype A datatype. Null value forbidden + * @param arraysize VOTable arraysize string (may be NULL). + * @param xtype A special type (ex: adql:POINT, adql:TIMESTAMP, ...). (may be NULL). */ - public VotType(final String datatype, final int arraysize, final String xtype){ + public VotType(final VotDatatype datatype, final String arraysize, final String xtype){ + // set the datatype: if (datatype == null) - throw new NullPointerException("Null VOTable datatype !"); - this.datatype = datatype; - this.arraysize = arraysize; - this.xtype = xtype; + throw new NullPointerException("missing VOTable datatype !"); + else + this.datatype = datatype; + + // set the array-size: + if (arraysize != null && arraysize.trim().length() > 0) + this.arraysize = arraysize.trim(); + else + this.arraysize = null; + + // set the xtype: + if (xtype != null && xtype.trim().length() > 0) + this.xtype = xtype.trim(); + else + this.xtype = null; + } + + /** + * Build a {@link VotType} object by converting the given {@link DBType}. + * + * @param tapType {@link DBType} to convert. 
+ */ + public VotType(final DBType tapType){ + switch(tapType.type){ + case SMALLINT: + this.datatype = VotDatatype.SHORT; + this.arraysize = "1"; + this.xtype = null; + break; + + case INTEGER: + this.datatype = VotDatatype.INT; + this.arraysize = "1"; + this.xtype = null; + break; + + case BIGINT: + this.datatype = VotDatatype.LONG; + this.arraysize = "1"; + this.xtype = null; + break; + + case REAL: + this.datatype = VotDatatype.FLOAT; + this.arraysize = "1"; + this.xtype = null; + break; + + case DOUBLE: + this.datatype = VotDatatype.DOUBLE; + this.arraysize = "1"; + this.xtype = null; + break; + + case CHAR: + this.datatype = VotDatatype.CHAR; + this.arraysize = Integer.toString(tapType.length > 0 ? tapType.length : 1); + this.xtype = null; + break; + + case BINARY: + this.datatype = VotDatatype.UNSIGNEDBYTE; + this.arraysize = Integer.toString(tapType.length > 0 ? tapType.length : 1); + this.xtype = null; + break; + + case VARBINARY: + /* TODO HOW TO MANAGE VALUES WHICH WHERE ORIGINALLY NUMERIC ARRAYS ? + * (cf the IVOA document TAP#Upload: votable numeric arrays should be converted into VARBINARY...no more array information and particularly the datatype) + */ + this.datatype = VotDatatype.UNSIGNEDBYTE; + this.arraysize = (tapType.length > 0 ? 
tapType.length + "*" : "*"); + this.xtype = null; + break; + + case BLOB: + this.datatype = VotDatatype.UNSIGNEDBYTE; + this.arraysize = "*"; + this.xtype = VotType.XTYPE_BLOB; + break; + + case CLOB: + this.datatype = VotDatatype.CHAR; + this.arraysize = "*"; + this.xtype = VotType.XTYPE_CLOB; + break; + + case TIMESTAMP: + this.datatype = VotDatatype.CHAR; + this.arraysize = "*"; + this.xtype = VotType.XTYPE_TIMESTAMP; + break; + + case POINT: + this.datatype = VotDatatype.CHAR; + this.arraysize = "*"; + this.xtype = VotType.XTYPE_POINT; + break; + + case REGION: + this.datatype = VotDatatype.CHAR; + this.arraysize = "*"; + this.xtype = VotType.XTYPE_REGION; + break; + + case VARCHAR: + default: + this.datatype = VotDatatype.CHAR; + this.arraysize = (tapType.length > 0 ? tapType.length + "*" : "*"); + this.xtype = null; + break; + } } @Override @@ -56,13 +222,7 @@ public final class VotType { if (obj == null) return false; try{ - VotType vot = (VotType)obj; - if (datatype.equalsIgnoreCase(vot.datatype)){ - if (xtype == null) - return (vot.xtype == null); - else - return xtype.equalsIgnoreCase(vot.xtype); - } + return toString().equals(obj); }catch(ClassCastException cce){ ; } @@ -71,23 +231,147 @@ public final class VotType { @Override public int hashCode(){ - return datatype.toLowerCase().hashCode(); + return datatype.toString().hashCode(); } @Override public String toString(){ - StringBuffer str = new StringBuffer("datatype=\""); - str.append(datatype).append('"'); + StringBuffer str = new StringBuffer(VOSerializer.formatAttribute("datatype", datatype.toString())); + str.deleteCharAt(0); - if (arraysize == TAPTypes.STAR_SIZE) - str.append(" arraysize=\"*\""); - else if (arraysize != TAPTypes.NO_SIZE && arraysize > 0) - str.append(" arraysize=\"").append(SavotWriter.encodeAttribute("" + arraysize)).append('"'); + if (arraysize != null) + str.append(VOSerializer.formatAttribute("arraysize", arraysize)); if (xtype != null) - str.append(" 
xtype=\"").append(SavotWriter.encodeAttribute(xtype)).append('"'); + str.append(VOSerializer.formatAttribute("xtype", xtype)); return str.toString(); } + /** + * Convert this VOTable type definition into a TAPColumn type. + * + * @return The corresponding {@link DBType}. + * + * @throws TAPException If the conversion is impossible (particularly if the array-size refers to a multi-dimensional array ; only 1D arrays are allowed). + */ + public DBType toTAPType() throws TAPException{ + + /* Stop immediately if the arraysize refers to a multi-dimensional array: + * (Note: 'x' is the dimension separator of the VOTable attribute 'arraysize') */ + if (arraysize != null && arraysize.indexOf('x') >= 0) + throw new TAPException("failed conversion of a VOTable datatype: multi-dimensional arrays (" + datatype + "[" + arraysize + "]) are not allowed!"); + + // Convert the VOTable datatype into TAP datatype: + switch(datatype){ + /* NUMERIC TYPES */ + case SHORT: + case BOOLEAN: + return convertNumericType(DBDatatype.SMALLINT); + + case INT: + return convertNumericType(DBDatatype.INTEGER); + + case LONG: + return convertNumericType(DBDatatype.BIGINT); + + case FLOAT: + return convertNumericType(DBDatatype.REAL); + + case DOUBLE: + return convertNumericType(DBDatatype.DOUBLE); + + /* BINARY TYPES */ + case UNSIGNEDBYTE: + // BLOB exception: + if (xtype != null && xtype.equalsIgnoreCase(XTYPE_BLOB)) + return new DBType(DBDatatype.BLOB); + + // Or else, just (var)binary: + else + return convertVariableLengthType(DBDatatype.VARBINARY, DBDatatype.BINARY); + + /* CHARACTER TYPES */ + case CHAR: + default: + /* Special type cases: */ + if (xtype != null){ + if (xtype.equalsIgnoreCase(VotType.XTYPE_CLOB)) + return new DBType(DBDatatype.CLOB); + else if (xtype.equalsIgnoreCase(VotType.XTYPE_TIMESTAMP)) + return new DBType(DBDatatype.TIMESTAMP); + else if (xtype.equalsIgnoreCase(VotType.XTYPE_POINT)) + return new DBType(DBDatatype.POINT); + else if 
(xtype.equalsIgnoreCase(VotType.XTYPE_REGION)) + return new DBType(DBDatatype.REGION); + } + + // Or if not known or missing, just a (var)char: + return convertVariableLengthType(DBDatatype.VARCHAR, DBDatatype.CHAR); + } + } + + /** + *

Convert this numeric {@link VotType} object into a corresponding {@link DBType} whose the datatype is provided in parameter.

+ * + *

+ * Thus, just the arraysize must be managed here. If there is no arraysize or if equals to '1', the given datatype will be used. + * Otherwise, it is ignored and a {@link DBType} with VARBINARY is returned. + *

+ * + * @param tapDatatype TAP datatype corresponding to this {@link VotType} (only when arraysize != '*' and 'n'). + * + * @return The corresponding {@link DBType}. + */ + protected DBType convertNumericType(final DBDatatype tapDatatype){ + // If no arraysize: + if (arraysize == null || arraysize.equals("1")) + return new DBType(tapDatatype); + + // If only one dimension: + else + return new DBType(DBDatatype.VARBINARY); + + /* Note: The test of multi-dimensional array should have been already done at the beginning of #toTAPType(). */ + } + + /** + *

+ * Convert this variable length {@link VotType} (unsignedByte and char) object into a corresponding {@link DBType} + * whose the variable length and fixed length versions are given in parameters. + *

+ * + *

Thus, just the arraysize must be managed here. The following cases are taken into account:

+ *
    + *
  • No arraysize or '*': variable length type (i.e. VARCHAR, VARBINARY),
  • + *
  • 'n*': variable length type with the maximal length (i.e. VARCHAR(n), VARBINARY(n)),
  • + *
  • 'n': fixed length type with the exact length (i.e. CHAR(n), BINARY(n)).
  • + *
+ * + * @param varType Variable length type (i.e. VARCHAR, VARBINARY). + * @param fixedType Fixed length type (i.e. CHAR, BINARY). + * + * @return The corresponding {@link DBType}. + * + * @throws TAPException If the arraysize is not valid (that's to say, different from the following syntaxes: NULL, '*', 'n' or 'n*' (where n is a positive and not-null integer)). + */ + protected DBType convertVariableLengthType(final DBDatatype varType, final DBDatatype fixedType) throws TAPException{ + try{ + // no arraysize or '*' => VARCHAR or VARBINARY + if (arraysize == null || arraysize.equals("*")) + return new DBType(varType); + + // 'n*' => VARCHAR(n) or VARBINARY(n) + else if (arraysize.charAt(arraysize.length() - 1) == '*') + return new DBType(varType, Integer.parseInt(arraysize.substring(0, arraysize.length() - 1))); + + // 'n' => CHAR(n) or BINARY(n) + else + return new DBType(fixedType, Integer.parseInt(arraysize)); + + }catch(NumberFormatException nfe){ + throw new TAPException("failed conversion of a VOTable datatype: non-numeric arraysize (" + arraysize + ")!"); + } + } + } diff --git a/src/tap/parameters/DALIUpload.java b/src/tap/parameters/DALIUpload.java new file mode 100644 index 0000000000000000000000000000000000000000..fdf001db3881e2a1a6fa6a18c8fa624627654306 --- /dev/null +++ b/src/tap/parameters/DALIUpload.java @@ -0,0 +1,601 @@ +package tap.parameters; + +/* + * This file is part of TAPLibrary. + * + * TAPLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * TAPLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public License + * along with TAPLibrary. If not, see . + * + * Copyright 2014 - Astronomisches Rechen Institut (ARI) + */ + +import java.io.IOException; +import java.io.InputStream; +import java.io.UnsupportedEncodingException; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URLDecoder; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import tap.TAPException; +import tap.TAPJob; +import uws.UWSException; +import uws.service.file.UWSFileManager; +import uws.service.file.UnsupportedURIProtocolException; +import uws.service.request.RequestParser; +import uws.service.request.UploadFile; + +/** + *

Description of an uploaded content specified using the DALI/TAP syntax.

+ * + *

How to access the upload content?

+ * + *

+ * This parameter is either a reference to a distant content and is then specified by a URI, + * or a pointer to the stored version of a file submitted inline in a HTTP request. In both cases, + * this class lets access the upload content with the function {@link #open()}. + *

+ * + *

How to get {@link DALIUpload} objects from HTTP request parameters?

+ * + *

+ * The static function {@link #getDALIUploads(Map, boolean, UWSFileManager)} should be used in order to + * extract the {@link DALIUpload} items specified in a list of request parameters. + *

+ *

Note: + * It is recommended to provide these parameters as a map generated by a {@link RequestParser}. + * If not, you should ensure that values of the map associated to the "UPLOAD" parameter(s) are {@link String}s, {@link String}[]s, + * {@link DALIUpload}s, {@link DALIUpload}[]s or {@link Object}[] containing {@link String}s and/or {@link DALIUpload}s. + * Besides, the request parameters referenced using the syntax "param:{param-name}" must be instances of only {@link UploadFile} + * or an array of {@link Object}s containing at least one {@link UploadFile} instance (if several are found, just the last one will be used). + *

+ *

+ * Calling this function will also modify a little the given list of parameters by rewriting the "UPLOAD" parameter and + * removing unreferenced uploaded files (from the list and from the file-system). + *

+ * + *

Reminder about the "UPLOAD" parameter

+ * + *

+ * The IVOA standards DAL and TAP define both the same special parameter: "UPLOAD" (not case-sensitive). + *

+ * + *

+ * This parameter lists all upload items. A such item can be either an inline file or a reference to a distant file. + * In both cases, it is specified as a URI. The parameter "UPLOAD" sets also a label/name to this item. + * The syntax to use for a single item is the following: "{label},{URI}". Several items can be provided, but there is + * a slight difference between DALI and TAP in the way to do it. DALI says that multiple uploads MUST be done + * by several submit of a single "UPLOAD" parameter with the syntax described above. TAP says that multiple uploads CAN + * be done in one "UPLOAD" parameter by separating each item with a semicolon (;). For instance: + *

+ *
    + *
  • In TAP: "UPLOAD=tableA,param:foo;tableB,http://..." => only 1 parameter for 2 uploads
  • + *
  • In DALI: "UPLOAD=tableA,param:foo" and "UPLOAD=tableB,http://..." => 2 parameters, one for each upload
  • + *
+ * + *

Note: + * The drawback of the TAP method is: what happens when a URI contains a semicolon? URI can indeed contain a such character + * and in this case the parsing becomes more tricky, or even impossible in some cases. In such cases, it is strongly + * recommended to either encode the URI (so the ";" becomes "%3B") or to forbid the TAP syntax. This latter can be + * done by setting the second parameter of {@link #getDALIUploads(Map, boolean, UWSFileManager)} to false. + *

+ * + * @author Grégory Mantelet (ARI) + * @version 2.0 (12/2014) + * @since 2.0 + * + * @see RequestParser + */ +public class DALIUpload { + + /**

Pointer to the stored version of the file submitted inline in a HTTP request.

+ *

Note: + * If NULL, this {@link DALIUpload} is then a "byReference" upload, meaning that its content is distant + * and can be accessed only with the URI {@link #uri}. + *

*/ + public final UploadFile file; + + /**

URI toward a distant resource.

+ *

Note: + * If NULL, this {@link DALIUpload} corresponds to a file submitted inline in a HTTP request. + * Its content has then been stored by this service and can be accessed using the pointer {@link #file}. + *

*/ + public final URI uri; + + /**

Name to use in the service to label this upload.

+ *

Note: + * In a TAP service, this label is the name of the table to create in the database + * when creating the corresponding table inside it. + *

*/ + public final String label; + + /** The file manager to use when a stream will be opened toward the given URI. + * It should know how to access it, because the URI can use a URL scheme (http, https, ftp) but also another scheme + * unknown by the library (e.g. ivo, vos). */ + protected final UWSFileManager fileManager; + + /** + *

Build a {@link DALIUpload} whose the content has been submitted inline in an HTTP request.

+ * + *

+ * A such upload has been specified by referencing another HTTP request parameter containing an inline file. + * The used syntax was then: "{label},param:{param-name}". + *

+ * + * @param label Label of the DALIUpload (i.e. {label} inside an "UPLOAD" parameter value "{label},{URI}"). + * Note: If NULL, the file name will be used as label. + * @param file Pointer to the uploaded file. + */ + public DALIUpload(final String label, final UploadFile file){ + if (file == null) + throw new NullPointerException("Missing UploadFile! => Can not build a DaliUpload instance."); + + this.label = (label == null) ? file.paramName : label; + this.file = file; + this.uri = null; + this.fileManager = null; + } + + /** + *

Build a {@link DALIUpload} whose the content is distant and specified by a URI.

+ * + *

+ * A such upload has been specified by referencing a URI (whose the scheme is different from "param"). + * The used syntax was then: "{label},{URI}". + *

+ * + * @param label Label of the DALIUpload (i.e. {label} inside an "UPLOAD" parameter value "{label},{URI}"). Note: If NULL, the URI will be used as label. + * @param uri URI toward a distant file. The scheme of this URI must be different from "param". This scheme is indeed reserved by the DALI syntax to reference a HTTP request parameter containing an inline file. + * @param fileManager The file manager to use when a stream will be opened toward the given URI. This file manager should know how to access it, + * because the URI can use a URL scheme (http, https, ftp) but also another scheme unknown by the library (e.g. ivo, vos). + */ + public DALIUpload(final String label, final URI uri, final UWSFileManager fileManager){ + if (uri == null) + throw new NullPointerException("Missing URI! => Can not build a DaliUpload instance."); + else if (uri.getScheme() != null && uri.getScheme().equalsIgnoreCase("param")) + throw new IllegalArgumentException("Wrong URI scheme: \"param\" is reserved to reference a HTTP request parameter! If used, the content of this parameter must be stored in a file, then the parameter must be represented by an UploadFile and integrated into a DALIUpload with the other constructor."); + else if (uri.getScheme() != null && uri.getScheme().equalsIgnoreCase("file")) + throw new IllegalArgumentException("Wrong URI scheme: \"file\" is forbidden!"); + else if (fileManager == null) + throw new NullPointerException("Missing File Manager! => Can not build a DaliUpload instance."); + + this.label = (label == null) ? uri.toString() : label; + this.uri = uri; + this.file = null; + this.fileManager = fileManager; + } + + /** + * Tell whether this upload is actually a reference toward a distant resource. + * + * @return true if this upload is referenced by a URI, + * false if the upload has been submitted inline in the HTTP request. + */ + public boolean isByReference(){ + return (file == null); + } + + /** + * Open a stream to the content of this upload. 
+ * + * @return An InputStream. + * + * @throws UnsupportedURIProtocolException If the URI of this upload item is using a protocol not supported by this service implementation. + * @throws IOException If the stream can not be opened. + */ + public InputStream open() throws UnsupportedURIProtocolException, IOException{ + if (file == null) + return fileManager.openURI(uri); + else + return file.open(); + } + + @Override + public String toString(){ + return label + "," + (file != null ? "param:" + file.paramName : uri.toString()); + } + + /* ****************************** */ + /* EXTRACTION OF DALI/TAP UPLOADS */ + /* ****************************** */ + + /**

Regular expression of an UPLOAD parameter as defined by DALI (REC-DALI-1.0-20131129).

+ *

Note: + * In DALI, multiple uploads must be done by posting several UPLOAD parameters. + * It is not possible to provide directly a list of parameters as in TAP. + * However, the advantage of the DALI method is to allow ; in URI (while ; is the + * parameter separator in TAP). + *

*/ + protected static final String DALI_UPLOAD_REGEXP = "[^,]+,\\s*(param:.+|.+)"; + + /**

Regular expression of an UPLOAD parameter as defined by TAP (REC-TAP-1.0).

+ *

Note: + * In TAP, multiple uploads may be done by POSTing only one UPLOAD parameter + * whose the value is a list of DALI UPLOAD parameters, separated by a ; + *

*/ + protected static final String TAP_UPLOAD_REGEXP = DALI_UPLOAD_REGEXP + "(\\s*;\\s*" + DALI_UPLOAD_REGEXP + ")*"; + + /** + *

Get all uploads specified in the DALI parameter "UPLOAD" from the given request parameters.

+ * + *

Note: + * This function is case INsensitive for the "UPLOAD" parameter. + *

+ *

WARNING: + * Calling this function modifies the given map ONLY IF the "UPLOAD" parameter (whatever is its case) is found. + * In such case, the following modifications are applied: + *

+ *
    + *
  • + * All "UPLOAD" parameters will be removed and then added again in the map with their corresponding {@link DALIUpload} item (not any more a String). + *
  • + *
  • + * If allowTAPSyntax is true, several uploads may be specified in the same "UPLOAD" parameter value. + * For more clarity for the user (once the parameters listed), this list of uploads will be split in the same number of "UPLOAD" parameters. + * That's to say, there will be only one "UPLOAD" item in the Map, but its value will be an array containing every specified uploads: + * an array of {@link DALIUpload} objects. + *
  • + *
  • + * If there is at least one "UPLOAD" parameter, all uploaded files (parameters associated with instances of {@link UploadFile}) will be removed + * from the map (and also from the file system). They are indeed not useful for a DALI service since all interesting uploads have already been + * listed. + *
  • + *
+ * + *

Note: + * This function can be called several times on the same map. After a first call, this function will just gathers into a List + * all found {@link DALIUpload} objects. Of course, only uploads specified in the "UPLOAD" parameter(s) will be returned and others will be removed + * as explained above. + *

+ * + *

DALI and TAP syntax

+ *

+ * The "UPLOAD" parameter lists all files to consider as uploaded. + * The syntax for one item is the following: "{name},{uri}", where {uri} is "param:{param-ref}" when the file is provided + * inline in the parameter named {param-ref}, otherwise, it can be any valid URI (http:..., ftp:..., vos:..., ivo:..., etc...). + *

+ * + *

+ * The parameter allowTAPSyntax lets switch between the DALI and TAP syntax. + * The only difference between them, is in the way to list multiple uploads. In TAP, they can be given as a semicolon separated + * list in a single parameter, whereas in DALI, there must be submitted as several individual parameters. For instance: + *

+ *
    + *
  • In TAP: "UPLOAD=tableA,param:foo;tableB,http://..." => only 1 parameter
  • + *
  • In DALI: "UPLOAD=tableA,param:foo" and "UPLOAD=tableB,http://..." => 2 parameters
  • + *
+ * + *

Note: + * Because of the possible presence of a semicolon in a URI (which is also used as separator of uploads in the TAP syntax), + * there could be a problem while splitting the uploads specified in "UPLOAD". In that case, it is strongly recommended to + * either encode the URI (in UTF-8) (i.e. ";" becomes "%3B") or to merely restrict the syntax to the DALI one. In this last case, + * the parameter "allowTAPSyntax" should be set to false and then all parameters should be submitted individually. + *

+ * + * @param requestParams All parameters extracted from an HTTP request by a {@link RequestParser}. + * @param allowTAPSyntax true to allow a list of several upload items in one "UPLOAD" parameter value (each item separated by a semicolon), + * false to forbid it (and so, multiple upload items shall be submitted individually). + * @param fileManager The file manager to use in order to build a {@link DALIUpload} objects from a URI. + * (a link to the file manager will be set in the {@link DALIUpload} object in order to open it + * whenever it will asked after its creation) + * + * @return List of all uploads specified with the DALI or TAP syntax. + * + * @throws TAPException If the syntax of an "UPLOAD" parameter is wrong. + * + * @see RequestParser#parse(javax.servlet.http.HttpServletRequest) + */ + public final static List getDALIUploads(final Map requestParams, final boolean allowTAPSyntax, final UWSFileManager fileManager) throws TAPException{ + + // 1. Get all "UPLOAD" parameters and build/get their corresponding DALIUpload(s): + ArrayList uploads = new ArrayList(3); + ArrayList usedFiles = new ArrayList(3); + Iterator> it = requestParams.entrySet().iterator(); + Map.Entry entry; + Object value; + while(it.hasNext()){ + entry = it.next(); + + // If the parameter is an "UPLOAD" one: + if (entry.getKey() != null && entry.getKey().toLowerCase().equals(TAPJob.PARAM_UPLOAD)){ + // get its value: + value = entry.getValue(); + + if (value != null){ + // CASE DALIUpload: just add the upload item inside the list: + if (value instanceof DALIUpload){ + DALIUpload upl = (DALIUpload)value; + uploads.add(upl); + if (!upl.isByReference()) + usedFiles.add(upl.file.paramName); + } + // CASE String: it must be parsed and transformed into a DALIUpload item which will be then added inside the list: + else if (value instanceof String) + fetchDALIUploads(uploads, usedFiles, (String)value, requestParams, allowTAPSyntax, fileManager); + + // CASE Array: + else if 
(value.getClass().isArray()){ + Object[] objects = (Object[])value; + for(Object o : objects){ + if (o != null){ + if (o instanceof DALIUpload) + uploads.add((DALIUpload)o); + else if (o instanceof String) + fetchDALIUploads(uploads, usedFiles, (String)o, requestParams, allowTAPSyntax, fileManager); + } + } + } + } + + // remove this "UPLOAD" parameter ; if it was not NULL, it will be added again in the map but as DALIUpload item(s) after this loop: + it.remove(); + } + } + + // 2. Remove all other files of the request parameters ONLY IF there was a not-NULL "UPLOAD" parameter: + if (uploads.size() > 0){ + it = requestParams.entrySet().iterator(); + while(it.hasNext()){ + entry = it.next(); + value = entry.getValue(); + if (value == null) + it.remove(); + else if (value instanceof UploadFile && !usedFiles.contains(entry.getKey())){ + try{ + ((UploadFile)value).deleteFile(); + }catch(IOException ioe){} + it.remove(); + }else if (value.getClass().isArray()){ + Object[] objects = (Object[])value; + int cnt = objects.length; + for(int i = 0; i < objects.length; i++){ + if (objects[i] == null){ + objects[i] = null; + cnt--; + }else if (objects[i] instanceof UploadFile && !usedFiles.contains(entry.getKey())){ + try{ + ((UploadFile)objects[i]).deleteFile(); + }catch(IOException ioe){} + objects[i] = null; + cnt--; + } + } + if (cnt == 0) + it.remove(); + } + } + } + + // 3. Re-add a new "UPLOAD" parameter gathering all extracted DALI Uploads: + if (uploads.size() > 0) + requestParams.put("UPLOAD", uploads.toArray(new DALIUpload[uploads.size()])); + + return uploads; + } + + /** + *

Fetch all uploads specified in the DALI/TAP "UPLOAD" parameter. + * The fetched {@link DALIUpload}s are added in the given {@link ArrayList}.

+ * + *

Note: A DALI upload can be either a URI or an inline file (specified as "param:{param-ref}").

+ * + * @param uploads List of {@link DALIUpload}s. to update. + * @param usedFiles List of the the names of the referenced file parameters. to update. + * @param uploadParam Value of the "UPLOAD" parameter. + * @param parameters List of all extracted parameters (including {@link UploadFile}(s)). + * @param allowTAPSyntax true to allow a list of several upload items in one "UPLOAD" parameter value (each item separated by a semicolon), + * false to forbid it (and so, multiple upload items shall be submitted individually). + * @param fileManager The file manager to use in order to build a {@link DALIUpload} objects from a URI. + * (a link to the file manager will be set in the {@link DALIUpload} object in order to open it + * whenever it will asked after its creation) + * + * @throws TAPException If the syntax of the given "UPLOAD" parameter is incorrect. + */ + protected static void fetchDALIUploads(final ArrayList uploads, final ArrayList usedFiles, String uploadParam, final Map parameters, final boolean allowTAPSyntax, final UWSFileManager fileManager) throws TAPException{ + if (uploadParam == null || uploadParam.trim().length() <= 0) + return; + + // TAP SYNTAX (list of DALI UPLOAD items, separated by a semicolon): + if (allowTAPSyntax && uploadParam.matches("([^,]+,.+);([^,]+,.+)")){ + Pattern p = Pattern.compile("([^,]+,.+);([^,]+,.+)"); + Matcher m = p.matcher(uploadParam); + while(m != null && m.matches()){ + // Fetch the last UPLOAD item: + DALIUpload upl = fetchDALIUpload(m.group(2), parameters, fileManager); + uploads.add(upl); + if (!upl.isByReference()) + usedFiles.add(upl.file.paramName); + + // Prepare the fetching of the other DALI parameters: + if (m.group(1) != null) + m = p.matcher(uploadParam = m.group(1)); + } + } + + // DALI SYNTAX (only one UPLOAD item): + if (uploadParam.matches("[^,]+,.+")){ + // Fetch the single UPLOAD item: + DALIUpload upl = fetchDALIUpload(uploadParam, parameters, fileManager); + uploads.add(upl); + if 
(!upl.isByReference()) + usedFiles.add(upl.file.paramName); + } + + // /!\ INCORRECT SYNTAX /!\ + else + throw new TAPException("Wrong DALI syntax for the parameter UPLOAD \"" + uploadParam + "\"!", UWSException.BAD_REQUEST); + } + + /** + * Fetch the single upload item (a pair with the syntax: "{label},{URI}". + * + * @param uploadParam Value of the "UPLOAD" parameter. A single upload item is expected ; that's to say something like "{label},{URI}". + * @param parameters List of extracted parameters. The fetched LOB must be added as a new parameter in this map. MUST not be NULL + * @param fileManager The file manager to use in order to build a {@link DALIUpload} objects from a URI. + * (a link to the file manager will be set in the {@link DALIUpload} object in order to open it + * whenever it will asked after its creation) + * + * @return The corresponding {@link DALIUpload} object. + * + * @throws TAPException If the syntax of the given "UPLOAD" parameter is incorrect. + * + * @see #parseDALIParam(String) + * @see #buildDALIUpload(String, String, Map, UWSFileManager) + */ + protected static DALIUpload fetchDALIUpload(final String uploadParam, final Map parameters, final UWSFileManager fileManager) throws TAPException{ + if (uploadParam.matches("[^,]+,.+")){ + // Check and extract the pair parts ([0]=label, [1]=URI): + String[] parts = parseDALIParam(uploadParam); + + // Build the corresponding DALIUpload: + return buildDALIUpload(parts[0], parts[1], parameters, fileManager); + }else + throw new TAPException("Wrong DALI syntax for the parameter UPLOAD \"" + uploadParam + "\"!", UWSException.BAD_REQUEST); + } + + /** + *

Extract the two parts (label and URI) of the given DALI parameter, and then, check their syntax.

+ * + *

Important note: + * It MUST be ensured before calling this function that the given DALI parameter is not NULL + * and contains at least one comma (,). + *

+ * + *

+ * The first comma found in the given string will be the separator of the two parts + * of the given DALI parameter: {label},{URI} + *

+ * + *

+ * The label part - {label} - must start with one letter and may be followed by a letter, + * a digit or an underscore. The corresponding regular expression is: [a-zA-Z][a-zA-Z0-9_]* + *

+ * + *

+ * The URI part - {URI} - must start with a scheme, followed by a colon (:) and then by several characters + * (no restriction). A scheme must start with one letter and may be followed by a letter, + * a digit, a plus (+), a dot (.) or an hyphen/minus (-). The corresponding regular expression is: + * [a-zA-Z][a-zA-Z0-9\+\.-]* + *

+ * + * @param definition MUST BE A PAIR label,value + * + * @return An array of exactly 2 items: [0]=upload label/name, [1]=an URI. (note: the special DALI syntax "param:..." is also a valid URI) + * + * @throws TAPException If the given upload definition is not following the valid DALI syntax. + */ + protected static String[] parseDALIParam(final String definition) throws TAPException{ + // Locate the separator: + int sep = definition.indexOf(','); + if (sep <= 0) + throw new TAPException("A DALI parameter must be a pair whose the items are separated by a colon!", UWSException.INTERNAL_SERVER_ERROR); + + // Extract the two parts: {label},{uri} + String[] parts = new String[]{definition.substring(0, sep),definition.substring(sep + 1)}; + + // Check the label: + if (!parts[0].matches("[a-zA-Z][a-zA-Z0-9_]*")) + throw new TAPException("Wrong uploaded item name syntax: \"" + parts[0] + "\"! An uploaded item must have a label respecting the 'regular_identifier' production of ADQL 2.0 (regular expression: [a-zA-Z][a-zA-Z0-9_]*).", UWSException.BAD_REQUEST); + // Check the URI: + else if (!parts[1].matches("[a-zA-Z][a-zA-Z0-9\\+\\.\\-]*:.+")) + throw new TAPException("Bad URI syntax: \"" + parts[1] + "\"! A URI must start with: \":\", where =\"[a-zA-Z][a-zA-Z0-9+.-]*\".", UWSException.BAD_REQUEST); + + return parts; + } + + /** + *

Build a {@link DALIUpload} corresponding to the specified URI.

+ * + *

+ * If the URI starts, case-insensitively, with "param:", it is then a reference to another request parameter containing a file content. + * In this case, the file content has been already stored inside a local file and represented by an {@link UploadFile} instance in the map. + *

+ * + *

+ * If the URI does not start with "param:", the DALI upload is considered as a reference to a distant file which can be accessed using this URI. + * Any URI scheme is allowed here, but the given file manager should be able to interpret it and open a stream toward the referenced resource + * whenever it will be asked. + *

+ * + *

Note: + * If the URI is not a parameter reference (i.e. started by "param:"), it will be decoded using {@link URLDecoder#decode(String, String)} + * (character encoding: UTF-8). + *

+ * + * @param label Label of the {@link DALIUpload} to build. + * @param uri URI of the LOB. MUST be NOT-NULL + * @param parameters All parameters extracted from an HTTP request by a {@link RequestParser}. + * @param fileManager The file manager to use in order to build a {@link DALIUpload} objects from a URI. + * (a link to the file manager will be set in the {@link DALIUpload} object in order to open it + * whenever it will asked after its creation) + * + * @return The corresponding {@link DALIUpload} object. + * + * @throws TAPException If the parameter reference is broken or if the given URI has a wrong syntax. + */ + protected final static DALIUpload buildDALIUpload(final String label, String uri, final Map parameters, final UWSFileManager fileManager) throws TAPException{ + // FILE case: + if (uri.toLowerCase().startsWith("param:")){ + + // get the specified parameter name: + uri = uri.substring(6); + + // get the corresponding file: + Object obj = parameters.get(uri); + + /* a map value can be an array of objects in case several parameters have the same name ; + * in this case, we just keep the last instance of UploadFile: */ + if (obj != null && obj.getClass().isArray()){ + Object[] objects = (Object[])obj; + obj = null; + for(Object o : objects){ + if (o != null && o instanceof UploadFile) + obj = o; + } + } + + // ensure the type of the retrieved parameter is correct: + if (obj == null) + throw new TAPException("Missing file parameter to upload: \"" + uri + "\"!", UWSException.BAD_REQUEST); + else if (!(obj instanceof UploadFile)) + throw new TAPException("Incorrect parameter type \"" + uri + "\": a file was expected!", UWSException.BAD_REQUEST); + + // build the LOB: + return new DALIUpload(label, (UploadFile)obj); + } + + // URI case: + else{ + // extract the URI as it is given: + uri = uri.trim(); + if (uri.toLowerCase().startsWith("file:")) + throw new TAPException("Wrong URI scheme in the upload specification labeled \"" + label + "\": \"file\" is 
forbidden!", UWSException.BAD_REQUEST); + // decode it in case there is any illegal character: + try{ + uri = URLDecoder.decode(uri, "UTF-8"); + }catch(UnsupportedEncodingException uee){} + try{ + // build the LOB: + return new DALIUpload(label, new URI(uri), fileManager); + }catch(URISyntaxException e){ + throw new TAPException("Incorrect URI syntax: \"" + uri + "\"!", UWSException.BAD_REQUEST); + } + } + } + +} diff --git a/src/tap/parameters/FormatController.java b/src/tap/parameters/FormatController.java index 46f2d2fea72773f756ff2bee96075036eccb7060..0092251fd3f4c03d817708a0c11aea2fa2dff0bb 100644 --- a/src/tap/parameters/FormatController.java +++ b/src/tap/parameters/FormatController.java @@ -16,7 +16,8 @@ package tap.parameters; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.util.Iterator; @@ -25,15 +26,33 @@ import tap.ServiceConnection; import tap.TAPJob; import tap.formatter.OutputFormat; import uws.UWSException; -import uws.UWSExceptionFactory; import uws.job.parameters.InputParamController; -public class FormatController< R > implements InputParamController { +/** + *

Let controlling the format of all job result in a TAP service. + * The default values are provided by the service connection.

+ * + *

Note: + * By default, the format can be modified by anyone without any limitation. + *

+ * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (09/2014) + */ +public class FormatController implements InputParamController { + + /** Connection to the service which knows the maximum and default value of this parameter. */ + protected final ServiceConnection service; - protected final ServiceConnection service; + /** Indicates whether the output limit of jobs can be modified. */ protected boolean allowModification = true; - public FormatController(final ServiceConnection service){ + /** + * Build a controller for the Format parameter. + * + * @param service Connection to the TAP service. + */ + public FormatController(final ServiceConnection service){ this.service = service; } @@ -42,6 +61,11 @@ public class FormatController< R > implements InputParamController { return allowModification; } + /** + * Lets indicating whether the format parameter can be modified. + * + * @param allowModif true if the format can be modified, false otherwise. + */ public final void allowModification(final boolean allowModif){ this.allowModification = allowModif; } @@ -54,7 +78,7 @@ public class FormatController< R > implements InputParamController { @Override public Object check(Object format) throws UWSException{ if (format == null) - return null; + return getDefault(); if (format instanceof String){ String strFormat = ((String)format).trim(); @@ -62,18 +86,24 @@ public class FormatController< R > implements InputParamController { return getDefault(); if (service.getOutputFormat(strFormat) == null) - throw new UWSException(UWSException.BAD_REQUEST, "Unknown output format (=" + strFormat + ") ! This TAP service can format query results ONLY in the following formats:" + getAllowedFormats() + "."); + throw new UWSException(UWSException.BAD_REQUEST, "Unknown value for the parameter \"format\": \"" + strFormat + "\". 
It should be " + getAllowedFormats()); else return strFormat; }else - throw UWSExceptionFactory.badFormat(null, TAPJob.PARAM_FORMAT, format.toString(), format.getClass().getName(), "A String equals to one of the following values: " + getAllowedFormats() + "."); + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Wrong type for the parameter \"format\": class \"" + format.getClass().getName() + "\"! It should be a String."); } - public final String getAllowedFormats(){ - Iterator> itFormats = service.getOutputFormats(); - StringBuffer allowedFormats = new StringBuffer(); + /** + * Get a list of all allowed output formats (for each, the main MIME type + * but also the short type representation are given). + * + * @return List of all output formats. + */ + protected final String getAllowedFormats(){ + Iterator itFormats = service.getOutputFormats(); + StringBuffer allowedFormats = new StringBuffer("a String value among: "); int i = 0; - OutputFormat formatter; + OutputFormat formatter; while(itFormats.hasNext()){ formatter = itFormats.next(); allowedFormats.append((i == 0) ? "" : ", ").append(formatter.getMimeType()); @@ -81,7 +111,10 @@ public class FormatController< R > implements InputParamController { allowedFormats.append(" (or ").append(formatter.getShortMimeType()).append(')'); i++; } - return allowedFormats.toString(); + if (i > 0) + return allowedFormats.toString(); + else + return "a String value."; } } diff --git a/src/tap/parameters/MaxRecController.java b/src/tap/parameters/MaxRecController.java index 08f29b6c605ccce2f2dafb2e3636bda05b630261..9424a0ab61e566c6e22664e50a6bb71a8acc053d 100644 --- a/src/tap/parameters/MaxRecController.java +++ b/src/tap/parameters/MaxRecController.java @@ -17,53 +17,82 @@ package tap.parameters; * along with TAPLibrary. If not, see . 
* * Copyright 2012-2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomisches Rechen Institute (ARI) + * Astronomisches Rechen Institut (ARI) */ import tap.ServiceConnection; import tap.ServiceConnection.LimitUnit; import tap.TAPJob; import uws.UWSException; -import uws.UWSExceptionFactory; import uws.job.parameters.InputParamController; /** - * The logic of the output limit is set in this class. Here it is: + *

Let controlling the maximum number of rows that can be output by a TAP service. + * The maximum and default values are provided by the service connection.

* - * - If no value is specified by the TAP client, none is returned. - * - If no default value is provided, no default limitation is set (={@link TAPJob#UNLIMITED_MAX_REC}). - * - If no maximum value is provided, there is no output limit (={@link TAPJob#UNLIMITED_MAX_REC}). + *

Note: + * By default, this parameter can be modified by anyone without any limitation. + * The default and maximum value is set by default to {@link TAPJob#UNLIMITED_MAX_REC}. + *

* - * @author Grégory Mantelet (CDS;ARI) - gmantele@ari.uni-heidelberg.de - * @version 1.1 (03/2014) + *

Note: + * The special value 0 means that just the metadata of the result must be returned. + * Considering the meaning of this value, it will not be considered as an {@link TAPJob#UNLIMITED_MAX_REC}, + * but like a valid value. The maximum value can then be also 0. + *

+ * + *

The logic of the output limit is set in this class. Here it is:

+ *
    + *
  • If no value is specified by the TAP client, the default value is returned.
  • + *
  • If no default value is provided, the maximum output limit is returned.
  • + *
  • If no maximum value is provided, there is no limit (={@link TAPJob#UNLIMITED_MAX_REC}).
  • + *
+ * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (03/2015) */ public class MaxRecController implements InputParamController { - protected final ServiceConnection service; + /** Connection to the service which knows the maximum and default value of this parameter. */ + protected final ServiceConnection service; /** Indicates whether the output limit of jobs can be modified. */ protected boolean allowModification = true; - public MaxRecController(final ServiceConnection service){ + /** + * Build a controller for the MaxRec parameter. + * + * @param service Connection to the TAP service. + */ + public MaxRecController(final ServiceConnection service){ this.service = service; - allowModification(allowModification); } @Override public final Object getDefault(){ - // If a default output limit is set by the TAP service connection, return it: + // Get the default output limit: + int defaultLimit = TAPJob.UNLIMITED_MAX_REC; if (service.getOutputLimit() != null && service.getOutputLimit().length >= 2 && service.getOutputLimitType() != null && service.getOutputLimitType().length == service.getOutputLimit().length){ - if (service.getOutputLimit()[0] > 0 && service.getOutputLimitType()[0] == LimitUnit.rows) - return service.getOutputLimit()[0]; + if (service.getOutputLimit()[0] >= 0 && service.getOutputLimitType()[0] == LimitUnit.rows) + defaultLimit = service.getOutputLimit()[0]; } - // Otherwise, return no limitation: - return TAPJob.UNLIMITED_MAX_REC; + + // Get the maximum output limit, for comparison: + int maxLimit = getMaxOutputLimit(); + + // Ensure the default limit is less or equal the maximum limit: + return (defaultLimit < 0 || (maxLimit >= 0 && defaultLimit > maxLimit)) ? maxLimit : defaultLimit; } + /** + * Get the maximum number of rows that can be output. + * + * @return Maximum output limit. 
+ */ public final int getMaxOutputLimit(){ // If a maximum output limit is set by the TAP service connection, return it: if (service.getOutputLimit() != null && service.getOutputLimit().length >= 2 && service.getOutputLimitType() != null && service.getOutputLimitType().length == service.getOutputLimit().length){ - if (service.getOutputLimit()[1] > 0 && service.getOutputLimitType()[1] == LimitUnit.rows) + if (service.getOutputLimit()[1] >= 0 && service.getOutputLimitType()[1] == LimitUnit.rows) return service.getOutputLimit()[1]; } // Otherwise, there is no limit: @@ -74,7 +103,7 @@ public class MaxRecController implements InputParamController { public Object check(Object value) throws UWSException{ // If no limit is provided by the TAP client, none is returned: if (value == null) - return null; + return getDefault(); // Parse the provided limit: int maxOutputLimit = getMaxOutputLimit(); @@ -86,17 +115,17 @@ public class MaxRecController implements InputParamController { try{ maxRec = Integer.parseInt(strValue); }catch(NumberFormatException nfe){ - throw UWSExceptionFactory.badFormat(null, TAPJob.PARAM_MAX_REC, strValue, null, "An integer value between " + TAPJob.UNLIMITED_MAX_REC + " and " + maxOutputLimit + " (Default value: " + defaultOutputLimit + ")."); + throw new UWSException(UWSException.BAD_REQUEST, "Wrong format for the parameter \"maxrec\": \"" + strValue + "\"! It should be a integer value between " + TAPJob.UNLIMITED_MAX_REC + " and " + maxOutputLimit + " (Default value: " + defaultOutputLimit + ")."); } }else - throw UWSExceptionFactory.badFormat(null, TAPJob.PARAM_MAX_REC, null, value.getClass().getName(), "An integer value between " + TAPJob.UNLIMITED_MAX_REC + " and " + maxOutputLimit + " (Default value: " + defaultOutputLimit + ")."); + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Wrong type for the parameter \"maxrec\": class \"" + value.getClass().getName() + "\"! 
It should be an integer or a string containing only an integer value."); // A negative output limit is considered as an unlimited output limit: - if (maxRec < TAPJob.UNLIMITED_MAX_REC) + if (maxRec < 0) maxRec = TAPJob.UNLIMITED_MAX_REC; // If the limit is greater than the maximum one, an exception is thrown: - if (maxRec == TAPJob.UNLIMITED_MAX_REC || maxRec > maxOutputLimit) + if (maxRec < 0 || (maxOutputLimit >= 0 && maxRec > maxOutputLimit)) maxRec = maxOutputLimit; return maxRec; diff --git a/src/tap/parameters/TAPDestructionTimeController.java b/src/tap/parameters/TAPDestructionTimeController.java index 33444bdcf35ecae0b0515686f98552e69478f774..076cc0524524a913d9ed0ebda5f67b07d31f3969 100644 --- a/src/tap/parameters/TAPDestructionTimeController.java +++ b/src/tap/parameters/TAPDestructionTimeController.java @@ -16,7 +16,8 @@ package tap.parameters; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.text.ParseException; @@ -24,21 +25,44 @@ import java.util.Calendar; import java.util.Date; import tap.ServiceConnection; -import tap.TAPJob; - +import uws.ISO8601Format; import uws.UWSException; -import uws.UWSExceptionFactory; - -import uws.job.UWSJob; -import uws.job.parameters.InputParamController; import uws.job.parameters.DestructionTimeController.DateField; +import uws.job.parameters.InputParamController; +/** + *

Let controlling the destruction time of all jobs managed by a TAP service. + * The maximum and default values are provided by the service connection.

+ * + *

Note: + * By default, the destruction time can be modified by anyone without any limitation. + * There is no default value (that means jobs may stay forever). + *

+ * + *

The logic of the destruction time is set in this class. Here it is:

+ *
    + *
  • If no value is specified by the UWS client, the default value is returned.
  • + *
  • If no default value is provided, the maximum destruction date is returned.
  • + *
  • If no maximum value is provided, there is no destruction.
  • + *
+ * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (11/2014) + */ public class TAPDestructionTimeController implements InputParamController { - protected final ServiceConnection service; + /** Connection to the service which knows the maximum and default value of this parameter. */ + protected final ServiceConnection service; + + /** Indicates whether the execution duration of jobs can be modified. */ protected boolean allowModification = true; - public TAPDestructionTimeController(final ServiceConnection service){ + /** + * Build a controller for the Destruction parameter. + * + * @param service Connection to the TAP service. + */ + public TAPDestructionTimeController(final ServiceConnection service){ this.service = service; } @@ -47,10 +71,21 @@ public class TAPDestructionTimeController implements InputParamController { return allowModification; } + /** + * Let indicate whether the destruction time of any managed job can be modified. + * + * @param allowModif true if the destruction time can be modified, false otherwise. + */ public final void allowModification(final boolean allowModif){ allowModification = allowModif; } + /** + * Get the default period during which a job is kept. + * After this period, the job should be destroyed. + * + * @return The default retention period, -1 if none is provided. 
+ */ public final int getDefaultRetentionPeriod(){ if (service.getRetentionPeriod() != null && service.getRetentionPeriod().length >= 2){ if (service.getRetentionPeriod()[0] > 0) @@ -61,19 +96,31 @@ public class TAPDestructionTimeController implements InputParamController { @Override public final Object getDefault(){ + // Get the default period and ensure it is always less or equal the maximum period, if any: int defaultPeriod = getDefaultRetentionPeriod(); + int maxPeriod = getMaxRetentionPeriod(); + if (defaultPeriod <= 0 || (maxPeriod > 0 && defaultPeriod > maxPeriod)) + defaultPeriod = maxPeriod; + + // Build and return the date: if (defaultPeriod > 0){ Calendar date = Calendar.getInstance(); try{ date.add(DateField.SECOND.getFieldIndex(), defaultPeriod); return date.getTime(); - }catch(ArrayIndexOutOfBoundsException ex){ - return null; - } - }else - return null; + }catch(ArrayIndexOutOfBoundsException ex){} + } + + // If no default period is specified or if an exception occurs, the maximum destruction time must be returned: + return getMaxDestructionTime(); } + /** + * Get the maximum period during which a job is kept. + * After this period, the job should be destroyed. + * + * @return The maximum retention period, -1 if none is provided. + */ public final int getMaxRetentionPeriod(){ if (service.getRetentionPeriod() != null && service.getRetentionPeriod().length >= 2){ if (service.getRetentionPeriod()[1] > 0) @@ -82,42 +129,54 @@ public class TAPDestructionTimeController implements InputParamController { return -1; } + /** + * Gets the maximum destruction time: either computed with an interval of time or obtained directly by a maximum destruction time. + * + * @return The maximum destruction time (null means that jobs may stay forever). 
+ */ public final Date getMaxDestructionTime(){ + // Get the maximum period: int maxPeriod = getMaxRetentionPeriod(); + + // Build and return the maximum destruction date: if (maxPeriod > 0){ Calendar date = Calendar.getInstance(); try{ date.add(DateField.SECOND.getFieldIndex(), maxPeriod); return date.getTime(); - }catch(ArrayIndexOutOfBoundsException ex){ - return null; - } - }else - return null; + }catch(ArrayIndexOutOfBoundsException ex){} + } + + // If no maximum period is specified or if an exception occurs, NULL must be returned: + return null; } @Override public Object check(Object value) throws UWSException{ + // If NULL value, return the default value: if (value == null) - return null; + return getDefault(); + // Parse the given date: Date date = null; if (value instanceof Date) date = (Date)value; else if (value instanceof String){ String strValue = (String)value; try{ - date = UWSJob.dateFormat.parse(strValue); + date = ISO8601Format.parseToDate(strValue); }catch(ParseException pe){ - throw UWSExceptionFactory.badFormat(null, TAPJob.PARAM_DESTRUCTION_TIME, strValue, null, "A date not yet expired."); + throw new UWSException(UWSException.BAD_REQUEST, pe, "Wrong date format for the parameter \"destruction\": \"" + strValue + "\"! A date must be formatted in the ISO8601 format (\"yyyy-MM-dd'T'hh:mm:ss[.sss]['Z'|[+|-]hh:mm]\", fields inside brackets are optional)."); } }else - throw UWSExceptionFactory.badFormat(null, TAPJob.PARAM_DESTRUCTION_TIME, value.toString(), value.getClass().getName(), "A date not yet expired."); + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Wrong type for the parameter \"destruction\": class \"" + value.getClass().getName() + "\"! 
It should be a Date or a string containing a date formatted in ISO8601 (\"yyyy-MM-dd'T'hh:mm:ss[.sss]['Z'|[+|-]hh:mm]\", fields inside brackets are optional)."); + // Ensure the date is before the maximum destruction time (from now): Date maxDate = getMaxDestructionTime(); if (maxDate != null && date.after(maxDate)) - throw new UWSException(UWSException.BAD_REQUEST, "The TAP service limits the DESTRUCTION INTERVAL (since now) to " + getMaxRetentionPeriod() + " s !"); + date = maxDate; + // Return the parsed date return date; } diff --git a/src/tap/parameters/TAPExecutionDurationController.java b/src/tap/parameters/TAPExecutionDurationController.java index 68f5797e6bb8860f3917d749ce4083da49fb0388..0f3985c601c192c8a3e9b22157bf3ed008def222 100644 --- a/src/tap/parameters/TAPExecutionDurationController.java +++ b/src/tap/parameters/TAPExecutionDurationController.java @@ -16,23 +16,48 @@ package tap.parameters; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import tap.ServiceConnection; import tap.TAPJob; - import uws.UWSException; -import uws.UWSExceptionFactory; - import uws.job.parameters.InputParamController; +/** + *

Let controlling the execution duration of all jobs managed by a TAP service. + * The maximum and default values are provided by the service connection.

+ * + *

Note: + * By default, the execution duration can be modified by anyone without any limitation. + * The default value is {@link TAPJob#UNLIMITED_DURATION}. + *

+ * + *

The logic of the execution duration is set in this class. Here it is:

+ *
    + *
  • If no value is specified by the TAP client, the default value is returned.
  • + *
  • If no default value is provided, the maximum duration is returned.
  • + *
  • If no maximum value is provided, there is no limit (={@link TAPJob#UNLIMITED_DURATION}).
  • + *
+ * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (11/2014) + */ public class TAPExecutionDurationController implements InputParamController { - protected final ServiceConnection service; + /** Connection to the service which knows the maximum and default value of this parameter. */ + protected final ServiceConnection service; + + /** Indicate whether the execution duration of jobs can be modified. */ protected boolean allowModification = true; - public TAPExecutionDurationController(final ServiceConnection service){ + /** + * Build a controller for the ExecutionDuration parameter. + * + * @param service Connection to the TAP service. + */ + public TAPExecutionDurationController(final ServiceConnection service){ this.service = service; } @@ -41,19 +66,32 @@ public class TAPExecutionDurationController implements InputParamController { return allowModification; } + /** + * Let indicate whether the execution duration of any managed job can be modified. + * + * @param allowModif true if the execution duration can be modified, false otherwise. + */ public final void allowModification(final boolean allowModif){ allowModification = allowModif; } @Override public final Object getDefault(){ - if (service.getExecutionDuration() != null && service.getExecutionDuration().length >= 2){ - if (service.getExecutionDuration()[0] > 0) - return service.getExecutionDuration()[0]; - } - return TAPJob.UNLIMITED_DURATION; + // Get the default value from the service connection: + long defaultVal = TAPJob.UNLIMITED_DURATION; + if (service.getExecutionDuration() != null && service.getExecutionDuration().length >= 2) + defaultVal = service.getExecutionDuration()[0]; + + // The default value is also limited by the maximum value if any: + long maxVal = getMaxDuration(); + return (defaultVal <= 0 || (maxVal > 0 && defaultVal > maxVal)) ? maxVal : defaultVal; } + /** + * Gets the maximum execution duration. 
+ * + * @return The maximum execution duration (0 or less mean an unlimited duration). + */ public final long getMaxDuration(){ if (service.getExecutionDuration() != null && service.getExecutionDuration().length >= 2){ if (service.getExecutionDuration()[1] > 0) @@ -63,28 +101,36 @@ public class TAPExecutionDurationController implements InputParamController { } @Override - public Object check(Object value) throws UWSException{ + public Object check(final Object value) throws UWSException{ + // If no value, return the default one: if (value == null) - return null; + return getDefault(); - long defaultDuration = ((Long)getDefault()).longValue(), maxDuration = getMaxDuration(); - Long duration; + // Get the default and maximum durations for comparison: + long defaultDuration = (Long)getDefault(), maxDuration = getMaxDuration(); + // Parse the given duration: + Long duration; if (value instanceof Long) duration = (Long)value; + else if (value instanceof Integer) + duration = (long)((Integer)value).intValue(); else if (value instanceof String){ try{ duration = Long.parseLong((String)value); }catch(NumberFormatException nfe){ - throw UWSExceptionFactory.badFormat(null, TAPJob.PARAM_EXECUTION_DURATION, value.toString(), null, "A long value between " + TAPJob.UNLIMITED_DURATION + " and " + maxDuration + " (Default value: " + defaultDuration + ")."); + throw new UWSException(UWSException.BAD_REQUEST, "Wrong format for the parameter \"executionduration\": \"" + value.toString() + "\"! 
It should be a long numeric value between " + TAPJob.UNLIMITED_DURATION + " and " + maxDuration + " (Default value: " + defaultDuration + ")."); } }else - throw UWSExceptionFactory.badFormat(null, TAPJob.PARAM_EXECUTION_DURATION, null, value.getClass().getName(), "A long value between " + TAPJob.UNLIMITED_DURATION + " and " + maxDuration + " (Default value: " + defaultDuration + ")."); + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Wrong type for the parameter \"executionduration\": class \"" + value.getClass().getName() + "\"! It should be long or a string containing only a long value."); - if (duration < TAPJob.UNLIMITED_DURATION) + // A negative value must be considered as an unlimited duration: + if (duration <= 0) duration = TAPJob.UNLIMITED_DURATION; - else if (maxDuration > TAPJob.UNLIMITED_DURATION && duration > maxDuration) - throw new UWSException(UWSException.BAD_REQUEST, "The TAP service limits the execution duration to maximum " + maxDuration + " seconds !"); + + // Ensure the given value is less than the maximum duration: + if (maxDuration > 0 && (duration > maxDuration || duration <= 0)) + duration = maxDuration; return duration; } diff --git a/src/tap/parameters/TAPParameters.java b/src/tap/parameters/TAPParameters.java index f45c833ff840e87d28ac3c7a5e9a9a93f16a473b..e59109297780ef8b554347e69a0f85da0fab1d08 100644 --- a/src/tap/parameters/TAPParameters.java +++ b/src/tap/parameters/TAPParameters.java @@ -16,219 +16,224 @@ package tap.parameters; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . 
* - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ -import java.io.File; -import java.io.IOException; - -import java.util.Collection; -import java.util.Date; -import java.util.Enumeration; +import java.util.Arrays; import java.util.HashMap; +import java.util.Iterator; +import java.util.List; import java.util.Map; +import java.util.Map.Entry; import javax.servlet.http.HttpServletRequest; -import com.oreilly.servlet.MultipartRequest; -import com.oreilly.servlet.multipart.FileRenamePolicy; - import tap.ServiceConnection; import tap.TAPException; import tap.TAPJob; - -import tap.upload.TableLoader; - import uws.UWSException; - import uws.job.parameters.InputParamController; import uws.job.parameters.StringParamController; import uws.job.parameters.UWSParameters; /** - * This class describes all defined parameters of a TAP request. + * This class lets list and describe all standard TAP parameters + * submitted by a TAP client to this TAP service. * - * @author Grégory Mantelet (CDS) - * @version 06/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (12/2014) */ public class TAPParameters extends UWSParameters { + /** All the TAP parameters. */ + protected static final List TAP_PARAMETERS = Arrays.asList(new String[]{TAPJob.PARAM_REQUEST,TAPJob.PARAM_LANGUAGE,TAPJob.PARAM_VERSION,TAPJob.PARAM_FORMAT,TAPJob.PARAM_QUERY,TAPJob.PARAM_MAX_REC,TAPJob.PARAM_UPLOAD}); + /** - * All the TAP parameters. + * Create an empty list of parameters. + * + * @param service Description of the TAP service in which the parameters are created and will be used. */ - protected static final String[] TAP_PARAMETERS = new String[]{TAPJob.PARAM_REQUEST,TAPJob.PARAM_LANGUAGE,TAPJob.PARAM_VERSION,TAPJob.PARAM_FORMAT,TAPJob.PARAM_QUERY,TAPJob.PARAM_MAX_REC,TAPJob.PARAM_UPLOAD}; - - /** Part of HTTP content type header. 
*/ - public static final String MULTIPART = "multipart/"; - - /** All the tables to upload. If NULL, there is no tables to upload. */ - protected TableLoader[] tablesToUpload = null; - - @SuppressWarnings({"unchecked"}) - public TAPParameters(final ServiceConnection service){ - this(service, (Collection)null, null); + public TAPParameters(final ServiceConnection service){ + super(TAP_PARAMETERS, buildDefaultControllers(service)); } - public TAPParameters(final ServiceConnection service, final Collection expectedAdditionalParams, final Map inputParamControllers){ - super(expectedAdditionalParams, inputParamControllers); - initDefaultTAPControllers(service); - } - - public TAPParameters(final HttpServletRequest request, final ServiceConnection service) throws UWSException, TAPException{ - this(request, service, null, null); + /** + * Create a {@link TAPParameters} instance whose the parameters must be extracted from the given {@link HttpServletRequest}. + * + * @param request HTTP request containing the parameters to gather inside this class. + * @param service Description of the TAP service in which the parameters are created and will be used. + * + * @throws TAPException If any error occurs while extracting the DALIParameters OR while setting a parameter. + * + * @see #getParameters(HttpServletRequest) + */ + public TAPParameters(final HttpServletRequest request, final ServiceConnection service) throws TAPException{ + this(service, getParameters(request)); } - @SuppressWarnings("unchecked") - public TAPParameters(final HttpServletRequest request, final ServiceConnection service, final Collection expectedAdditionalParams, final Map inputParamControllers) throws UWSException, TAPException{ - this(service, expectedAdditionalParams, inputParamControllers); - MultipartRequest multipart = null; + /** + * Create a {@link TAPParameters} instance whose the parameters are given in parameter. + * + * @param service Description of the TAP service. 
Limits of the standard TAP parameters are listed in it. + * @param params List of parameters to load inside this object. + * + * @throws TAPException If any error occurs while extracting the DALIParameters OR while setting a parameter. + */ + public TAPParameters(final ServiceConnection service, final Map params) throws TAPException{ + super(TAP_PARAMETERS, buildDefaultControllers(service)); - // Multipart HTTP parameters: - if (isMultipartContent(request)){ - if (!service.uploadEnabled()) - throw new TAPException("Request error ! This TAP service has no Upload capability !"); + if (params != null && !params.isEmpty()){ + // Deal with the UPLOAD parameter(s): + DALIUpload.getDALIUploads(params, true, service.getFileManager()); - File uploadDir = service.getFileManager().getUploadDirectory(); + // Load all parameters: + Iterator> it = params.entrySet().iterator(); + Entry entry; try{ - multipart = new MultipartRequest(request, (uploadDir != null) ? uploadDir.getAbsolutePath() : null, service.getMaxUploadSize(), new FileRenamePolicy(){ - @Override - public File rename(File file){ - return new File(file.getParentFile(), (new Date()).toString() + "_" + file.getName()); - } - }); - Enumeration e = multipart.getParameterNames(); - while(e.hasMoreElements()){ - String param = e.nextElement(); - set(param, multipart.getParameter(param)); + while(it.hasNext()){ + entry = it.next(); + set(entry.getKey(), entry.getValue()); } - }catch(IOException ioe){ - throw new TAPException("Error while reading the Multipart content !", ioe); - }catch(IllegalArgumentException iae){ - String confError = iae.getMessage(); - if (service.getMaxUploadSize() <= 0) - confError = "The maximum upload size (see ServiceConnection.getMaxUploadSize() must be positive !"; - else if (uploadDir == null) - confError = "Missing upload directory (see TAPFileManager.getUploadDirectory()) !"; - throw new TAPException("Incorrect Upload capability configuration ! 
" + confError, iae); - } - - }// Classic HTTP parameters (GET or POST): - else{ - // Extract and identify each pair (key,value): - Enumeration e = request.getParameterNames(); - while(e.hasMoreElements()){ - String name = e.nextElement(); - set(name, request.getParameter(name)); + }catch(UWSException ue){ + throw new TAPException(ue); } } - - // Identify the tables to upload, if any: - String uploadParam = getUpload(); - if (service.uploadEnabled() && uploadParam != null) - tablesToUpload = buildLoaders(uploadParam, multipart); } - public TAPParameters(final ServiceConnection service, final Map params) throws UWSException, TAPException{ - this(service, params, null, null); - } - - public TAPParameters(final ServiceConnection service, final Map params, final Collection expectedAdditionalParams, final Map inputParamControllers) throws UWSException, TAPException{ - super(params, expectedAdditionalParams, inputParamControllers); - initDefaultTAPControllers(service); - } - - @Override - protected final HashMap getDefaultControllers(){ - return new HashMap(10); - } - - protected < R > void initDefaultTAPControllers(final ServiceConnection service){ - if (!mapParamControllers.containsKey(TAPJob.PARAM_EXECUTION_DURATION)) - mapParamControllers.put(TAPJob.PARAM_EXECUTION_DURATION, new TAPExecutionDurationController(service)); - - if (!mapParamControllers.containsKey(TAPJob.PARAM_DESTRUCTION_TIME)) - mapParamControllers.put(TAPJob.PARAM_DESTRUCTION_TIME, new TAPDestructionTimeController(service)); - - if (!mapParamControllers.containsKey(TAPJob.PARAM_REQUEST)) - mapParamControllers.put(TAPJob.PARAM_REQUEST, new StringParamController(TAPJob.PARAM_REQUEST, null, new String[]{TAPJob.REQUEST_DO_QUERY,TAPJob.REQUEST_GET_CAPABILITIES}, true)); - - if (!mapParamControllers.containsKey(TAPJob.PARAM_LANGUAGE)) - mapParamControllers.put(TAPJob.PARAM_LANGUAGE, new StringParamController(TAPJob.PARAM_LANGUAGE, TAPJob.LANG_ADQL, null, true)); - - if 
(!mapParamControllers.containsKey(TAPJob.PARAM_VERSION)) - mapParamControllers.put(TAPJob.PARAM_VERSION, new StringParamController(TAPJob.PARAM_VERSION, TAPJob.VERSION_1_0, new String[]{TAPJob.VERSION_1_0}, true)); - - if (!mapParamControllers.containsKey(TAPJob.PARAM_QUERY)) - mapParamControllers.put(TAPJob.PARAM_QUERY, new StringParamController(TAPJob.PARAM_QUERY)); - - if (!mapParamControllers.containsKey(TAPJob.PARAM_UPLOAD)) - mapParamControllers.put(TAPJob.PARAM_UPLOAD, new StringParamController(TAPJob.PARAM_UPLOAD)); - - if (!mapParamControllers.containsKey(TAPJob.PARAM_FORMAT)) - mapParamControllers.put(TAPJob.PARAM_FORMAT, new FormatController(service)); - - if (!mapParamControllers.containsKey(TAPJob.PARAM_MAX_REC)) - mapParamControllers.put(TAPJob.PARAM_MAX_REC, new MaxRecController(service)); - } - - @Override - protected String normalizeParamName(String name){ - if (name != null && !name.trim().isEmpty()){ - for(String tapParam : TAP_PARAMETERS){ - if (name.equalsIgnoreCase(tapParam)) - return tapParam; - } - } - return super.normalizeParamName(name); + /** + *

Build a map containing all controllers for all standard TAP parameters.

+ * + *

Note: + * All standard parameters, except UPLOAD. Indeed, since this parameter can be provided in several times (in one HTTP request) + * and needs to be interpreted immediately after initialization, no controller has been set for it. Its value will be actually + * tested in the constructor while interpreting it. + *

+ * + * @param service Description of the TAP service. + * + * @return Map of all default controllers. + * + * @since 2.0 + */ + protected static final Map buildDefaultControllers(final ServiceConnection service){ + Map controllers = new HashMap(10); + controllers.put(TAPJob.PARAM_EXECUTION_DURATION, new TAPExecutionDurationController(service)); + controllers.put(TAPJob.PARAM_DESTRUCTION_TIME, new TAPDestructionTimeController(service)); + controllers.put(TAPJob.PARAM_REQUEST, new StringParamController(TAPJob.PARAM_REQUEST, null, new String[]{TAPJob.REQUEST_DO_QUERY,TAPJob.REQUEST_GET_CAPABILITIES}, true)); + controllers.put(TAPJob.PARAM_LANGUAGE, new StringParamController(TAPJob.PARAM_LANGUAGE, TAPJob.LANG_ADQL, null, true)); + controllers.put(TAPJob.PARAM_VERSION, new StringParamController(TAPJob.PARAM_VERSION, TAPJob.VERSION_1_0, new String[]{TAPJob.VERSION_1_0}, true)); + controllers.put(TAPJob.PARAM_QUERY, new StringParamController(TAPJob.PARAM_QUERY)); + controllers.put(TAPJob.PARAM_FORMAT, new FormatController(service)); + controllers.put(TAPJob.PARAM_MAX_REC, new MaxRecController(service)); + return controllers; } - @Override - public String[] update(UWSParameters newParams) throws UWSException{ - if (newParams != null && !(newParams instanceof TAPParameters)) - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Can not update a TAPParameters instance with only a UWSException !"); - - String[] updated = super.update(newParams); - for(String p : updated){ - if (p.equals(TAPJob.PARAM_UPLOAD)){ - tablesToUpload = ((TAPParameters)newParams).tablesToUpload; - break; + /** + *

Get the value of the given parameter, but as a String, whatever is its original type.

+ * + *

Basically, the different cases of conversion into String are the following:

+ *
    + *
  • NULL: NULL is returned.
  • + *
  • An array (of whatever is the items' type): a string in which each Object.toString() are concatenated ; each item is separated by a semicolon
  • + *
  • Anything else: Object.toString()
  • + *
+ * + * @param paramName Name of the parameter whose the value must be returned as a String. + * + * @return The string value of the specified parameter. + */ + protected final String getStringParam(final String paramName){ + // Get the parameter value as an Object: + Object value = params.get(paramName); + + // Convert this Object into a String: + // CASE: NULL + if (value == null) + return null; + + // CASE: ARRAY + else if (value.getClass().isArray()){ + StringBuffer buf = new StringBuffer(); + for(Object o : (Object[])value){ + if (buf.length() > 0) + buf.append(';'); + buf.append(o.toString()); } + return buf.toString(); } - return updated; - } - - protected final String getStringParam(final String paramName){ - return (params.get(paramName) != null) ? params.get(paramName).toString() : null; + // DEFAULT: + else + return value.toString(); } + /** + * Get the value of the standard TAP parameter "REQUEST". + * @return "REQUEST" value. + */ public final String getRequest(){ return getStringParam(TAPJob.PARAM_REQUEST); } + /** + * Get the value of the standard TAP parameter "LANG". + * @return "LANG" value. + */ public final String getLang(){ return getStringParam(TAPJob.PARAM_LANGUAGE); } + /** + * Get the value of the standard TAP parameter "VERSION". + * @return "VERSION" value. + */ public final String getVersion(){ return getStringParam(TAPJob.PARAM_VERSION); } + /** + * Get the value of the standard TAP parameter "FORMAT". + * @return "FORMAT" value. + */ public final String getFormat(){ return getStringParam(TAPJob.PARAM_FORMAT); } + /** + * Get the value of the standard TAP parameter "QUERY". + * @return "QUERY" value. + */ public final String getQuery(){ return getStringParam(TAPJob.PARAM_QUERY); } + /** + *

Get the value of the standard TAP parameter "UPLOAD".

+ *

Note: + * This parameter is generally a set of several Strings, each representing one table to upload. + * This function returns this set as a String in which each items are joined, semicolon separated, inside a single String. + *

+ * @return "UPLOAD" value. + */ public final String getUpload(){ return getStringParam(TAPJob.PARAM_UPLOAD); } - public final TableLoader[] getTableLoaders(){ - return tablesToUpload; + /** + * Get the list of all tables uploaded and defined by the standard TAP parameter "UPLOAD". + * + * @return Tables to upload in database at query execution. + */ + public final DALIUpload[] getUploadedTables(){ + return (DALIUpload[])get(TAPJob.PARAM_UPLOAD); } + /** + * Get the value of the standard TAP parameter "MAX_REC". + * This value is the maximum number of rows that the result of the query must contain. + * + * @return Maximum number of output rows. + */ public final Integer getMaxRec(){ Object value = params.get(TAPJob.PARAM_MAX_REC); if (value != null){ @@ -250,93 +255,26 @@ public class TAPParameters extends UWSParameters { } /** - * Utility method that determines whether the request contains multipart - * content. - * - * @param request The servlet request to be evaluated. Must be non-null. - * - * @return true if the request is multipart; - * false otherwise. - */ - public static final boolean isMultipartContent(HttpServletRequest request){ - if (!"post".equals(request.getMethod().toLowerCase())){ - return false; - } - String contentType = request.getContentType(); - if (contentType == null){ - return false; - } - if (contentType.toLowerCase().startsWith(MULTIPART)){ - return true; - } - return false; - } - - /** - * Builds as many TableLoader instances as tables to upload. - * - * @param upload The upload field (syntax: "tableName1,URI1 ; tableName2,URI2 ; ...", where URI may start by "param:" to indicate that the VOTable is inline). - * @param multipart The multipart content of the request if any. + *

Check the coherence between all TAP parameters.

* - * @return All table loaders (one per table to upload). + *

+ * This function does not test individually each parameters, but all of them as a coherent whole. + * Thus, the parameter REQUEST must be provided and if its value is "doQuery", the parameters LANG and QUERY must be also provided. + *

* - * @throws TAPException If the syntax of the "upload" field is incorrect. + * @throws TAPException If one required parameter is missing. */ - private TableLoader[] buildLoaders(final String upload, final MultipartRequest multipart) throws TAPException{ - if (upload == null || upload.trim().isEmpty()) - return new TableLoader[0]; - - String[] pairs = upload.split(";"); - TableLoader[] loaders = new TableLoader[pairs.length]; - - for(int i = 0; i < pairs.length; i++){ - String[] table = pairs[i].split(","); - if (table.length != 2) - throw new TAPException("Bad syntax ! An UPLOAD parameter must contain a list of pairs separated by a ';'. Each pair is composed of 2 parts, a table name and a URI separated by a ','."); - loaders[i] = new TableLoader(table[0], table[1], multipart); - } - - return loaders; - } - public void check() throws TAPException{ // Check that required parameters are not NON-NULL: String requestParam = getRequest(); if (requestParam == null) - throw new TAPException("The parameter \"" + TAPJob.PARAM_REQUEST + "\" must be provided and its value must be equal to \"" + TAPJob.REQUEST_DO_QUERY + "\" or \"" + TAPJob.REQUEST_GET_CAPABILITIES + "\" !"); + throw new TAPException("The parameter \"" + TAPJob.PARAM_REQUEST + "\" must be provided and its value must be equal to \"" + TAPJob.REQUEST_DO_QUERY + "\" or \"" + TAPJob.REQUEST_GET_CAPABILITIES + "\"!", UWSException.BAD_REQUEST); if (requestParam.equals(TAPJob.REQUEST_DO_QUERY)){ if (get(TAPJob.PARAM_LANGUAGE) == null) - throw new TAPException("The parameter \"" + TAPJob.PARAM_LANGUAGE + "\" must be provided if " + TAPJob.PARAM_REQUEST + "=" + TAPJob.REQUEST_DO_QUERY + " !"); + throw new TAPException("The parameter \"" + TAPJob.PARAM_LANGUAGE + "\" must be provided if " + TAPJob.PARAM_REQUEST + "=" + TAPJob.REQUEST_DO_QUERY + "!", UWSException.BAD_REQUEST); else if (get(TAPJob.PARAM_QUERY) == null) - throw new TAPException("The parameter \"" + TAPJob.PARAM_QUERY + "\" must be provided if " + 
TAPJob.PARAM_REQUEST + "=" + TAPJob.REQUEST_DO_QUERY + " !"); - } - - // Check the version if needed: - /*Object versionParam = get(TAPJob.PARAM_VERSION); - if (versionParam != null && !versionParam.equals("1") && !versionParam.equals("1.0")) - throw new TAPException("Version \""+versionParam+"\" of TAP not implemented !");*/ - - /*// Check format if needed: - if (format == null) - format = FORMAT_VOTABLE; - - // Check maxrec: - if (maxrec <= -1) - maxrec = defaultOutputLimit; - - if (maxOutputLimit > -1){ - if (maxrec > maxOutputLimit) - maxrec = maxOutputLimit; - else if (maxrec <= -1) - maxrec = maxOutputLimit; - }*/ - } - - public static final void deleteUploadedTables(final TableLoader[] loaders){ - if (loaders != null){ - for(TableLoader loader : loaders) - loader.deleteFile(); + throw new TAPException("The parameter \"" + TAPJob.PARAM_QUERY + "\" must be provided if " + TAPJob.PARAM_REQUEST + "=" + TAPJob.REQUEST_DO_QUERY + "!", UWSException.BAD_REQUEST); } } } diff --git a/src/tap/resource/ASync.java b/src/tap/resource/ASync.java index 38ba10cc2cb45fda3c76f05c31245cefd1a7b7bf..7aef6e2dd72462ce1249c9c10f4bdc90825998fd 100644 --- a/src/tap/resource/ASync.java +++ b/src/tap/resource/ASync.java @@ -16,10 +16,12 @@ package tap.resource; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . 
* - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.IOException; + import javax.servlet.ServletConfig; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; @@ -27,21 +29,74 @@ import javax.servlet.http.HttpServletResponse; import tap.ServiceConnection; import tap.TAPException; +import tap.TAPJob; import uws.UWSException; import uws.job.JobList; +import uws.job.UWSJob; +import uws.job.manager.AbstractQueuedExecutionManager; +import uws.job.manager.QueuedExecutionManager; import uws.service.UWSService; import uws.service.backup.UWSBackupManager; +import uws.service.log.UWSLog; +import uws.service.log.UWSLog.LogLevel; +/** + *

Asynchronous resource of a TAP service.

+ * + *

+ * Requests sent to this resource are ADQL queries (plus some execution parameters) to execute asynchronously. + * Results and/or errors of the execution are stored on the server side and can be fetched by the user whenever he wants. + *

+ * + *

+ * This resource is actually another VO service: a UWS (Universal Worker Service pattern). + * That's why all requests sent to this resource are actually forwarded to an instance of {@link UWSService}. + * All the behavior of UWS described by the IVOA is already fully implemented by this implementation. + *

+ * + *

This resource is also representing the only jobs' list of this UWS service.

+ * + *

The UWS service is created and configured at the creation of this resource. Here are the list of the most important configured elements:

+ *
    + *
  • User identification: the user identifier is the same as the one used by the TAP service. It is provided by the given {@link ServiceConnection}.
  • + *
  • Jobs' lists: the /async resource of TAP contains only one jobs' list. Its name is "async" and is accessed directly when requesting the /async resource.
  • + *
  • Job execution management: an execution manager is created at the creation of this resource. It is queuing jobs when a maximum number of asynchronous jobs + * is already running. This maximum is provided by the TAP service description: {@link ServiceConnection#getNbMaxAsyncJobs()}. Jobs are also queued if no more DB + * connection is available ; when connection(s) will be available, this resource will be notified by {@link #freeConnectionAvailable()} so that the execution manager + * can be refreshed.
  • + *
  • Backup and Restoration: UWS jobs can be saved at any defined moment. It is particularly useful when an grave error occurs and merely when the service must be restarted. + * Then, at the creation of this resource, the jobs are restored. Thus, the restart has been transparent for the users: they did not lose any job + * (except those at the origin of the grave error maybe).
  • + *
  • Error logging: the created {@link UWSService} instance is using the same logger as the TAP service. It is also provided by the given {@link ServiceConnection} object at creation.
  • + *
+ * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (04/2015) + * + * @see UWSService + */ public class ASync implements TAPResource { + /** Name of this TAP resource. */ public static final String RESOURCE_NAME = "async"; - @SuppressWarnings("unchecked") + /** Description of the TAP service owning this resource. */ protected final ServiceConnection service; + /** UWS service represented by this TAP resource. */ protected final UWSService uws; - - @SuppressWarnings("unchecked") - public ASync(ServiceConnection service) throws UWSException, TAPException{ + /** The only jobs' list managed by the inner UWS service. This resource represent the UWS but also this jobs' list. */ + protected final JobList jobList; + + /** + * Build an Asynchronous Resource of a TAP service. + * + * @param service Description of the TAP service which will own this resource. + * + * @throws TAPException If any error occurs while creating a UWS service or its backup manager. + * @throws UWSException If any error occurs while setting a new execution manager to the recent inner UWS service, + * or while restoring a UWS backup. 
+ */ + public ASync(final ServiceConnection service) throws UWSException, TAPException{ this.service = service; uws = service.getFactory().createUWS(); @@ -49,8 +104,12 @@ public class ASync implements TAPResource { if (uws.getUserIdentifier() == null) uws.setUserIdentifier(service.getUserIdentifier()); - if (uws.getJobList(getName()) == null) - uws.addJobList(new JobList(getName())); + if (uws.getJobList(getName()) == null){ + jobList = new JobList(getName()); + uws.addJobList(jobList); + jobList.setExecutionManager(new AsyncExecutionManager(service.getLogger(), service.getNbMaxAsyncJobs())); + }else + jobList = uws.getJobList(getName()); if (uws.getBackupManager() == null) uws.setBackupManager(service.getFactory().createUWSBackupManager(uws)); @@ -61,49 +120,114 @@ public class ASync implements TAPResource { int[] report = uws.getBackupManager().restoreAll(); String errorMsg = null; if (report == null || report.length == 0) - errorMsg = "GRAVE error while the restoration of the asynchronous jobs !"; + errorMsg = "GRAVE error while the restoration of the asynchronous jobs!"; else if (report.length < 4) - errorMsg = "Incorrect restoration report format ! => Impossible to know the restoration status !"; + errorMsg = "Incorrect restoration report format! => Impossible to know the restoration status!"; else if (report[0] != report[1]) - errorMsg = "FAILED restoration of the asynchronous jobs: " + report[0] + " on " + report[1] + " restored !"; + errorMsg = "FAILED restoration of the asynchronous jobs: " + report[0] + " on " + report[1] + " restored!"; else backupManager.setEnabled(true); if (errorMsg != null){ errorMsg += " => Backup disabled."; - service.getLogger().error(errorMsg); + service.getLogger().logTAP(LogLevel.FATAL, null, "ASYNC_INIT", errorMsg, null); throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, errorMsg); } } } + /** + *

Notify this TAP resource that free DB connection(s) is(are) now available. + * It means that the execution manager should be refreshed in order to execute one or more queued jobs.

+ * + *

Note: + * This function has no effect if there is no execution manager. + *

+ */ + public void freeConnectionAvailable(){ + if (jobList.getExecutionManager() != null) + jobList.getExecutionManager().refresh(); + } + @Override public String getName(){ return RESOURCE_NAME; } @Override - public void setTAPBaseURL(String baseURL){ + public void setTAPBaseURL(final String baseURL){ ; } + /** + * Get the UWS behind this TAP resource. + * + * @return The inner UWS used by this TAP resource. + */ public final UWSService getUWS(){ return uws; } @Override - public void init(ServletConfig config) throws ServletException{ + public void init(final ServletConfig config) throws ServletException{ ; } @Override public void destroy(){ - ; + if (uws != null) + uws.destroy(); } @Override - public boolean executeResource(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException, TAPException, UWSException{ - return uws.executeRequest(request, response); + public boolean executeResource(final HttpServletRequest request, final HttpServletResponse response) throws IOException, TAPException{ + try{ + + // Ensure the service is currently available: + if (!service.isAvailable()) + throw new TAPException("Can not execute a query: this TAP service is not available! " + service.getAvailability(), UWSException.SERVICE_UNAVAILABLE); + + // Forward the request to the UWS service: + return uws.executeRequest(request, response); + + }catch(UWSException ue){ + service.getLogger().logTAP(LogLevel.FATAL, null, null, "Error while executing the /async resource.", ue); + throw new TAPException(ue); + } + } + + /** + * An execution manager which queues jobs when too many asynchronous jobs are running or + * when no more DB connection is available for the moment. + * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (02/2015) + * @since 2.0 + */ + private class AsyncExecutionManager extends AbstractQueuedExecutionManager { + + /** The maximum number of running jobs. 
*/ + protected int nbMaxRunningJobs = QueuedExecutionManager.NO_QUEUE; + + /** + * Build a queuing execution manager. + * + * @param logger Logger to use. + * @param maxRunningJobs Maximum number of asynchronous jobs that can run in the same time. + */ + public AsyncExecutionManager(UWSLog logger, int maxRunningJobs){ + super(logger); + nbMaxRunningJobs = (maxRunningJobs <= 0) ? QueuedExecutionManager.NO_QUEUE : maxRunningJobs; + } + + @Override + public boolean isReadyForExecution(final UWSJob jobToExecute){ + if (!hasQueue()) + return ((TAPJob)jobToExecute).isReadyForExecution(); + else + return (runningJobs.size() < nbMaxRunningJobs) && ((TAPJob)jobToExecute).isReadyForExecution(); + } + } } diff --git a/src/tap/resource/Availability.java b/src/tap/resource/Availability.java index 73832f8c6fb939ba0770769cfd2406a45fd3dcd1..aede393998a18765a953cd156eb06da7300e64ef 100644 --- a/src/tap/resource/Availability.java +++ b/src/tap/resource/Availability.java @@ -16,7 +16,8 @@ package tap.resource; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.IOException; @@ -28,23 +29,42 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import tap.ServiceConnection; +import tap.TAPException; +import uk.ac.starlink.votable.VOSerializer; +import uws.UWSToolBox; +/** + *

TAP resource describing the availability of a TAP service.

+ * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (04/2015) + */ public class Availability implements TAPResource, VOSIResource { + /** Name of this TAP resource. */ public static final String RESOURCE_NAME = "availability"; - private final ServiceConnection service; + /** Description of the TAP service owning this resource. */ + protected final ServiceConnection service; + + /**

URL toward this TAP resource. + * This URL is particularly important for its declaration in the capabilities of the TAP service.

+ * + *

Note: By default, it is just the name of this resource. It is updated after initialization of the service + * when the TAP service base URL is communicated to its resources. Then, it is: baseTAPURL + "/" + RESOURCE_NAME.

*/ protected String accessURL = getName(); - protected Availability(ServiceConnection service){ + /** + * Build a "availability" resource. + * + * @param service Description of the TAP service which will own this resource. + */ + protected Availability(final ServiceConnection service){ this.service = service; } - public ServiceConnection getService(){ - return service; - } - - public final void setTAPBaseURL(String baseURL){ + @Override + public final void setTAPBaseURL(final String baseURL){ accessURL = ((baseURL == null) ? "" : (baseURL + "/")) + getName(); } @@ -69,7 +89,7 @@ public class Availability implements TAPResource, VOSIResource { } @Override - public void init(ServletConfig config) throws ServletException{ + public void init(final ServletConfig config) throws ServletException{ ; } @@ -79,19 +99,40 @@ public class Availability implements TAPResource, VOSIResource { } @Override - public boolean executeResource(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException{ - if (!request.getMethod().equalsIgnoreCase("GET")) // ERREUR 405 selon VOSI (cf p.4) - response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, "The AVAILABILITY resource is only accessible in HTTP-GET !"); - + public boolean executeResource(final HttpServletRequest request, final HttpServletResponse response) throws IOException, TAPException{ + /* "In the REST binding, the support interfaces shall have distinct URLs in the HTTP scheme and shall be accessible by the GET operation in the HTTP protocol. + * The response to an HTTP POST, PUT or DELETE to these resources is not defined by this specification. However, if an implementation has no special action + * to perform for these requests, the normal response would be a 405 "Method not allowed" error." 
+ * (Extract of the VOSI definition: http://www.ivoa.net/documents/VOSI/20100311/PR-VOSI-1.0-20100311.html#sec2) */ + if (!request.getMethod().equalsIgnoreCase("GET")) + throw new TAPException("The AVAILABILITY resource is only accessible in HTTP-GET! No special action can be perfomed with another HTTP method.", HttpServletResponse.SC_METHOD_NOT_ALLOWED); + + // Set the response MIME type (XML): response.setContentType("text/xml"); - String xml = "\n"; - xml += "\n"; - xml += "\t" + service.isAvailable() + "\n\t" + service.getAvailability() + "\n"; - xml += ""; + // Set the character encoding: + response.setCharacterEncoding(UWSToolBox.DEFAULT_CHAR_ENCODING); + // Get the output stream: PrintWriter pw = response.getWriter(); - pw.print(xml); + + // ...And write the XML document describing the availability of the TAP service: + pw.println(""); + pw.println(""); + + // available ? (true or false) + pw.print("\t"); + pw.print(service.isAvailable()); + pw.println(""); + + // reason/description of the (non-)availability: + pw.print("\t"); + if (service.getAvailability() != null) + pw.print(VOSerializer.formatText(service.getAvailability())); + pw.println(""); + + pw.println(""); + pw.flush(); return true; diff --git a/src/tap/resource/Capabilities.java b/src/tap/resource/Capabilities.java index be6ff926f639ac43b5859f3aa9f94ff026f345f3..e8b2aa744fcc9fd70b3162530a18030af61ab43f 100644 --- a/src/tap/resource/Capabilities.java +++ b/src/tap/resource/Capabilities.java @@ -16,7 +16,8 @@ package tap.resource; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . 
* - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.IOException; @@ -28,19 +29,44 @@ import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; +import tap.TAPException; +import uk.ac.starlink.votable.VOSerializer; +import uws.UWSToolBox; + +/** + *

TAP resource describing the capabilities of a TAP service.

+ * + *

This resource just return an XML document giving a description of the TAP service and list all its VOSI resources.

+ * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (04/2015) + */ public class Capabilities implements TAPResource, VOSIResource { + /** Name of this TAP resource. */ public static final String RESOURCE_NAME = "capabilities"; - private final TAP tap; + /** Representation of the whole TAP service. This object list all available resources ; + * resources that correspond to the capabilities this resource must list. */ + private final TAP tap; + + /**

URL toward this TAP resource. + * This URL is particularly important for its declaration in the capabilities of the TAP service.

+ * + *

Note: By default, it is just the name of this resource. It is updated after initialization of the service + * when the TAP service base URL is communicated to its resources. Then, it is: baseTAPURL + "/" + RESOURCE_NAME.

*/ protected String accessURL = getName(); - public Capabilities(TAP tap){ + /** + * Build a "/capabilities" resource. + * + * @param tap Object representation of the whole TAP service. + */ + public Capabilities(final TAP tap){ this.tap = tap; } - /** - */ + @Override public final void setTAPBaseURL(String baseURL){ accessURL = ((baseURL == null) ? "" : (baseURL + "/")) + getName(); } @@ -67,52 +93,74 @@ public class Capabilities implements TAPResource, VOSIResource { @Override public void init(ServletConfig config) throws ServletException{ - + ; } @Override public void destroy(){ - // TODO Auto-generated method stub - + ; } @Override - public boolean executeResource(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException{ + public boolean executeResource(HttpServletRequest request, HttpServletResponse response) throws IOException, TAPException{ + /* "In the REST binding, the support interfaces shall have distinct URLs in the HTTP scheme and shall be accessible by the GET operation in the HTTP protocol. + * The response to an HTTP POST, PUT or DELETE to these resources is not defined by this specification. However, if an implementation has no special action + * to perform for these requests, the normal response would be a 405 "Method not allowed" error." + * (Extract of the VOSI definition: http://www.ivoa.net/documents/VOSI/20100311/PR-VOSI-1.0-20100311.html#sec2) */ + if (!request.getMethod().equalsIgnoreCase("GET")) + throw new TAPException("The CAPABILITIES resource is only accessible in HTTP-GET! 
No special action can be perfomed with another HTTP method.", HttpServletResponse.SC_METHOD_NOT_ALLOWED); + + // Set the response MIME type (XML): response.setContentType("application/xml"); - StringBuffer xml = new StringBuffer("\n"); - xml.append("\n"); + // Set the character encoding: + response.setCharacterEncoding(UWSToolBox.DEFAULT_CHAR_ENCODING); + + // Get the response stream: + PrintWriter out = response.getWriter(); - xml.append(tap.getCapability()); + // Write the XML document header: + out.println(""); + out.print(""); - // Build the xml document: - Iterator it = tap.getTAPResources(); + // Write the full list of this TAP capabilities: + out.print(tap.getCapability()); + + // Write the capabilities of all VOSI resources: + Iterator it = tap.getResources(); while(it.hasNext()){ TAPResource res = it.next(); if (res instanceof VOSIResource){ String cap = ((VOSIResource)res).getCapability(); - if (cap != null) - xml.append('\n').append(cap); + if (cap != null){ + out.println(); + out.print(cap); + } } } - xml.append("\n"); + // Write the end of the XML document: + out.println("\n"); - // Write the Capabilities resource into the ServletResponse: - PrintWriter out = response.getWriter(); - out.print(xml.toString()); out.flush(); return true; } - public static final String getDefaultCapability(VOSIResource res){ - return "\t\n" + "\t\t\n" + "\t\t\t " + ((res.getAccessURL() == null) ? "" : res.getAccessURL()) + " \n" + "\t\t\n" + "\t"; + /** + * Write the XML description of the given VOSI resource. + * + * @param res Resource to describe in XML. + * + * @return XML description of the given VOSI resource. + */ + public static final String getDefaultCapability(final VOSIResource res){ + return "\t\n" + "\t\t\n" + "\t\t\t " + ((res.getAccessURL() == null) ? 
"" : VOSerializer.formatText(res.getAccessURL())) + " \n" + "\t\t\n" + "\t"; } } diff --git a/src/tap/resource/HomePage.java b/src/tap/resource/HomePage.java new file mode 100644 index 0000000000000000000000000000000000000000..b3210bc70fba8dfa6406dc7ca5e07a9987dc6caf --- /dev/null +++ b/src/tap/resource/HomePage.java @@ -0,0 +1,307 @@ +package tap.resource; + +/* + * This file is part of TAPLibrary. + * + * TAPLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * TAPLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with TAPLibrary. If not, see . + * + * Copyright 2015 - Astronomisches Rechen Institut (ARI) + */ + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileReader; +import java.io.IOException; +import java.io.PrintWriter; +import java.net.MalformedURLException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.Iterator; + +import javax.servlet.ServletConfig; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import tap.TAPException; +import tap.formatter.OutputFormat; +import uws.ClientAbortException; +import uws.UWSToolBox; +import uws.service.log.UWSLog.LogLevel; + +/** + *

Write the content of the TAP service's home page.

+ * + *

Note: + * This class is using the two following {@link TAP} attributes in order to display the home page: + * {@link TAP#homePageURI} and {@link TAP#homePageMimeType}. The MIME type is used only for the third case below (local file). + *

+ * + *

Four cases are taken into account in this class, in function of the {@link TAP#homePageURI} value:

+ *
    + *
  1. a default content if no custom home page (URI) has been specified using {@link TAP#setHomePageURI(String)}. + * This default home page is hard-coded in this class and displays just an HTML list of + * links. There is one link for each resources of this TAP service (excluding the home page).
  2. + *
  3. a file inside WebContent if the given URI has no scheme (e.g. "tapIndex.jsp" or "/myFiles/tapIndex.html"). + * The URI is then an absolute (if starting with "/") or a relative path to file inside the WebContent directory. + * In this case the request is forwarded to this file. It is neither a redirection nor a copy, + * but a kind of inclusion of the interpreted file into the response. + * This method MUST be used if your home page is a JSP.
  4. + *
  5. a local file if a URI starts with "file:". In this case, the content of the local file is copied in the HTTP response. There is no interpretation. So this method should not be used for JSP.
  6. + *
  7. a distance document in all other cases. Indeed, if there is a scheme different from "file:" the given URI will be considered as a URL. + * In this case, any request to the TAP home page is redirected to this URL.
  8. + *
+ * + * @author Grégory Mantelet (ARI) + * @version 2.0 (04/2015) + * @since 2.0 + */ +public class HomePage implements TAPResource { + + /** Name of this TAP resource. */ + public static final String RESOURCE_NAME = "HOME PAGE"; + + /** TAP service owning this resource. */ + protected final TAP tap; + + public HomePage(final TAP tap){ + if (tap == null) + throw new NullPointerException("Missing TAP object! The HOME PAGE resource can not be initialized without a TAP instance."); + this.tap = tap; + } + + @Override + public void init(final ServletConfig config) throws ServletException{} + + @Override + public void destroy(){} + + @Override + public void setTAPBaseURL(String baseURL){} + + @Override + public final String getName(){ + return RESOURCE_NAME; + } + + @Override + public boolean executeResource(final HttpServletRequest request, final HttpServletResponse response) throws IOException, TAPException{ + boolean written = false; + + // Display the specified home page, if any is specified: + if (tap.homePageURI != null){ + + URI uri = null; + try{ + uri = new URI(tap.homePageURI); + /* CASE: FILE IN WebContent */ + if (uri.getScheme() == null){ + try{ + if (request.getServletContext().getResource(tap.homePageURI) != null){ + request.getRequestDispatcher(tap.homePageURI).forward(request, response); + written = true; + }else + logError("Web application file not found", null); + }catch(MalformedURLException mue){ + logError("incorrect URL syntax", mue); + } + } + /* CASE: LOCAL FILE */ + else if (uri.getScheme().equalsIgnoreCase("file")){ + // Set the content type: + response.setContentType(tap.homePageMimeType); + + // Set the character encoding: + response.setCharacterEncoding(UWSToolBox.DEFAULT_CHAR_ENCODING); + + // Get the character writer: + PrintWriter writer = response.getWriter(); + + // Get an input toward the custom home page: + BufferedReader input = null; + try{ + File f = new File(uri.getPath()); + if (f.exists() && !f.isDirectory() && f.canRead()){ + 
// set the content length: + response.setContentLength((int)f.length()); + + // get the input stream: + input = new BufferedReader(new FileReader(f)); + + // Copy the content of the input into the given writer: + char[] buffer = new char[2048]; + int nbReads = 0, nbBufferWritten = 0; + while((nbReads = input.read(buffer)) > 0){ + writer.write(buffer, 0, nbReads); + if ((++nbBufferWritten) % 4 == 0){ // the minimum and default buffer size of an HttpServletResponse is 8kiB => 4*2048 + UWSToolBox.flush(writer); + nbBufferWritten = 0; + } + } + UWSToolBox.flush(writer); + + // copy successful: + written = true; + }else + logError("file not found or not readable (" + f.exists() + !f.isDirectory() + f.canRead() + ")", null); + + }catch(ClientAbortException cae){ + /* This exception is an extension of IOException thrown only by some functions of UWSToolBox. + * It aims to notify about an IO error while trying to write the content of an HttpServletResponse. + * Such exception just means that the connection with the HTTP client has been closed/aborted. + * Consequently, no error nor result can be written any more in the HTTP response. + * This error, is just propagated to the TAP instance, so that stopping any current process + * for this request and so that being logged without any attempt of writing the error in the HTTP response. + */ + throw cae; + + }catch(IOException ioe){ + /* This IOException can be thrown only by InputStream.read(...) (because PrintWriter.print(...) + * silently fallbacks in case of error). + * So this error must not be propagated but caught and logged right now. Thus the default home page + * can be displayed after the error has been logged. 
*/ + logError("the following error occurred while reading the specified local file", ioe); + + }finally{ + if (input != null) + input.close(); + } + + // Stop trying to write the home page if the HTTP request has been aborted/closed: + /*if (requestAborted) + throw new IOException("HTTP request aborted or connection with the HTTP client closed for another reason!");*/ + } + /* CASE: HTTP/HTTPS/FTP/... */ + else{ + response.sendRedirect(tap.homePageURI); + written = true; + } + + }catch(IOException ioe){ + /* This IOException can be caught here only if caused by a HTTP client abortion or by a closing of the HTTPrequest. + * So, it must be propagated until the TAP instance, where it will be merely logged as INFO. No response/error can be + * returned in the HTTP response. */ + throw ioe; + + }catch(IllegalStateException ise){ + /* This exception is caused by an attempt to reset the HTTP response buffer while a part of its + * content has already been submitted to the HTTP client. + * It must be propagated to the TAP instance so that being logged as a FATAL error. */ + throw ise; + + }catch(Exception e){ + /* The other errors are just logged, but not reported to the HTTP client, + * and then the default home page is displayed. */ + if (e instanceof URISyntaxException) + logError("the given URI has a wrong and unexpected syntax", e); + else + logError(null, e); + } + } + + // DEFAULT: list all available resources: + if (!written){ + // Set the content type: HTML document + response.setContentType("text/html"); + + // Set the character encoding: + response.setCharacterEncoding(UWSToolBox.DEFAULT_CHAR_ENCODING); + + // Get the output stream: + PrintWriter writer = response.getWriter(); + + // HTML header + CSS + Javascript: + writer.print("\n\n\t\n\t\t\n\t\tTAP HOME PAGE\n\t\t\n\t\t\n\t\n\t"); + + // Page title: + writer.print("\n\t\t

TAP HOME PAGE
"); + if (tap.getServiceConnection().getProviderName() != null) + writer.print("- " + tap.getServiceConnection().getProviderName() + " -"); + writer.print("

"); + + // Service description: + if (tap.getServiceConnection().getProviderDescription() != null) + writer.print("\n\n\t\t

Service description

\n\t\t

" + tap.getServiceConnection().getProviderDescription() + "

"); + + // List of all available resources: + writer.print("\n\n\t\t

Available resources

\n\t\t
    "); + for(TAPResource res : tap.resources.values()) + writer.println("\n\t\t\t
  • " + res.getName() + "
  • "); + writer.print("\n\t\t
"); + + // ADQL query form: + writer.print("\n\t\t\n\t\t

ADQL query

\n\t\t"); + writer.print("\n\t\t
\n\t\t\t\n\t\t\t\n\t\t\t\n\t\t\t\n\t\t\t
\n\t\t\t\tQuery:\n\t\t\t\t\n\t\t\t
"); + writer.print("\n\n\t\t\t
\n\t\t\t\tExecution mode: \n\t\t\t\t\n\t\t\t
"); + writer.print("\n\t\t\t
Format:\n\t\t\t\t\n\t\t\t
"); + + // Result limit: + writer.print("\n\t\t\t
\n\t\t\t\t rows (0 to get only metadata ; a value < 0 means 'default value')\n\t\t\t\t"); + if (tap.getServiceConnection().getOutputLimit() != null && tap.getServiceConnection().getOutputLimit().length >= 2){ + writer.print("\n\t\t\t\t\t"); + writer.print("\n\t\t\t\t\t"); + } + writer.print("\n\t\t\t\t\n\t\t\t
"); + + // Execution duration limit: + writer.print("\n\t\t\t
\n\t\t\t\t seconds (a value ≤ 0 means 'default value')\n\t\t\t\t"); + if (tap.getServiceConnection().getExecutionDuration() != null && tap.getServiceConnection().getExecutionDuration().length >= 2){ + writer.print("\n\t\t\t\t\t"); + writer.print("\n\t\t\t\t\t"); + } + writer.print("\n\t\t\t\t\n\t\t\t
"); + + // Upload feature: + if (tap.getServiceConnection().uploadEnabled()) + writer.print("\n\t\t\t
\n\t\t\t\t (the uploaded table must be referenced in the ADQL query with the following full name: TAP_UPLOAD.upload)\n\t\t\t
"); + + // Footer: + writer.print("\n\t\t\t\n\t\t
\n\t\t
\n\t\t
\n\t\t
\n\t\t\t

Page generated by TAPLibrary v2.0

\n\t\t
\n\t\n"); + + writer.flush(); + + written = true; + } + + return written; + } + + /** + *

Log the given error as a TAP log message with the {@link LogLevel} ERROR, and the event "HOME_PAGE".

+ * + *

+ * The logged message starts with: Can not write the specified home page content ({tap.homePageURI}). + * After the specified error message, the following is appended: ! => The default home page will be displayed.. + *

+ * + *

+ * If the message parameter is missing, the {@link Throwable} message will be taken instead. + * And if this latter is also missing, none will be written. + *

+ * + * @param message Error message to log. + * @param error The exception at the origin of the error. + */ + protected void logError(final String message, final Throwable error){ + tap.getLogger().logTAP(LogLevel.ERROR, null, "HOME_PAGE", "Can not write the specified home page content (" + tap.homePageURI + ") " + (message == null ? (error == null ? "" : ": " + error.getMessage()) : ": " + message) + "! => The default home page will be displayed.", error); + } + +} diff --git a/src/tap/resource/Sync.java b/src/tap/resource/Sync.java index 71be76f48da2c1312fe70a6497fd646a06603d25..9b93392fe39ed677bb292b9c2df473a0b425d5db 100644 --- a/src/tap/resource/Sync.java +++ b/src/tap/resource/Sync.java @@ -16,33 +16,55 @@ package tap.resource; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.IOException; + import javax.servlet.ServletConfig; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import tap.TAPJob; import tap.ServiceConnection; import tap.TAPException; +import tap.TAPJob; import tap.TAPSyncJob; import tap.parameters.TAPParameters; import uws.UWSException; +/** + *

Synchronous resource of a TAP service.

+ * + *

+ * Requests sent to this resource can be either to get the capabilities of the TAP service (which should actually be accessed with the resource /capabilities) + * or to execute synchronously an ADQL query. For the second case, "synchronously" means that result or error is returned immediately when the execution ends. + * Besides, generally, the execution time is much more limited than an asynchronous query. + *

+ * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (09/2014) + */ public class Sync implements TAPResource { + /** Name of this TAP resource. */ public static final String RESOURCE_NAME = "sync"; - protected String accessURL = null; - - protected final ServiceConnection service; + /** Description of the TAP service owning this resource. */ + protected final ServiceConnection service; + /** List of all capabilities of the TAP service. */ protected final Capabilities capabilities; - public Sync(ServiceConnection service, Capabilities capabilities){ + /** + * Build a synchronous resource for the TAP service whose the description and + * the capabilities are provided in parameters. + * + * @param service Description of the TAP service which will own this resource. + * @param capabilities Capabilities of the TAP service. + */ + public Sync(final ServiceConnection service, final Capabilities capabilities){ this.service = service; this.capabilities = capabilities; } @@ -53,12 +75,12 @@ public class Sync implements TAPResource { } @Override - public void setTAPBaseURL(String baseURL){ - accessURL = ((baseURL != null) ? 
(baseURL + "/") : "") + getName(); + public void setTAPBaseURL(final String baseURL){ + ; } @Override - public void init(ServletConfig config) throws ServletException{ + public void init(final ServletConfig config) throws ServletException{ ; } @@ -68,22 +90,27 @@ public class Sync implements TAPResource { } @Override - public boolean executeResource(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException, TAPException, UWSException{ - TAPParameters params = (TAPParameters)service.getFactory().createUWSParameters(request); + public boolean executeResource(final HttpServletRequest request, final HttpServletResponse response) throws IOException, TAPException{ + // Retrieve the execution parameters: + TAPParameters params = service.getFactory().createTAPParameters(request); params.check(); + // CASE 1: GET CAPABILITIES + /* If the user asks for the capabilities through the TAP parameters, execute the corresponding resource. */ if (params.getRequest().equalsIgnoreCase(TAPJob.REQUEST_GET_CAPABILITIES)) return capabilities.executeResource(request, response); - if (!service.isAvailable()){ - response.sendError(HttpServletResponse.SC_SERVICE_UNAVAILABLE, service.getAvailability()); - return false; - } + // CASE 2: EXECUTE SYNCHRONOUSLY AN ADQL QUERY + // Ensure the service is currently available: + if (!service.isAvailable()) + throw new TAPException("Can not execute a query: this TAP service is not available! 
" + service.getAvailability(), UWSException.SERVICE_UNAVAILABLE); + // Execute synchronously the given job: TAPSyncJob syncJob = new TAPSyncJob(service, params); syncJob.start(response); return true; + } } diff --git a/src/tap/resource/TAP.java b/src/tap/resource/TAP.java index 3ad3a555d13bc52bf1060a4839bab34d5ce32998..2bc876fd96330eaad3849d7a05689a14e04ef353 100644 --- a/src/tap/resource/TAP.java +++ b/src/tap/resource/TAP.java @@ -16,20 +16,11 @@ package tap.resource; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomisches Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ -import java.io.BufferedInputStream; -import java.io.File; -import java.io.FileInputStream; import java.io.IOException; -import java.io.PrintWriter; -import java.net.MalformedURLException; -import java.net.URL; -import java.text.DateFormat; -import java.text.SimpleDateFormat; -import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.Map; @@ -42,39 +33,113 @@ import javax.servlet.http.HttpServletResponse; import tap.ServiceConnection; import tap.ServiceConnection.LimitUnit; import tap.TAPException; -import tap.db.DBConnection; import tap.error.DefaultTAPErrorWriter; import tap.formatter.OutputFormat; import tap.log.TAPLog; import tap.metadata.TAPMetadata; +import uk.ac.starlink.votable.VOSerializer; import uws.UWSException; -import uws.job.ErrorType; -import uws.job.UWSJob; +import uws.UWSToolBox; import uws.job.user.JobOwner; +import uws.service.UWS; import uws.service.UWSService; -import uws.service.UWSUrl; import uws.service.error.ServiceErrorWriter; +import uws.service.log.UWSLog.LogLevel; +import adql.db.FunctionDef; -public class TAP< R > implements VOSIResource { - - private static final long 
serialVersionUID = 1L; - - protected final ServiceConnection service; - +/** + *

Root/Home of the TAP service. It is also the resource (HOME) which gathers all the others of the same TAP service.

+ * + *

At its creation it is creating and configuring the other resources in function of the given description of the TAP service.

+ * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (04/2015) + */ +public class TAP implements VOSIResource { + + /**

Name of the TAP AVAILABILITY resource. + * This resource tells whether the TAP service is available (i.e. whether it accepts queries or not).

+ *

Note: this name is suffixing the root TAP URL in order to access one of its resources.

+ * @since 2.0 */ + public final static String RESOURCE_AVAILABILITY = "availability"; + /**

Name of the TAP CAPABILITIES resource. + * This resource list all capabilities (e.g. output limits and formats, uploads, ...) of this TAP resource.

+ *

Note: this name is suffixing the root TAP URL in order to access one of its resources.

+ * @since 2.0 */ + public final static String RESOURCE_CAPABILITIES = "capabilities"; + /**

Name of the TAP HOME PAGE resource. + * This resource lists and describes all published and query-able schemas, tables and columns.

+ *

Note: this name is suffixing the root TAP URL in order to access one of its resources.

+ * @since 2.0 */ + public final static String RESOURCE_METADATA = "tables"; + /**

Name of the TAP HOME PAGE resource. + * This resource is used to submit ADQL queries to run asynchronously.

+ *

Note: this name is suffixing the root TAP URL in order to access one of its resources.

+ * @since 2.0 */ + public final static String RESOURCE_ASYNC = "async"; + /**

Name of the TAP HOME PAGE resource. + * This resource is used to submit ADQL queries to run synchronously.

+ *

Note: this name is suffixing the root TAP URL in order to access one of its resources.

+ * @since 2.0 */ + public final static String RESOURCE_SYNC = "sync"; + + /** Description of the TAP service owning this resource. */ + protected final ServiceConnection service; + + /** List of all the other TAP resources of the service. */ protected final Map resources; + /** Base URL of the TAP service. It is also the URL of this resource (HOME). */ protected String tapBaseURL = null; + /** + *

HOME PAGE resource. + * This resource lets write the home page.

+ *

Note: + * at the URI {@link #homePageURI} or it is a very simple HTML page listing the link of all available + * TAP resources. + *

+ * @since 2.0 + */ + protected HomePage homePage = null; + + /** URI of the page or path of the file to display when this resource is requested. */ protected String homePageURI = null; + /** MIME type of the custom home page. By default, it is "text/html". */ + protected String homePageMimeType = "text/html"; + + /** Object to use when an error occurs or comes until this resource from the others. + * This object fills the HTTP response in the most appropriate way in function of the error. */ protected ServiceErrorWriter errorWriter; - public TAP(ServiceConnection serviceConnection) throws UWSException, TAPException{ + /** Last generated request ID. If the next generated request ID is equivalent to this one, + * a new one will generate in order to ensure the uniqueness. + * @since 2.0 */ + protected static String lastRequestID = null; + + /** + * Build a HOME resource of a TAP service whose the description is given in parameter. + * All the other TAP resources will be created and configured here thanks to the given {@link ServiceConnection}. + * + * @param serviceConnection Description of the TAP service. + * + * @throws UWSException If an error occurs while creating the /async resource. + * @throws TAPException If any other error occurs. 
+ */ + public TAP(final ServiceConnection serviceConnection) throws UWSException, TAPException{ service = serviceConnection; resources = new HashMap(); - errorWriter = new DefaultTAPErrorWriter(service); + // Get the error writer to use, or create a default instance if none are provided by the factory: + errorWriter = serviceConnection.getFactory().getErrorWriter(); + if (errorWriter == null) + errorWriter = new DefaultTAPErrorWriter(service); + + // Set the default home page: + homePage = new HomePage(this); + // Set all the standard TAP resources: TAPResource res = new Availability(service); resources.put(res.getName(), res); @@ -86,68 +151,316 @@ public class TAP< R > implements VOSIResource { res = new ASync(service); resources.put(res.getName(), res); - getUWS().setErrorWriter(errorWriter); - if (service.uploadEnabled()){ - DBConnection dbConn = null; - try{ - dbConn = service.getFactory().createDBConnection("TAP(ServiceConnection)"); - dbConn.dropSchema("TAP_UPLOAD"); - dbConn.createSchema("TAP_UPLOAD"); - }catch(TAPException e){ - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, e, "Error while creating the schema TAP_UPLOAD !"); - }finally{ - if (dbConn != null) - dbConn.close(); - } - } - - updateTAPMetadata(); + TAPMetadata metadata = service.getTAPMetadata(); + resources.put(metadata.getName(), metadata); } + /** + * Get the logger used by this resource and all the other resources managed by it. + * + * @return The used logger. + */ public final TAPLog getLogger(){ return service.getLogger(); } - public void setTAPBaseURL(String baseURL){ + /** + *

Let initialize this resource and all the other managed resources.

+ * + *

This function is called by the library just once: when the servlet is initialized.

+ * + * @param config Configuration of the servlet. + * + * @throws ServletException If any error occurs while reading the given configuration. + * + * @see TAPResource#init(ServletConfig) + */ + public void init(final ServletConfig config) throws ServletException{ + for(TAPResource res : resources.values()) + res.init(config); + } + + /** + *

Free all the resources used by this resource and the other managed resources.

+ * + *

This function is called by the library just once: when the servlet is destroyed.

+ * + * @see TAPResource#destroy() + */ + public void destroy(){ + // Set the availability to "false" and the reason to "The application server is stopping!": + service.setAvailable(false, "The application server is stopping!"); + + // Destroy all web resources: + for(TAPResource res : resources.values()) + res.destroy(); + + // Destroy also all resources allocated in the factory: + service.getFactory().destroy(); + + // Log the end: + getLogger().logTAP(LogLevel.INFO, this, "STOP", "TAP Service stopped!", null); + } + + /** + *

Set the base URL of this TAP service.

+ * + *

+ * This URL must be the same as the one of this resource ; it corresponds to the + * URL of the root (or home) of the TAP service. + *

+ * + *

The given URL will be propagated to the other TAP resources automatically.

+ * + * @param baseURL URL of this resource. + * + * @see TAPResource#setTAPBaseURL(String) + */ + public void setTAPBaseURL(final String baseURL){ tapBaseURL = baseURL; for(TAPResource res : resources.values()) res.setTAPBaseURL(tapBaseURL); } - public void setTAPBaseURL(HttpServletRequest request){ + /** + *

Build the base URL from the given HTTP request, and use it to set the base URL of this TAP service.

+ * + *

The given URL will be propagated to the other TAP resources automatically.

+ * + * @param request HTTP request from which a TAP service's base URL will be extracted. + * + * @see #setTAPBaseURL(String) + */ + public void setTAPBaseURL(final HttpServletRequest request){ setTAPBaseURL(request.getScheme() + "://" + request.getServerName() + ":" + request.getServerPort() + request.getContextPath() + request.getServletPath()); } + /* ******************** */ + /* RESOURCES MANAGEMENT */ + /* ******************** */ + + /** + * Get the description of this service. + * + * @return Description/Configuration of this TAP service. + * + * @since 2.0 + */ + public final ServiceConnection getServiceConnection(){ + return service; + } + + /** + * Get the /availability resource of this TAP service. + * + * @return The /availability resource. + */ public final Availability getAvailability(){ - return (Availability)resources.get(Availability.RESOURCE_NAME); + return (Availability)resources.get(RESOURCE_AVAILABILITY); } + /** + * Get the /capabilities resource of this TAP service. + * + * @return The /capabilities resource. + */ public final Capabilities getCapabilities(){ - return (Capabilities)resources.get(Capabilities.RESOURCE_NAME); + return (Capabilities)resources.get(RESOURCE_CAPABILITIES); } + /** + * Get the /sync resource of this TAP service. + * + * @return The /sync resource. + */ public final Sync getSync(){ - return (Sync)resources.get(Sync.RESOURCE_NAME); + return (Sync)resources.get(RESOURCE_SYNC); } + /** + * Get the /async resource of this TAP service. + * + * @return The /async resource. + */ public final ASync getASync(){ - return (ASync)resources.get(ASync.RESOURCE_NAME); + return (ASync)resources.get(RESOURCE_ASYNC); + } + + /** + * Get the UWS service used for the /async service. + * + * @return The used UWS service. + */ + public final UWSService getUWS(){ + TAPResource res = getASync(); + if (res != null) + return ((ASync)res).getUWS(); + else + return null; } + /** + *

Get the object managing all the metadata (information about the published columns and tables) + * of this TAP service.

+ * + *

This object is also to the /tables resource.

+ * + * @return List of all metadata of this TAP service. + */ public final TAPMetadata getTAPMetadata(){ return (TAPMetadata)resources.get(TAPMetadata.RESOURCE_NAME); } - public final Iterator getTAPResources(){ + /** + *

Add the given resource in this TAP service.

+ * + *

The ID of this resource (which is also its URI) will be its name (given by {@link TAPResource#getName()}).

+ * + *

WARNING: + * If another resource with an ID strictly identical (case sensitively) to the name of the given resource, it will be overwritten! + * You should check (thanks to {@link #hasResource(String)}) before calling this function that no resource is associated with the same URI. + * If it is the case, you should then use the function {@link #addResource(String, TAPResource)} with a different ID/URI. + *

+ * + *

Note: + * This function is equivalent to {@link #addResource(String, TAPResource)} with {@link TAPResource#getName()} in first parameter. + *

+ * + * @param newResource Resource to add in the service. + * + * @return true if the given resource has been successfully added, + * false otherwise (and particularly if the given resource is NULL). + * + * @see #addResource(String, TAPResource) + */ + public final boolean addResource(final TAPResource newResource){ + return addResource(newResource.getName(), newResource); + } + + /** + *

Add the given resource in this TAP service with the given ID (which will be also the URI to access this resource).

+ * + *

WARNING: + * If another resource with an ID strictly identical (case sensitively) to the name of the given resource, it will be overwritten! + * You should check (thanks to {@link #hasResource(String)}) before calling this function that no resource is associated with the same URI. + * If it is the case, you should then use the function {@link #addResource(String, TAPResource)} with a different ID/URI. + *

+ * + *

Note: + * If the given ID is NULL, the name of the resource will be used. + *

+ * + * @param resourceId ID/URI of the resource to add. + * @param newResource Resource to add. + * + * @return true if the given resource has been successfully added to this service with the given ID/URI, + * false otherwise (and particularly if the given resource is NULL). + */ + public final boolean addResource(final String resourceId, final TAPResource newResource){ + if (newResource == null) + return false; + resources.put((resourceId == null) ? newResource.getName() : resourceId, newResource); + return true; + } + + /** + * Get the number of all resources managed by this TAP service (this resource - HOME - excluded). + * + * @return Number of managed resources. + */ + public final int getNbResources(){ + return resources.size(); + } + + /** + *

Get the specified resource.

+ * + *

Note: + * The research is case sensitive. + *

+ * + * @param resourceId Exact ID/URI of the resource to get. + * + * @return The corresponding resource, + * or NULL if no match can be found. + */ + public final TAPResource getResource(final String resourceId){ + return resources.get(resourceId); + } + + /** + * Let iterate over the full list of the TAP resources managed by this TAP service. + * + * @return Iterator over the available TAP resources. + */ + public final Iterator getResources(){ return resources.values().iterator(); } + /** + * Let iterate over the full list of the TAP resources managed by this TAP service. + * + * @return Iterator over the available TAP resources. + * @deprecated The name of this function has been normalized. So now, you should use {@link #getResources()} + * which is doing exactly the same thing. + */ + @Deprecated + public final Iterator getTAPResources(){ + return getResources(); + } + + /** + *

Tell whether a resource is already associated with the given ID/URI.

+ * + *

Note: + * The research is case sensitive. + *

+ * + * @param resourceId Exact ID/URI of the resource to find. + * + * @return true if a resource is already associated with the given ID/URI, + * false otherwise. + */ + public final boolean hasResource(final String resourceId){ + return resources.containsKey(resourceId); + } + + /** + *

Remove the resource associated with the given ID/URI.

+ * + *

Note: + * The research is case sensitive. + *

+ * + * @param resourceId Exact ID/URI of the resource to remove. + * + * @return The removed resource, if associated with the given ID/URI, + * otherwise, NULL is returned. + */ + public final TAPResource removeResource(final String resourceId){ + return resources.remove(resourceId); + } + + /* **************** */ + /* ERROR MANAGEMENT */ + /* **************** */ + + /** + * Get the object to use in order to report errors to the user in replacement of the expected result. + * + * @return Used error writer. + */ public final ServiceErrorWriter getErrorWriter(){ return errorWriter; } - public final void setErrorWriter(ServiceErrorWriter errorWriter){ + /** + * Set the object to use in order to report errors to the user in replacement of the expected result. + * + * @param errorWriter Error writer to use. (if NULL, nothing will be done) + */ + public final void setErrorWriter(final ServiceErrorWriter errorWriter){ if (errorWriter != null){ this.errorWriter = errorWriter; getUWS().setErrorWriter(errorWriter); @@ -168,29 +481,72 @@ public class TAP< R > implements VOSIResource { public String getCapability(){ StringBuffer xml = new StringBuffer(); - xml.append("\n"); + // Header: + xml.append("\n"); + + // TAP access: xml.append("\t\n"); - xml.append("\t\t").append(getAccessURL()).append("\n"); + xml.append("\t\t").append((getAccessURL() == null) ? "" : VOSerializer.formatText(getAccessURL())).append("\n"); xml.append("\t\n"); + + // Language description: xml.append("\t\n"); xml.append("\t\tADQL\n"); - xml.append("\t\t2.0\n"); + xml.append("\t\t2.0\n"); xml.append("\t\tADQL 2.0\n"); + + // Geometrical functions: + if (service.getGeometries() != null && service.getGeometries().size() > 0){ + xml.append("\t\t"); + for(String geom : service.getGeometries()){ + if (geom != null){ + xml.append("\t\t\t"); + xml.append("\t\t\t\t
").append(VOSerializer.formatText(geom.toUpperCase())).append("
"); + xml.append("\t\t\t
"); + } + } + xml.append("\t\t
"); + } + + // User Defined Functions (UDFs): + if (service.getUDFs() != null && service.getUDFs().size() > 0){ + xml.append("\t\t"); + for(FunctionDef udf : service.getUDFs()){ + if (udf != null){ + xml.append("\t\t\t"); + xml.append("\t\t\t\t
").append(VOSerializer.formatText(udf.toString())).append("
"); + if (udf.description != null && udf.description.length() > 0) + xml.append("\t\t\t\t").append(VOSerializer.formatText(udf.description)).append(""); + xml.append("\t\t\t
"); + } + } + xml.append("\t\t
"); + } + xml.append("\t
\n"); - Iterator> itFormats = service.getOutputFormats(); - OutputFormat formatter; + // Available output formats: + Iterator itFormats = service.getOutputFormats(); + OutputFormat formatter; while(itFormats.hasNext()){ formatter = itFormats.next(); xml.append("\t\n"); - xml.append("\t\t").append(formatter.getMimeType()).append("\n"); + xml.append("\t\t").append(VOSerializer.formatText(formatter.getMimeType())).append("\n"); if (formatter.getShortMimeType() != null) - xml.append("\t\t").append(formatter.getShortMimeType()).append("\n"); + xml.append("\t\t").append(VOSerializer.formatText(formatter.getShortMimeType())).append("\n"); if (formatter.getDescription() != null) - xml.append("\t\t").append(formatter.getDescription()).append("\n"); + xml.append("\t\t").append(VOSerializer.formatText(formatter.getDescription())).append("\n"); xml.append("\t\n"); } + // Write upload methods: INLINE, HTTP, FTP: + if (service.uploadEnabled()){ + xml.append("\t\n"); + xml.append("\t\n"); + xml.append("\t\n"); + } + + // Retention period (for asynchronous jobs): int[] retentionPeriod = service.getRetentionPeriod(); if (retentionPeriod != null && retentionPeriod.length >= 2){ if (retentionPeriod[0] > -1 || retentionPeriod[1] > -1){ @@ -203,6 +559,7 @@ public class TAP< R > implements VOSIResource { } } + // Execution duration (still for asynchronous jobs): int[] executionDuration = service.getExecutionDuration(); if (executionDuration != null && executionDuration.length >= 2){ if (executionDuration[0] > -1 || executionDuration[1] > -1){ @@ -215,238 +572,334 @@ public class TAP< R > implements VOSIResource { } } + // Output/Result limit: int[] outputLimit = service.getOutputLimit(); LimitUnit[] outputLimitType = service.getOutputLimitType(); if (outputLimit != null && outputLimit.length >= 2 && outputLimitType != null && outputLimitType.length >= 2){ if (outputLimit[0] > -1 || outputLimit[1] > -1){ xml.append("\t\n"); - if (outputLimit[0] > -1) - 
xml.append("\t\t").append(outputLimit[0]).append("\n"); - if (outputLimit[1] > -1) - xml.append("\t\t").append(outputLimit[1]).append("\n"); + String limitType; + if (outputLimit[0] > -1){ + long limit = outputLimit[0] * outputLimitType[0].bytesFactor(); + limitType = (outputLimitType[0] == null || outputLimitType[0] == LimitUnit.rows) ? LimitUnit.rows.toString() : LimitUnit.bytes.toString(); + xml.append("\t\t").append(limit).append("\n"); + } + if (outputLimit[1] > -1){ + long limit = outputLimit[1] * outputLimitType[1].bytesFactor(); + limitType = (outputLimitType[1] == null || outputLimitType[1] == LimitUnit.rows) ? LimitUnit.rows.toString() : LimitUnit.bytes.toString(); + xml.append("\t\t").append(limit).append("\n"); + } xml.append("\t\n"); } } + // Upload limits if (service.uploadEnabled()){ - // Write upload methods: INLINE, HTTP, FTP: - xml.append(""); - xml.append(""); - xml.append(""); - xml.append(""); - xml.append(""); - xml.append(""); - // Write upload limits: int[] uploadLimit = service.getUploadLimit(); LimitUnit[] uploadLimitType = service.getUploadLimitType(); if (uploadLimit != null && uploadLimit.length >= 2 && uploadLimitType != null && uploadLimitType.length >= 2){ if (uploadLimit[0] > -1 || uploadLimit[1] > -1){ xml.append("\t\n"); - if (uploadLimit[0] > -1) - xml.append("\t\t").append(uploadLimit[0]).append("\n"); - if (uploadLimit[1] > -1) - xml.append("\t\t").append(uploadLimit[1]).append("\n"); + String limitType; + if (uploadLimit[0] > -1){ + long limit = uploadLimit[0] * uploadLimitType[0].bytesFactor(); + limitType = (uploadLimitType[0] == null || uploadLimitType[0] == LimitUnit.rows) ? LimitUnit.rows.toString() : LimitUnit.bytes.toString(); + xml.append("\t\t").append(limit).append("\n"); + } + if (uploadLimit[1] > -1){ + long limit = uploadLimit[1] * uploadLimitType[1].bytesFactor(); + limitType = (uploadLimitType[1] == null || uploadLimitType[1] == LimitUnit.rows) ? 
LimitUnit.rows.toString() : LimitUnit.bytes.toString(); + xml.append("\t\t").append(limit).append("\n"); + } xml.append("\t\n"); } } } + // Footer: xml.append("\t
"); return xml.toString(); } - public final UWSService getUWS(){ - TAPResource res = resources.get("async"); - if (res != null) - return ((ASync)res).getUWS(); - else - return null; + /* ************************************* */ + /* MANAGEMENT OF THIS RESOURCE'S CONTENT */ + /* ************************************* */ + + /** + * Get the HOME PAGE resource of this TAP service. + * + * @return The HOME PAGE resource. + * + * @since 2.0 + */ + public final HomePage getHomePage(){ + return homePage; } /** - * @return The homePageURI. + *

Change the whole behavior of the TAP home page.

+ * + *

Note: + * If the given resource is NULL, the default home page (i.e. {@link HomePage}) is set. + *

+ * + * @param newHomePageResource The new HOME PAGE resource for this TAP service. + * + * @since 2.0 + */ + public final void setHomePage(final HomePage newHomePageResource){ + if (newHomePageResource == null){ + if (homePage == null || !(homePage instanceof HomePage)) + homePage = new HomePage(this); + }else + homePage = newHomePageResource; + } + + /** + *

Get the URL or the file path of a custom home page.

+ * + *

The home page will be displayed when this resource is directly requested.

+ * + *

Note: + * This function has a sense only if the HOME PAGE resource of this TAP service + * is still the default home page (i.e. {@link HomePage}). + *

+ * + * @return URL or file path of the file to display as home page, + * or NULL if no custom home page has been specified. */ public final String getHomePageURI(){ return homePageURI; } - public final void setHomePageURI(String uri){ + /** + *

Set the URL or the file path of a custom home page.

+ * + *

The home page will be displayed when this resource is directly requested.

+ * + *

Note: + * This function has a sense only if the HOME PAGE resource of this TAP service + * is still the default home page (i.e. {@link HomePage}). + *

+ * + * @param uri URL or file path of the file to display as home page, or NULL to display the default home page. + */ + public final void setHomePageURI(final String uri){ homePageURI = (uri != null) ? uri.trim() : uri; if (homePageURI != null && homePageURI.length() == 0) homePageURI = null; } - public void init(ServletConfig config) throws ServletException{ - for(TAPResource res : resources.values()) - res.init(config); + /** + *

Get the MIME type of the custom home page.

+ * + *

By default, it is the same as the default home page: "text/html".

+ * + *

Note: + * This function has a sense only if the HOME PAGE resource of this TAP service + * is still the default home page (i.e. {@link HomePage}). + *

+ * + * @return MIME type of the custom home page. + */ + public final String getHomePageMimeType(){ + return homePageMimeType; } - public void destroy(){ - for(TAPResource res : resources.values()) - res.destroy(); + /** + *

Set the MIME type of the custom home page.

+ * + *

A NULL value will be considered as "text/html".

+ * + *

Note: + * This function has a sense only if the HOME PAGE resource of this TAP service + * is still the default home page (i.e. {@link HomePage}). + *

+ * + * @param mime MIME type of the custom home page. + */ + public final void setHomePageMimeType(final String mime){ + homePageMimeType = (mime == null || mime.trim().length() == 0) ? "text/html" : mime.trim(); } - public void executeRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException{ - response.setContentType("text/plain"); + /** + *

Generate a unique ID for the given request.

+ * + *

By default, a timestamp is returned.

+ * + * @param request Request whose an ID is asked. + * + * @return The ID of the given request. + * + * @since 2.0 + */ + protected synchronized String generateRequestID(final HttpServletRequest request){ + String id; + do{ + id = System.currentTimeMillis() + ""; + }while(lastRequestID != null && lastRequestID.startsWith(id)); + lastRequestID = id; + return id; + } - if (tapBaseURL == null) - setTAPBaseURL(request); + /** + *

Execute the given request in the TAP service by forwarding it to the appropriate resource.

+ * + *

Home page

+ *

+ * If the appropriate resource is the home page, the request is propagated to a {@link TAPResource} + * (by default {@link HomePage}) whose the resource name is "HOME PAGE". Once called, this resource + * displays directly the home page in the given response by calling. + * The default implementation of the default implementation ({@link HomePage}) takes several cases into account. + * Those are well documented in the Javadoc of {@link HomePage}. What you should know, is that sometimes it is + * using the following attributes of this class: {@link #getHomePage()}, {@link #getHomePageURI()}, {@link #getHomePageMimeType()}. + *

+ * + *

Error/Exception management

+ *

+ * Only this resource (the root) should write any errors in the response. For that, it catches any {@link Throwable} and + * write an appropriate message in the HTTP response. The format and the content of this message is designed by the {@link ServiceErrorWriter} + * set in this class. By changing it, it is then possible to change, for instance, the format of the error responses. + *

+ * + *

Request ID & Log

+ *

+ * Each request is identified by a unique identifier (see {@link #generateRequestID(HttpServletRequest)}). + * This ID is used only for logging purpose. Request and jobs/threads can then be associated more easily in the logs. + * Besides, every requests and their response are logged as INFO with this ID. + *

+ * + * @param request Request of the user to execute in this TAP service. + * @param response Object in which the result of the request must be written. + * + * @throws ServletException If any grave/fatal error occurs. + * @throws IOException If any error occurs while reading or writing from or into a stream (and particularly the given request or response). + */ + public void executeRequest(final HttpServletRequest request, final HttpServletResponse response) throws ServletException, IOException{ + if (request == null || response == null) + return; + + // Generate a unique ID for this request execution (for log purpose only): + final String reqID = generateRequestID(request); + if (request.getAttribute(UWS.REQ_ATTRIBUTE_ID) == null) + request.setAttribute(UWS.REQ_ATTRIBUTE_ID, reqID); - JobOwner owner = null; - String resourceName = null; + // Extract all parameters: + if (request.getAttribute(UWS.REQ_ATTRIBUTE_PARAMETERS) == null){ + try{ + request.setAttribute(UWS.REQ_ATTRIBUTE_PARAMETERS, getUWS().getRequestParser().parse(request)); + }catch(UWSException ue){ + getLogger().log(LogLevel.WARNING, "REQUEST_PARSER", "Can not extract the HTTP request parameters!", ue); + } + } + // Retrieve the resource path parts: + String[] resourcePath = (request.getPathInfo() == null) ? null : request.getPathInfo().split("/"); + String resourceName = (resourcePath == null || resourcePath.length < 1) ? 
"" : resourcePath[1].trim(); + + // Log the reception of the request, only if the asked resource is not UWS (because UWS is already logging the received request): + if (!resourceName.equalsIgnoreCase(ASync.RESOURCE_NAME)) + getLogger().logHttp(LogLevel.INFO, request, reqID, null, null); + + // Initialize the base URL of this TAP service by guessing it from the received request: + if (tapBaseURL == null){ + // initialize the base URL: + setTAPBaseURL(request); + // log the successful initialization: + getLogger().logUWS(LogLevel.INFO, this, "INIT", "TAP successfully initialized (" + tapBaseURL + ").", null); + } + + JobOwner user = null; try{ // Identify the user: - if (service.getUserIdentifier() != null) - owner = service.getUserIdentifier().extractUserId(new UWSUrl(request), request); - - String[] resourcePath = (request.getPathInfo() == null) ? null : request.getPathInfo().split("/"); - // Display the TAP Main Page: - if (resourcePath == null || resourcePath.length < 1){ - resourceName = "homePage"; - response.setContentType("text/html"); - writeHomePage(response.getWriter(), owner); + try{ + user = UWSToolBox.getUser(request, service.getUserIdentifier()); + }catch(UWSException ue){ + getLogger().logTAP(LogLevel.ERROR, null, "IDENT_USER", "Can not identify the HTTP request user!", ue); + throw new TAPException(ue); + } + + // Set the character encoding: + response.setCharacterEncoding(UWSToolBox.DEFAULT_CHAR_ENCODING); + + // Display the TAP Home Page: + if (resourceName.length() == 0){ + resourceName = homePage.getName(); + homePage.executeResource(request, response); } // or Display/Execute the selected TAP Resource: else{ - resourceName = resourcePath[1].trim().toLowerCase(); + // search for the corresponding resource: TAPResource res = resources.get(resourceName); + // if one is found, execute it: if (res != null) res.executeResource(request, response); + // otherwise, throw an error: else - errorWriter.writeError("This TAP service does not have a resource 
named \"" + resourceName + "\" !", ErrorType.TRANSIENT, HttpServletResponse.SC_NOT_FOUND, response, request, null, "Get a TAP resource"); + throw new TAPException("Unknown TAP resource: \"" + resourceName + "\"!", UWSException.NOT_IMPLEMENTED); } - service.getLogger().httpRequest(request, owner, resourceName, HttpServletResponse.SC_OK, "[OK]", null); - response.flushBuffer(); - }catch(IOException ioe){ - errorWriter.writeError(ioe, response, request, owner, (resourceName == null) ? "Writing the TAP home page" : ("Executing the TAP resource " + resourceName)); - }catch(UWSException ue){ - errorWriter.writeError(ue, response, request, owner, (resourceName == null) ? "Writing the TAP home page" : ("Executing the TAP resource " + resourceName)); - }catch(TAPException te){ - writeError(te, response); - }catch(Throwable t){ - errorWriter.writeError(t, response, request, owner, (resourceName == null) ? "Writing the TAP home page" : ("Executing the TAP resource " + resourceName)); - } - } - - public void writeHomePage(final PrintWriter writer, final JobOwner owner) throws IOException{ - // By default, list all available resources: - if (homePageURI == null){ - writer.println("TAP HOME PAGE

TAP HOME PAGE

Available resources:

    "); - for(TAPResource res : resources.values()) - writer.println("
  • " + res.getName() + "
  • "); - writer.println("
"); - } - // or Display the specified home page: - else{ - BufferedInputStream input = null; - try{ - input = new BufferedInputStream((new URL(homePageURI)).openStream()); - }catch(MalformedURLException mue){ - input = new BufferedInputStream(new FileInputStream(new File(homePageURI))); - } - if (input == null) - throw new IOException("Incorrect TAP home page URI !"); - byte[] buffer = new byte[255]; - int nbReads = 0; - while((nbReads = input.read(buffer)) > 0) - writer.print(new String(buffer, 0, nbReads)); - } - } - public void writeError(TAPException ex, HttpServletResponse response) throws ServletException, IOException{ - service.getLogger().error(ex); - response.reset(); - response.setStatus(ex.getHttpErrorCode()); - response.setContentType("text/xml"); - writeError(ex, response.getWriter()); - } - - protected void writeError(TAPException ex, PrintWriter output) throws ServletException, IOException{ - output.println(""); - output.println(""); - output.println("\t"); - - // Print the error: - output.println("\t\t"); - output.print("\t\t\t\t\t"); - - // Print the current date: - DateFormat dateFormat = new SimpleDateFormat(UWSJob.DEFAULT_DATE_FORMAT); - output.print("\t\t"); - - // Print the provider (if any): - if (service.getProviderName() != null){ - output.print("\t\t\n\t\t\t\n\t\t"); - }else - output.println("\" />"); - } - - // Print the query (if any): - if (ex.getQuery() != null){ - output.print("\t\t\n\t\t\t\t\t"); - } - - output.println("\t"); - output.println(""); - - output.flush(); - } + // Log the successful execution of the action, only if the asked resource is not UWS (because UWS is already logging the received request): + if (!resourceName.equalsIgnoreCase(ASync.RESOURCE_NAME)) + getLogger().logHttp(LogLevel.INFO, response, reqID, user, "Action \"" + resourceName + "\" successfully executed.", null); - public final boolean addResource(TAPResource newResource){ - if (newResource == null) - return false; - resources.put(newResource.getName(), 
newResource); - return true; - } - - public final boolean addResource(String resourceId, TAPResource newResource){ - if (newResource == null) - return false; - resources.put((resourceId == null) ? newResource.getName() : resourceId, newResource); - return true; - } - - public final int getNbResources(){ - return resources.size(); - } - - public final TAPResource getResource(String resourceId){ - return resources.get(resourceId); - } - - public final boolean hasResource(String resourceId){ - return resources.containsKey(resourceId); - } + }catch(IOException ioe){ + /* + * Any IOException thrown while writing the HTTP response is generally caused by a client abortion (intentional or timeout) + * or by a connection closed with the client for another reason. + * Consequently, a such error should not be considered as a real error from the server or the library: the request is + * canceled, and so the response is not expected. It is anyway not possible any more to send it (header and/or body) totally + * or partially. + * Nothing can solve this error. So the "error" is just reported as a simple information and theoretically the action + * executed when this error has been thrown is already stopped. + */ + getLogger().logHttp(LogLevel.INFO, response, reqID, user, "HTTP request aborted or connection with the client closed => the TAP resource \"" + resourceName + "\" has stopped and the body of the HTTP response can not have been partially or completely written!", null); - public final TAPResource removeResource(String resourceId){ - return resources.remove(resourceId); - } + }catch(TAPException te){ + /* + * Any known/"expected" TAP exception is logged but also returned to the HTTP client in an XML error document. + * Since the error is known, it is supposed to have already been logged with a full stack trace. Thus, there + * is no need to log again its stack trace...just its message is logged. 
+ */ + // Write the error in the response and return the appropriate HTTP status code: + errorWriter.writeError(te, response, request, reqID, user, resourceName); + // Log the error: + getLogger().logHttp(LogLevel.ERROR, response, reqID, user, "TAP resource \"" + resourceName + "\" execution FAILED with the error: \"" + te.getMessage() + "\"!", null); + + }catch(IllegalStateException ise){ + /* + * Any IllegalStateException that reaches this point, is supposed coming from a HttpServletResponse operation which + * has to reset the response buffer (e.g. resetBuffer(), sendRedirect(), sendError()). + * If this exception happens, the library tried to rewrite the HTTP response body with a message or a result, + * while this body has already been partially sent to the client. It is then no longer possible to change its content. + * Consequently, the error is logged as FATAL and a message will be appended at the end of the already submitted response + * to alert the HTTP client that an error occurs and the response should not be considered as complete and reliable. + */ + // Write the error in the response and return the appropriate HTTP status code: + errorWriter.writeError(ise, response, request, reqID, user, resourceName); + // Log the error: + getLogger().logHttp(LogLevel.FATAL, response, reqID, user, "HTTP response already partially committed => the TAP resource \"" + resourceName + "\" has stopped and the body of the HTTP response can not have been partially or completely written!", (ise.getCause() != null) ? ise.getCause() : ise); - public boolean updateTAPMetadata(){ - TAPMetadata metadata = service.getTAPMetadata(); - if (metadata != null){ - resources.put(metadata.getName(), metadata); - return true; + }catch(Throwable t){ + /* + * Any other error is considered as unexpected if it reaches this point. Consequently, it has not yet been logged. + * So its stack trace will be fully logged, and an appropriate message will be returned to the HTTP client. 
The + * returned XML document should contain not too technical information which would be useless for the user. + */ + // Write the error in the response and return the appropriate HTTP status code: + errorWriter.writeError(t, response, request, reqID, user, resourceName); + // Log the error: + getLogger().logHttp(LogLevel.FATAL, response, reqID, user, "TAP resource \"" + resourceName + "\" execution FAILED with a GRAVE error!", t); + + }finally{ + // Notify the queue of the asynchronous jobs that a new connection may be available: + if (resourceName.equalsIgnoreCase(Sync.RESOURCE_NAME)) + getASync().freeConnectionAvailable(); } - return false; } } diff --git a/src/tap/resource/TAPResource.java b/src/tap/resource/TAPResource.java index 4a830c91d5ba0c6098f04d2b71733e52dc3b69d1..37938da4dc51c314c05de3247cba5a294e066673 100644 --- a/src/tap/resource/TAPResource.java +++ b/src/tap/resource/TAPResource.java @@ -16,7 +16,8 @@ package tap.resource; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.IOException; @@ -27,18 +28,77 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import tap.TAPException; -import uws.UWSException; +/** + *

List the common functions that a TAP resource must have. + * Basically, any TAP resource may be initialized, may be destroyed, must have a name and must execute a request provided by its TAP service.

+ * + *

Important note: + * It is strongly recommended that the name of the TAP resource is also provided through a public static attribute named "RESOURCE_NAME". + * If this attribute exists, its value must be the same as the one returned by {@link #getName()}. + *

+ * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (09/2014) + */ public interface TAPResource { + /** + * Let initialize this TAP resource. + * + * @param config Servlet configuration. (may be useful for the configuration of this resource) + * + * @throws ServletException If any error prevent the initialization of this TAP resource. In case a such exception is thrown, the service should stop immediately. + */ public void init(ServletConfig config) throws ServletException; + /** + * Let free properly all system/file/DB resources kept opened by this TAP resource. + */ public void destroy(); + /** + *

Let diffuse the base URL of the TAP service to all its TAP resources.

+ * + *

Important note: + * This function should be called just once: either at the creation of the service or when the first request is sent to the TAP service + * (in this case, the request is also used to finish the initialization of the TAP service, and of all its resources). + *

+ * + * @param baseURL Common URL/URI used in all requests sent by any user to the TAP service. + */ public void setTAPBaseURL(String baseURL); + /** + *

Get the name of this TAP resource.

+ * + *

Important note: + * This name MUST NOT be NULL and SHOULD NEVER change. + *

+ * + * @return Name of this TAP resource. + */ public String getName(); - public boolean executeResource(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException, TAPException, UWSException; + /** + *

Interpret the given request, execute the appropriate action and finally return a result or display information to the user.

+ * + *

IMPORTANT: + * "TAP resources can not take the law in their own hands!" :-) + * Errors that could occur inside this function should not be written directly in the given {@link HttpServletResponse}. + * They should be thrown to the resources executor: an instance of {@link TAP}, which + * will fill the {@link HttpServletResponse} with the error in the format described by the IVOA standard - VOTable. Besides, {@link TAP} may also + * add more information and may log the error (in function of this type). + *

+ * + * @param request Request sent by the user and which should be interpreted/executed here. + * @param response Response in which the result of the request must be written. + * + * @return true if the request has been successfully executed, false otherwise (but generally an exception will be sent if the request can't be executed). + * + * @throws IOException If any error occurs while writing the result of the given request. + * @throws TAPException If any other error occurs while interpreting and executing the request or by formating and writing its result. + */ + public boolean executeResource(HttpServletRequest request, HttpServletResponse response) throws IOException, TAPException; } diff --git a/src/tap/resource/VOSIResource.java b/src/tap/resource/VOSIResource.java index 758c8ad43a323133f86bc9ad1a278c91bd029c4e..216b55b1a5e87a0216ef9df4510e869059509ae3 100644 --- a/src/tap/resource/VOSIResource.java +++ b/src/tap/resource/VOSIResource.java @@ -16,15 +16,70 @@ package tap.resource; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ +/** + *

VOSI - VO Support Interfaces - lets describe a minimal interface that VO web services should provide.

+ * + *

+ * This interface aims to give information about the capabilities, the availability and the reliability of the service. + * To reach this goal the 3 following endpoints (resources) should be provided: + *

+ *
    + *
  1. Capability metadata: list all available resources, give their access URL and a standard ID (helping to identify the type of resource). + * More information related to the service itself (or about the VO standard it is implementing) may be provided.
  2. + * + *
  3. Availability metadata: indicate whether the service is available or not. It may also provide a note and some other information about + * its reliability, such as the date at which it is up, or since when it is down and when it will be back.
  4. + * + *
  5. Tables metadata: since some VO services deal with tabular data (in output, in input or queriable by a language like ADQL), + * a VOSI-compliant service shall provide a list and a description of them.
  6. + *
+ * + *

+ * Implementing the VOSI interface means that each service endpoint/resource must be described in the capability endpoint with an access URL and a standard VO ID. + *

+ * + *

The standard IDs of the VOSI endpoints are the following:

+ *
    + *
  • Capabilities: ivo://ivoa.net/std/VOSI#capabilities
  • + *
  • Availability: ivo://ivoa.net/std/VOSI#availability
  • + *
  • Tables: ivo://ivoa.net/std/VOSI#tables
  • + *
+ * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (09/2014) + */ public interface VOSIResource { + /** + * Get the capabilities of this resource. + * + * @return Resource capabilities. + */ public String getCapability(); + /** + * Get the URL which lets access this resource. + * + * @return Access URL. + */ public String getAccessURL(); + /** + *

Get the standardID of this endpoint of the VOSI interface.

+ * + *

The standard IDs of the VOSI endpoints are the following:

+ *
    + *
  • Capabilities: ivo://ivoa.net/std/VOSI#capabilities
  • + *
  • Availability: ivo://ivoa.net/std/VOSI#availability
  • + *
  • Tables: ivo://ivoa.net/std/VOSI#tables
  • + *
+ * + * @return Standard ID of this VOSI endpoint. + */ public String getStandardID(); } \ No newline at end of file diff --git a/src/tap/upload/LimitedSizeInputStream.java b/src/tap/upload/LimitedSizeInputStream.java index dd95527f390081201b96ffcfc597549cca557482..60acbe8a2c9ece299946d737f210501d9e13ecc4 100644 --- a/src/tap/upload/LimitedSizeInputStream.java +++ b/src/tap/upload/LimitedSizeInputStream.java @@ -16,7 +16,8 @@ package tap.upload; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.IOException; @@ -25,15 +26,34 @@ import java.security.InvalidParameterException; import com.oreilly.servlet.multipart.ExceededSizeException; +/** + * Let limit the number of bytes that can be read from a given input stream. + * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (08/2014) + */ public final class LimitedSizeInputStream extends InputStream { + /** Input stream whose the number of bytes that can be read must be limited. */ private final InputStream input; + /** Maximum number of bytes that can be read. */ private final long sizeLimit; + /** Number of bytes currently read. */ private long counter = 0; + /** Indicate whether the byte limit has already been reached. If true no more byte can be read ; + * all read(...) function will throw an {@link ExceededSizeException}. */ private boolean exceed = false; + /** + * Wrap the given input stream so that limiting the number of bytes that can be read. + * + * @param stream Stream to limit. + * @param sizeLimit Maximum number of bytes that can be read. If <=0 an {@link InvalidParameterException} will be thrown. + * + * @throws NullPointerException If the input stream is missing. 
+ */ public LimitedSizeInputStream(final InputStream stream, final long sizeLimit) throws NullPointerException{ if (stream == null) throw new NullPointerException("The given input stream is NULL !"); @@ -44,16 +64,45 @@ public final class LimitedSizeInputStream extends InputStream { this.sizeLimit = sizeLimit; } + /** + * Get the input stream wrapped by this instance of {@link LimitedSizeInputStream}. + * + * @return The wrapped input stream. + * @since 2.0 + */ + public final InputStream getInnerStream(){ + return input; + } + + /** + *

Update the number of bytes currently read and them check whether the limit has been exceeded. + * If the limit has been exceeded, an {@link ExceededSizeException} is thrown.

+ * + *

Besides, the flag {@link #exceed} is set to true in order to forbid the further reading of bytes.

+ * + * @param nbReads Number of bytes read. + * + * @throws ExceededSizeException If, after update, the limit of bytes has been exceeded. + */ private void updateCounter(final long nbReads) throws ExceededSizeException{ if (nbReads > 0){ counter += nbReads; if (counter > sizeLimit){ exceed = true; - throw new ExceededSizeException(); + throw new ExceededSizeException("Data read overflow: the limit of " + sizeLimit + " bytes has been reached!"); } } } + /** + *

Tell whether the limit has already been exceeded or not.

+ * + *

Note: + * If true is returned, no more read will be allowed, and any attempt to read a byte will throw an {@link ExceededSizeException}. + *

+ * + * @return true if the byte limit has been exceeded, false otherwise. + */ public final boolean sizeExceeded(){ return exceed; } @@ -98,17 +147,17 @@ public final class LimitedSizeInputStream extends InputStream { @Override public synchronized void mark(int readlimit) throws UnsupportedOperationException{ - throw new UnsupportedOperationException("mark() not supported in a LimitedSizeInputStream !"); + input.mark(readlimit); } @Override public boolean markSupported(){ - return false; + return input.markSupported(); } @Override public synchronized void reset() throws IOException, UnsupportedOperationException{ - throw new UnsupportedOperationException("mark() not supported in a LimitedSizeInputStream !"); + input.reset(); } } diff --git a/src/tap/upload/TableLoader.java b/src/tap/upload/TableLoader.java deleted file mode 100644 index e8b050206347c3fc54eeee9f571f46abeddd82e9..0000000000000000000000000000000000000000 --- a/src/tap/upload/TableLoader.java +++ /dev/null @@ -1,104 +0,0 @@ -package tap.upload; - -/* - * This file is part of TAPLibrary. - * - * TAPLibrary is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * TAPLibrary is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with TAPLibrary. If not, see . 
- * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) - */ - -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.io.InputStream; - -import java.net.MalformedURLException; -import java.net.URL; - -import java.security.InvalidParameterException; -import java.util.Enumeration; - -import com.oreilly.servlet.MultipartRequest; - -public class TableLoader { - private static final String URL_REGEXP = "^(https?|ftp)://[-a-zA-Z0-9+&@#/%?=~_|!:,.;]*[-a-zA-Z0-9+&@#/%=~_|]"; - private static final String PARAM_PREFIX = "param:"; - - public final String tableName; - private final URL url; - private final String param; - - private final File file; - - public TableLoader(final String name, final String value){ - this(name, value, (MultipartRequest)null); - } - - @SuppressWarnings("unchecked") - public TableLoader(final String name, final String uri, final MultipartRequest multipart){ - if (name == null || name.trim().isEmpty()) - throw new NullPointerException("A table name can not be NULL !"); - tableName = name.trim(); - - if (uri == null || uri.trim().isEmpty()) - throw new NullPointerException("The table URI can not be NULL !"); - String URI = uri.trim(); - if (URI.startsWith(PARAM_PREFIX)){ - if (multipart == null) - throw new InvalidParameterException("The URI scheme \"param\" can be used ONLY IF the VOTable is provided inside the HTTP request (multipart/form-data) !"); - else if (URI.length() <= PARAM_PREFIX.length()) - throw new InvalidParameterException("Incomplete URI (" + URI + "): empty parameter name !"); - url = null; - param = URI.substring(PARAM_PREFIX.length()).trim(); - - Enumeration enumeration = multipart.getFileNames(); - File foundFile = null; - while(foundFile == null && enumeration.hasMoreElements()){ - String fileName = enumeration.nextElement(); - if (fileName.equals(param)) - foundFile = multipart.getFile(fileName); - } - - if (foundFile == null) - throw new 
InvalidParameterException("Incorrect file reference (" + URI + "): the parameter \"" + param + "\" does not exist !"); - else - file = foundFile; - }else if (URI.matches(URL_REGEXP)){ - try{ - url = new URL(URI); - param = null; - file = null; - }catch(MalformedURLException mue){ - throw new InvalidParameterException(mue.getMessage()); - } - }else - throw new InvalidParameterException("Invalid table URI: \"" + URI + "\" !"); - } - - public InputStream openStream() throws IOException{ - if (url != null) - return url.openStream(); - else - return new FileInputStream(file); - } - - public boolean deleteFile(){ - if (file != null && file.exists()) - return file.delete(); - else - return false; - } - -} diff --git a/src/tap/upload/Uploader.java b/src/tap/upload/Uploader.java index 651a198c2b8a5b8c518766a47eb3fdccda73a809..27f7d8124f2549248a4f1efe927a4aab63eb49e3 100644 --- a/src/tap/upload/Uploader.java +++ b/src/tap/upload/Uploader.java @@ -16,8 +16,8 @@ package tap.upload; * You should have received a copy of the GNU Lesser General Public License * along with TAPLibrary. If not, see . 
* - * Copyright 2012-2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomisches Rechen Institute (ARI) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.IOException; @@ -26,119 +26,173 @@ import java.io.InputStream; import tap.ServiceConnection; import tap.ServiceConnection.LimitUnit; import tap.TAPException; +import tap.data.DataReadException; +import tap.data.LimitedTableIterator; +import tap.data.TableIterator; +import tap.data.VOTableIterator; import tap.db.DBConnection; -import tap.db.DBException; +import tap.metadata.TAPColumn; +import tap.metadata.TAPMetadata; +import tap.metadata.TAPMetadata.STDSchema; import tap.metadata.TAPSchema; import tap.metadata.TAPTable; -import tap.metadata.TAPTypes; -import tap.metadata.VotType; -import cds.savot.model.DataBinaryReader; -import cds.savot.model.FieldSet; -import cds.savot.model.SavotBinary; -import cds.savot.model.SavotField; -import cds.savot.model.SavotResource; -import cds.savot.model.SavotTR; -import cds.savot.model.SavotTableData; -import cds.savot.model.TRSet; -import cds.savot.pull.SavotPullEngine; -import cds.savot.pull.SavotPullParser; +import tap.parameters.DALIUpload; +import uws.UWSException; +import uws.service.file.UnsupportedURIProtocolException; import com.oreilly.servlet.multipart.ExceededSizeException; /** + *

Creates the given VOTable inputs properly in the "database".

* - * @author Grégory Mantelet (CDS;ARI) - gmantele@ari.uni-heidelberg.de - * @version 1.1 (03/2014) + *

+ * This class manages particularly the upload limit in rows and in bytes by creating a {@link LimitedTableIterator} + * with a {@link VOTableIterator}. + *

+ * + * @author Grégory Mantelet (CDS;ARI) + * @version 2.0 (04/2015) + * + * @see LimitedTableIterator + * @see VOTableIterator */ public class Uploader { - - protected final ServiceConnection service; - protected final DBConnection dbConn; - protected final int nbRowsLimit; - protected final int nbBytesLimit; - + /** Specification of the TAP service. */ + protected final ServiceConnection service; + /** Connection to the "database" (which lets upload the content of any given VOTable). */ + protected final DBConnection dbConn; + /** Description of the TAP_UPLOAD schema to use. + * @since 2.0 */ + protected final TAPSchema uploadSchema; + /** Type of limit to set: ROWS or BYTES. MAY be NULL ; if NULL, no limit will be set. */ + protected final LimitUnit limitUnit; + /** Limit on the number of rows or bytes (depending of {@link #limitUnit}) allowed to be uploaded in once (whatever is the number of tables). */ + protected final int limit; + + /** Number of rows already loaded. */ protected int nbRows = 0; - public Uploader(final ServiceConnection service, final DBConnection dbConn) throws TAPException{ + /** + * Build an {@link Uploader} object. + * + * @param service Specification of the TAP service using this uploader. + * @param dbConn A valid (open) connection to the "database". + * + * @throws TAPException If any error occurs while building this {@link Uploader}. + */ + public Uploader(final ServiceConnection service, final DBConnection dbConn) throws TAPException{ + this(service, dbConn, null); + } + + /** + * Build an {@link Uploader} object. + * + * @param service Specification of the TAP service using this uploader. + * @param dbConn A valid (open) connection to the "database". + * + * @throws TAPException If any error occurs while building this {@link Uploader}. 
+ * + * @since 2.0 + */ + public Uploader(final ServiceConnection service, final DBConnection dbConn, final TAPSchema uplSchema) throws TAPException{ + // NULL tests: if (service == null) throw new NullPointerException("The given ServiceConnection is NULL !"); if (dbConn == null) throw new NullPointerException("The given DBConnection is NULL !"); + // Set the service and database connections: this.service = service; - this.dbConn = dbConn; - if (service.uploadEnabled()){ - if (service.getUploadLimitType()[1] == LimitUnit.rows){ - nbRowsLimit = ((service.getUploadLimit()[1] > 0) ? service.getUploadLimit()[1] : -1); - nbBytesLimit = -1; + // Set the given upload schema: + if (uplSchema != null){ + if (!uplSchema.getADQLName().equalsIgnoreCase(TAPMetadata.STDSchema.UPLOADSCHEMA.label)) + throw new TAPException("Incorrect upload schema! Its ADQL name MUST be \"" + TAPMetadata.STDSchema.UPLOADSCHEMA.label + "\" ; here is is \"" + uplSchema.getADQLName() + "\".", UWSException.INTERNAL_SERVER_ERROR); + else + this.uploadSchema = uplSchema; + } + // ...or the default one: + else + this.uploadSchema = new TAPSchema(TAPMetadata.STDSchema.UPLOADSCHEMA.label, "Schema for tables uploaded by users."); + + // Ensure UPLOAD is allowed by the TAP service specification... + if (this.service.uploadEnabled()){ + // ...and set the rows or bytes limit: + if (this.service.getUploadLimitType()[1] != null && this.service.getUploadLimit()[1] >= 0){ + limit = (int)(this.service.getUploadLimitType()[1].bytesFactor() * this.service.getUploadLimit()[1]); + limitUnit = (this.service.getUploadLimitType()[1] == LimitUnit.rows) ? LimitUnit.rows : LimitUnit.bytes; }else{ - nbBytesLimit = ((service.getUploadLimit()[1] > 0) ? 
service.getUploadLimit()[1] : -1); - nbRowsLimit = -1; + limit = -1; + limitUnit = null; } }else throw new TAPException("Upload aborted: this functionality is disabled in this TAP service!"); } - public TAPSchema upload(final TableLoader[] loaders) throws TAPException{ - // Begin a DB transaction: - dbConn.startTransaction(); - - TAPSchema uploadSchema = new TAPSchema("TAP_UPLOAD"); + /** + *

Upload all the given VOTable inputs.

+ * + *

Note: + * The {@link TAPTable} objects representing the uploaded tables will be associated with the TAP_UPLOAD schema specified at the creation of this {@link Uploader}. + * If no such schema was specified, a default one (whose DB name will be equal to the ADQL name, that is to say {@link STDSchema#UPLOADSCHEMA}) + * is created, will be associated with the uploaded tables and will be returned by this function. + *

+ * + * @param uploads Array of tables to upload. + * + * @return A {@link TAPSchema} containing the list and the description of all uploaded tables. + * + * @throws TAPException If any error occurs while reading the VOTable inputs or while uploading the table into the "database". + * + * @see DBConnection#addUploadedTable(TAPTable, tap.data.TableIterator) + */ + public TAPSchema upload(final DALIUpload[] uploads) throws TAPException{ + TableIterator dataIt = null; InputStream votable = null; String tableName = null; - nbRows = 0; try{ - for(TableLoader loader : loaders){ - tableName = loader.tableName; - votable = loader.openStream(); + // Iterate over the full list of uploaded tables: + for(DALIUpload upl : uploads){ + tableName = upl.label; - if (nbBytesLimit > 0) - votable = new LimitedSizeInputStream(votable, nbBytesLimit); + // Open a stream toward the VOTable: + votable = upl.open(); - // start parsing the VOTable: - SavotPullParser parser = new SavotPullParser(votable, SavotPullEngine.SEQUENTIAL, null); + // Start reading the VOTable (with the identified limit, if any): + dataIt = new LimitedTableIterator(VOTableIterator.class, votable, limitUnit, limit); - SavotResource resource = parser.getNextResource(); - if (resource == null) - throw new TAPException("Incorrect VOTable format !"); + // Define the table to upload: + TAPColumn[] columns = dataIt.getMetadata(); + TAPTable table = new TAPTable(tableName); + table.setDBName(tableName + "_" + System.currentTimeMillis()); + for(TAPColumn col : columns) + table.addColumn(col); - FieldSet fields = resource.getFieldSet(0); + // Add the table to the TAP_UPLOAD schema: + uploadSchema.addTable(table); - // 1st STEP: Convert the VOTable metadata into DBTable: - TAPTable tapTable = fetchTableMeta(tableName, System.currentTimeMillis() + "", fields); - uploadSchema.addTable(tapTable); - - // 2nd STEP: Create the corresponding table in the database: - dbConn.createTable(tapTable); - - // 3rd STEP: Load rows into this 
table: - SavotBinary binary = resource.getData(0).getBinary(); - if (binary != null) - loadTable(tapTable, fields, binary); - else - loadTable(tapTable, fields, resource.getData(0).getTableData()); + // Create and fill the corresponding table in the database: + dbConn.addUploadedTable(table, dataIt); + // Close the VOTable stream: + dataIt.close(); votable.close(); + votable = null; } - }catch(DBException dbe){ - dbConn.cancelTransaction(); // ROLLBACK - throw dbe; - }catch(ExceededSizeException ese){ - dbConn.cancelTransaction(); // ROLLBACK - throw new TAPException("Upload limit exceeded ! You can upload at most " + ((nbBytesLimit > 0) ? (nbBytesLimit + " bytes.") : (nbRowsLimit + " rows."))); - }catch(IOException ioe){ - dbConn.cancelTransaction(); // ROLLBACK - throw new TAPException("Error while reading the VOTable of \"" + tableName + "\" !", ioe); - }catch(NullPointerException npe){ - dbConn.cancelTransaction(); // ROLLBACK - if (votable != null && votable instanceof LimitedSizeInputStream) - throw new TAPException("Upload limit exceeded ! You can upload at most " + ((nbBytesLimit > 0) ? 
(nbBytesLimit + " bytes.") : (nbRowsLimit + " rows."))); + }catch(DataReadException dre){ + if (dre.getCause() instanceof ExceededSizeException) + throw dre; else - throw new TAPException(npe); + throw new TAPException("Error while reading the VOTable \"" + tableName + "\": " + dre.getMessage(), dre, UWSException.BAD_REQUEST); + }catch(IOException ioe){ + throw new TAPException("IO error while reading the VOTable of \"" + tableName + "\"!", ioe); + }catch(UnsupportedURIProtocolException e){ + throw new TAPException("URI error while trying to open the VOTable of \"" + tableName + "\"!", e); }finally{ try{ + if (dataIt != null) + dataIt.close(); if (votable != null) votable.close(); }catch(IOException ioe){ @@ -146,77 +200,8 @@ public class Uploader { } } - // Commit modifications: - try{ - dbConn.endTransaction(); - }finally{ - dbConn.close(); - } - + // Return the TAP_UPLOAD schema (containing just the description of the uploaded tables): return uploadSchema; } - private TAPTable fetchTableMeta(final String tableName, final String userId, final FieldSet fields){ - TAPTable tapTable = new TAPTable(tableName); - tapTable.setDBName(tableName + "_" + userId); - - for(int j = 0; j < fields.getItemCount(); j++){ - SavotField field = (SavotField)fields.getItemAt(j); - int arraysize = TAPTypes.NO_SIZE; - if (field.getArraySize() == null || field.getArraySize().trim().isEmpty()) - arraysize = 1; - else if (field.getArraySize().equalsIgnoreCase("*")) - arraysize = TAPTypes.STAR_SIZE; - else{ - try{ - arraysize = Integer.parseInt(field.getArraySize()); - }catch(NumberFormatException nfe){ - service.getLogger().warning("Invalid array-size in the uploaded table \"" + tableName + "\" for the field \"" + field.getName() + "\": \"" + field.getArraySize() + "\" ! 
It will be considered as \"*\" !"); - } - } - tapTable.addColumn(field.getName(), field.getDescription(), field.getUnit(), field.getUcd(), field.getUtype(), new VotType(field.getDataType(), arraysize, field.getXtype()), false, false, false); - } - - return tapTable; - } - - private int loadTable(final TAPTable tapTable, final FieldSet fields, final SavotBinary binary) throws TAPException, ExceededSizeException{ - // Read the raw binary data: - DataBinaryReader reader = null; - try{ - reader = new DataBinaryReader(binary.getStream(), fields, false); - while(reader.next()){ - if (nbRowsLimit > 0 && nbRows >= nbRowsLimit) - throw new ExceededSizeException(); - dbConn.insertRow(reader.getTR(), tapTable); - nbRows++; - } - }catch(ExceededSizeException ese){ - throw ese; - }catch(IOException se){ - throw new TAPException("Error while reading the binary data of the VOTable of \"" + tapTable.getADQLName() + "\" !", se); - }finally{ - try{ - if (reader != null) - reader.close(); - }catch(IOException ioe){ - ; - } - } - - return nbRows; - } - - private int loadTable(final TAPTable tapTable, final FieldSet fields, final SavotTableData data) throws TAPException, ExceededSizeException{ - TRSet rows = data.getTRs(); - for(int i = 0; i < rows.getItemCount(); i++){ - if (nbRowsLimit > 0 && nbRows >= nbRowsLimit) - throw new ExceededSizeException(); - dbConn.insertRow((SavotTR)rows.getItemAt(i), tapTable); - nbRows++; - } - - return nbRows; - } - } diff --git a/src/uws/ClientAbortException.java b/src/uws/ClientAbortException.java new file mode 100644 index 0000000000000000000000000000000000000000..df8a9cdaea7382fb01fc8473943abe23e0fe037b --- /dev/null +++ b/src/uws/ClientAbortException.java @@ -0,0 +1,56 @@ +package uws; + +/* + * This file is part of UWSLibrary. 
+ * + * UWSLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * UWSLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with UWSLibrary. If not, see . + * + * Copyright 2015 - Astronomisches Rechen Institut (ARI) + */ + +import java.io.IOException; + +/** + *

Exception which occurs when the connection between the HTTP client and a servlet has been unexpectedly closed.

+ * + *

+ * In such a situation Tomcat and JBoss throw a class extending {@link IOException} and also named ClientAbortException. + * Jetty just throws a simple {@link IOException} with an appropriate message. And so, other servlet + * containers may throw a similar exception when a client-server connection is closed. This implementation + * of ClientAbortException provided by the library aims to signal this error in a unified way, with a single + * {@link IOException}, whatever the underlying servlet container is. + *

+ * + *

Note: + * Instead of this exception any IOException thrown by an {@link java.io.OutputStream} or a {@link java.io.PrintWriter} + * which has been provided by an {@link javax.servlet.http.HttpServletResponse} should be considered as an abortion of + * the HTTP client. + *

+ * + * @author Grégory Mantelet (ARI) + * @version 4.1 (04/2015) + * @since 4.1 + */ +public class ClientAbortException extends IOException { + private static final long serialVersionUID = 1L; + + public ClientAbortException(){ + super(); + } + + public ClientAbortException(final IOException ioe){ + super(ioe); + } + +} diff --git a/src/uws/ISO8601Format.java b/src/uws/ISO8601Format.java new file mode 100644 index 0000000000000000000000000000000000000000..fab1cc0764870375c9b4af55745a039d5830d940 --- /dev/null +++ b/src/uws/ISO8601Format.java @@ -0,0 +1,342 @@ +package uws; + +import java.text.DecimalFormat; +import java.text.ParseException; +import java.util.Calendar; +import java.util.Date; +import java.util.GregorianCalendar; +import java.util.TimeZone; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + *

Lets you format and parse dates expressed in ISO8601 format.

+ * + *

Date formatting

+ * + *

+ * Dates are formatted using the following format: "yyyy-MM-dd'T'hh:mm:ss'Z'" if in UTC or "yyyy-MM-dd'T'hh:mm:ss[+|-]hh:mm" otherwise. + * On the contrary to the time zone, by default the number of milliseconds is not displayed. However, when displayed, the format is: + * "yyyy-MM-dd'T'hh:mm:ss.sss'Z'" if in UTC or "yyyy-MM-dd'T'hh:mm:ss.sss[+|-]hh:mm" otherwise. + * + * + *

+ * As said previously, it is possible to display or to hide the time zone and the milliseconds. This can be easily done by changing + * the value of the static attributes {@link #displayTimeZone} and {@link #displayMilliseconds}. By default {@link #displayTimeZone} is true + * and {@link #displayMilliseconds} is false. + * + * + *

+ * By default the date will be formatted in the local time zone. But this could be specified either in the format function {@link #format(long, String, boolean, boolean)} + * or by changing the static attribute {@link #targetTimeZone}. The time zone must be specified with its ID. The list of all available time zone IDs is given by + * {@link TimeZone#getAvailableIDs()}. + *

+ * + *

Date parsing

+ * + *

+ * This class is able to parse dates - with the function {@link #parse(String)} - formatted strictly in ISO8601 + * but is also more permissive. Particularly, separators (like '-' and ':') are optional. The date and time separator + * ('T') can be replaced by a space. + *

+ * + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (10/2014) + * @since 4.1 + */ +public class ISO8601Format { + + /** Indicate whether any date formatted with this class displays the time zone. */ + public static boolean displayTimeZone = false; + /** Indicate whether any date formatted with this class displays the milliseconds. */ + public static boolean displayMilliseconds = false; + /** Indicate the time zone in which the date and time should be formatted (whatever is the time zone of the given date). */ + public static String targetTimeZone = "UTC"; // for the local time zone: TimeZone.getDefault().getID(); + + /** Object to use to format numbers with two digits (ie. 12, 02, 00). */ + protected final static DecimalFormat twoDigitsFmt = new DecimalFormat("00"); + /** Object to use to format numbers with three digits (ie. 001, 000, 123). */ + protected final static DecimalFormat threeDigitsFmt = new DecimalFormat("000"); + + /** + *

Format the given date-time in ISO8601 format.

+ * + *

Note: + * This function is equivalent to {@link #format(long, String, boolean, boolean)} with the following parameters: + * d, ISO8601Format.targetTimeZone, ISO8601Format.displayTimeZone, ISO8601Format.displayMilliseconds. + *

+ * + * @param date Date-time. + * + * @return Date formatted in ISO8601. + */ + public static String format(final Date date){ + return format(date.getTime(), targetTimeZone, displayTimeZone, displayMilliseconds); + } + + /** + *

Format the given date-time in ISO8601 format.

+ * + *

Note: + * This function is equivalent to {@link #format(long, String, boolean, boolean)} with the following parameters: + * d, ISO8601Format.targetTimeZone, ISO8601Format.displayTimeZone, ISO8601Format.displayMilliseconds. + *

+ * + * @param date Date-time in milliseconds (from the 1st January 1970 ; this value is returned by java.util.Date#getTime()). + * + * @return Date formatted in ISO8601. + */ + public static String format(final long date){ + return format(date, targetTimeZone, displayTimeZone, displayMilliseconds); + } + + /** + *

Convert the given date-time in the given time zone and format it in ISO8601 format.

+ * + *

Note: + * This function is equivalent to {@link #format(long, String, boolean, boolean)} with the following parameters: + * d, ISO8601Format.targetTimeZone, withTimeZone, ISO8601Format.displayMilliseconds. + *

+ * + * @param date Date-time in milliseconds (from the 1st January 1970 ; this value is returned by java.util.Date#getTime()). + * @param withTimeZone Target time zone. + * + * @return Date formatted in ISO8601. + */ + public static String format(final long date, final boolean withTimeZone){ + return format(date, targetTimeZone, withTimeZone, displayMilliseconds); + } + + /** + *

Convert the given date-time in UTC and format it in ISO8601 format.

+ * + *

Note: + * This function is equivalent to {@link #format(long, String, boolean, boolean)} with the following parameters: + * d, "UTC", ISO8601Format.displayTimeZone, ISO8601Format.displayMilliseconds. + *

+ * + * @param date Date-time in milliseconds (from the 1st January 1970 ; this value is returned by java.util.Date#getTime()). + * + * @return Date formatted in ISO8601. + */ + public static String formatInUTC(final long date){ + return format(date, "UTC", displayTimeZone, displayMilliseconds); + } + + /** + *

Convert the given date-time in UTC and format it in ISO8601 format.

+ * + *

Note: + * This function is equivalent to {@link #format(long, String, boolean, boolean)} with the following parameters: + * d, "UTC", withTimeZone, ISO8601Format.displayMilliseconds. + *

+ * + * @param date Date-time in milliseconds (from the 1st January 1970 ; this value is returned by java.util.Date#getTime()). + * @param withTimeZone Target time zone. + * + * @return Date formatted in ISO8601. + */ + public static String formatInUTC(final long date, final boolean withTimeZone){ + return format(date, "UTC", withTimeZone, displayMilliseconds); + } + + /** + * Convert the given date in the given time zone and format it in ISO8601 format, with or without displaying the time zone + * and/or the milliseconds field. + * + * @param date Date-time in milliseconds (from the 1st January 1970 ; this value is returned by java.util.Date#getTime()). + * @param targetTimeZone Target time zone. + * @param withTimeZone true to display the time zone, false otherwise. + * @param withMillisec true to display the milliseconds, false otherwise. + * + * @return Date formatted in ISO8601. + */ + protected static String format(final long date, final String targetTimeZone, final boolean withTimeZone, final boolean withMillisec){ + GregorianCalendar cal = new GregorianCalendar(); + cal.setTimeInMillis(date); + + // Convert the given date in the target Time Zone: + if (targetTimeZone != null && targetTimeZone.length() > 0) + cal.setTimeZone(TimeZone.getTimeZone(targetTimeZone)); + else + cal.setTimeZone(TimeZone.getTimeZone(ISO8601Format.targetTimeZone)); + + StringBuffer buf = new StringBuffer(); + + // Date with format yyyy-MM-dd : + buf.append(cal.get(Calendar.YEAR)).append('-'); + buf.append(twoDigitsFmt.format(cal.get(Calendar.MONTH) + 1)).append('-'); + buf.append(twoDigitsFmt.format(cal.get(Calendar.DAY_OF_MONTH))); + + // Time with format 'T'HH:mm:ss : + buf.append('T').append(twoDigitsFmt.format(cal.get(Calendar.HOUR_OF_DAY))).append(':'); + buf.append(twoDigitsFmt.format(cal.get(Calendar.MINUTE))).append(':'); + buf.append(twoDigitsFmt.format(cal.get(Calendar.SECOND))); + if (withMillisec){ + 
buf.append('.').append(threeDigitsFmt.format(cal.get(Calendar.MILLISECOND))); + } + + // Time zone with format (+|-)HH:mm : + if (withTimeZone){ + int tzOffset = (cal.get(Calendar.ZONE_OFFSET) + cal.get(Calendar.DST_OFFSET)) / (60 * 1000); // offset in minutes + boolean negative = (tzOffset < 0); + if (negative) + tzOffset *= -1; + int hours = tzOffset / 60, minutes = tzOffset - (hours * 60); + if (hours == 0 && minutes == 0) + buf.append('Z'); + else{ + buf.append(negative ? '-' : '+'); + buf.append(twoDigitsFmt.format(hours)).append(':'); + buf.append(twoDigitsFmt.format(minutes)); + } + } + + return buf.toString(); + } + + /** + *

Parse the given date expressed using the ISO8601 format ("yyyy-MM-dd'T'hh:mm:ss.sssZ" + * or "yyyy-MM-dd'T'hh:mm:ss.sssZ[+|-]hh:mm:ss").

+ * + *

+ * The syntax of the given date may be more or less strict. Particularly, separators like '-' and ':' are optional. + * Besides the date and time separator ('T') may be replaced by a space. + *

+ * + *

+ * The minimum allowed string is the date: "yyyy-MM-dd". All other date-time fields are optional, + * BUT, the time zone can be given without the time. + *

+ * + *

+ * If no time zone is specified (by a 'Z' or a time offset), the time zone in which the date is expressed + * is supposed to be the local one. + *

+ * + *

Note: + * This function is equivalent to {@link #parse(String)}, but whose the returned value is used to create a Date object, like this: + * return new Date(parse(strDate)). + *

+ * + * @param strDate Date expressed as a string in ISO8601 format. + * + * @return Parsed date (expressed in milliseconds from the 1st January 1970 ; + * a date can be easily built with this number using {@link java.util.Date#Date(long)}). + * + * @throws ParseException If the given date is not expressed in ISO8601 format or is not merely parseable with this implementation. + */ + public final static Date parseToDate(final String strDate) throws ParseException{ + return new Date(parse(strDate)); + } + + /** + *

Parse the given date expressed using the ISO8601 format ("yyyy-MM-dd'T'hh:mm:ss.sssZ" + * or "yyyy-MM-dd'T'hh:mm:ss.sssZ[+|-]hh:mm:ss").

+ * + *

+ * The syntax of the given date may be more or less strict. Particularly, separators like '-' and ':' are optional. + * Besides the date and time separator ('T') may be replaced by a space. + *

+ * + *

+ * The minimum allowed string is the date: "yyyy-MM-dd". All other date-time fields are optional, + * BUT, the time zone can be given without the time. + *

+ * + *

+ * If no time zone is specified (by a 'Z' or a time offset), the time zone in which the date is expressed + * is supposed to be the local one. + *

+ * + * @param strDate Date expressed as a string in ISO8601 format. + * + * @return Parsed date (expressed in milliseconds from the 1st January 1970 ; + * a date can be easily built with this number using {@link java.util.Date#Date(long)}). + * + * @throws ParseException If the given date is not expressed in ISO8601 format or is not merely parseable with this implementation. + */ + public static long parse(final String strDate) throws ParseException{ + Pattern p = Pattern.compile("(\\d{4})-?(\\d{2})-?(\\d{2})([T| ](\\d{2}):?(\\d{2}):?(\\d{2})(\\.(\\d+))?(Z|([\\+|\\-])(\\d{2}):?(\\d{2})(:?(\\d{2}))?)?)?"); + /* + * With this regular expression, we will get the following groups: + * + * ( 0: everything) + * 1: year (yyyy) + * 2: month (MM) + * 3: day (dd) + * ( 4: the full time part) + * 5: hours (hh) + * 6: minutes (mm) + * 7: seconds (ss) + * ( 8: the full ms part) + * 9: milliseconds (sss) + * (10: the full time zone part: 'Z' or the applied time offset) + * 11: sign of the offset ('+' if an addition was applied, '-' if it was a subtraction) + * 12: applied hours offset (hh) + * 13: applied minutes offset (mm) + * (14: the full seconds offset) + * 15: applied seconds offset (ss) + * + * Groups in parenthesis should be ignored ; but an exception must be done for the 10th which may contain 'Z' meaning a UTC time zone. + * + * All groups from the 4th (included) are optional. If not filled, an optional group is set to NULL. + * + * This regular expression is more permissive than the strict definition of the ISO8601 format. Particularly, separator characters + * ('-', 'T' and ':') are optional and it is possible to specify seconds in the time zone offset. + */ + + Matcher m = p.matcher(strDate); + if (m.matches()){ + Calendar cal = new GregorianCalendar(); + + // Set the time zone: + /* + * Note: In this library, we suppose that any date provided without specified time zone, is in UTC. 
+ * + * It is more a TAP specification than a UWS one ; see the REC-TAP 1.0 at section 2.3.4 (page 15): + * "Within the ADQL query, the service must support the use of timestamp values in + * ISO8601 format, specifically yyyy-MM-dd['T'HH:mm:ss[.SSS]], where square + * brackets denote optional parts and the 'T' denotes a single character separator + * (T) between the date and time parts." + * + * ...and 2.5 (page 20): + * "TIMESTAMP values are specified using ISO8601 format without a timezone (as in 2.3.4 ) and are assumed to be in UTC." + */ + cal.setTimeZone(TimeZone.getTimeZone("UTC")); + + // Set the date: + cal.set(Calendar.DAY_OF_MONTH, twoDigitsFmt.parse(m.group(3)).intValue()); + cal.set(Calendar.MONTH, twoDigitsFmt.parse(m.group(2)).intValue() - 1); + cal.set(Calendar.YEAR, Integer.parseInt(m.group(1))); + + // Set the time: + if (m.group(4) != null){ + cal.set(Calendar.HOUR_OF_DAY, twoDigitsFmt.parse(m.group(5)).intValue()); + cal.set(Calendar.MINUTE, twoDigitsFmt.parse(m.group(6)).intValue()); + cal.set(Calendar.SECOND, twoDigitsFmt.parse(m.group(7)).intValue()); + if (m.group(9) != null) + cal.set(Calendar.MILLISECOND, twoDigitsFmt.parse(m.group(9)).intValue()); + else + cal.set(Calendar.MILLISECOND, 0); + }else{ + cal.set(Calendar.HOUR_OF_DAY, 0); + cal.set(Calendar.MINUTE, 0); + cal.set(Calendar.SECOND, 0); + cal.set(Calendar.MILLISECOND, 0); + } + + // Compute and apply the offset: + if (m.group(10) != null && !m.group(10).equals("Z")){ + int sign = (m.group(11).equals("-") ? 1 : -1); + cal.add(Calendar.HOUR_OF_DAY, sign * twoDigitsFmt.parse(m.group(12)).intValue()); + cal.add(Calendar.MINUTE, sign * twoDigitsFmt.parse(m.group(13)).intValue()); + if (m.group(15) != null) + cal.add(Calendar.SECOND, sign * twoDigitsFmt.parse(m.group(15)).intValue()); + } + + return cal.getTimeInMillis(); + }else + throw new ParseException("Invalid date format: \"" + strDate + "\"! 
An ISO8601 date was expected.", 0); + } +} diff --git a/src/uws/UWSException.java b/src/uws/UWSException.java index 32aed8cc495cdca8a3994b4784153f69bce98eae..1128170787158f9fce238957481b1f091404d25b 100644 --- a/src/uws/UWSException.java +++ b/src/uws/UWSException.java @@ -16,23 +16,30 @@ package uws; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import uws.job.ErrorType; /** - * Any exception returned by a class of the UWS pattern may be associated with - * an HTTP error code (like: 404, 303, 500) and a UWS error type. + *

Any exception returned by a class of the UWS pattern may be associated with + * an HTTP error code (like: 404, 303, 500) and a UWS error type.

* - * @author Grégory Mantelet (CDS) - * @version 12/2010 + *

+ * Any error reported with this kind of exception will (in the most of cases) interrupt a UWS action, + * by reporting an error related with the UWS usage. + *

+ * + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (09/2014) */ public class UWSException extends Exception { private static final long serialVersionUID = 1L; // SUCCESS codes: public final static int OK = 200; + public final static int ACCEPTED_BUT_NOT_COMPLETE = 202; public final static int NO_CONTENT = 204; // REDIRECTION codes: @@ -61,69 +68,136 @@ public class UWSException extends Exception { /* ************ */ /* CONSTRUCTORS */ /* ************ */ + /** + * Exception in the general context of UWS. + * + * @param msg Error message to display. + */ public UWSException(String msg){ - this(msg, ErrorType.FATAL); + this(msg, null); } + /** + * Exception that occurs in the general context of UWS, and with the specified error type (FATAL or TRANSIENT). + * + * @param msg Error message to display. + * @param type Type of the error (FATAL or TRANSIENT). Note: If NULL, it will be considered as FATAL. + */ public UWSException(String msg, ErrorType type){ super(msg); - if (type != null) - errorType = type; + this.errorType = (type == null) ? ErrorType.FATAL : type; } + /** + * Exception that occurs in the general context of UWS because the given exception has been thrown. + * + * @param t The thrown (and so caught) exception. + */ public UWSException(Throwable t){ - this(t, ErrorType.FATAL); + this(t, null); } + /** + * Exception with the given type that occurs in the general context of UWS + * because the given exception has been thrown. + * + * @param t The thrown (and so caught) exception. + * @param type Type of the error (FATAL or TRANSIENT). Note: If NULL, it will be considered as FATAL. + */ public UWSException(Throwable t, ErrorType type){ super(t); - if (type != null) - errorType = type; + this.errorType = (type == null) ? ErrorType.FATAL : type; } + /** + * Exception that occurs in the general context of UWS and which should return the given HTTP error code. + * + * @param httpError HTTP error code to return. + * @param msg Error message to display. 
+ */ public UWSException(int httpError, String msg){ - this(msg); - if (httpError >= 0) - httpErrorCode = httpError; + this(httpError, msg, null); } + /** + * Exception that occurs in the general context of UWS, with the given type and which should return the given HTTP error code. + * + * @param httpError HTTP error code to return. + * @param msg Error message to display. + * @param type Type of the error (FATAL or TRANSIENT). Note: If NULL, it will be considered as FATAL. + */ public UWSException(int httpError, String msg, ErrorType type){ this(msg, type); - if (httpError >= 0) - httpErrorCode = httpError; + this.httpErrorCode = (httpError < 0) ? NOT_FOUND : httpError; } + /** + * Exception that occurs in the general context of UWS, + * because the given exception has been thrown and that which should return the given HTTP error status. + * + * @param httpError HTTP error code to return. + * @param t The thrown (and so caught) exception. + */ public UWSException(int httpError, Throwable t){ - this(t); - if (httpError >= 0) - httpErrorCode = httpError; + this(httpError, t, (t != null) ? t.getMessage() : null, null); } + /** + * Exception that occurs in the general context of UWS with the given error type, + * because the given exception has been thrown and that which should return the given HTTP error status. + * + * @param httpError HTTP error code to return. + * @param t The thrown (and so caught) exception. + * @param type Type of the error (FATAL or TRANSIENT). Note: If NULL, it will be considered as FATAL. + */ public UWSException(int httpError, Throwable t, ErrorType type){ - this(t, type); - if (httpError >= 0) - httpErrorCode = httpError; + this(httpError, t, (t != null) ? t.getMessage() : null, type); } + /** + * Exception that occurs in the general context of UWS, + * because the given exception has been thrown and that which should return the given HTTP error status. + * + * @param httpError HTTP error code to return. 
+ * @param t The thrown (and so caught) exception. + * @param msg Error message to display. + */ public UWSException(int httpError, Throwable t, String msg){ - this(httpError, t, msg, ErrorType.FATAL); + this(httpError, t, msg, null); } + /** + * Exception that occurs in the general context of UWS, + * because the given exception has been thrown and that which should return the given HTTP error status. + * + * @param httpError HTTP error code to return. + * @param t The thrown (and so caught) exception. + * @param msg Error message to display. + * @param type Type of the error (FATAL or TRANSIENT). Note: If NULL, it will be considered as FATAL. + */ public UWSException(int httpError, Throwable t, String msg, ErrorType type){ super(msg, t); - if (httpError >= 0) - httpErrorCode = httpError; - if (type != null) - errorType = type; + this.httpErrorCode = (httpError < 0) ? NOT_FOUND : httpError; + this.errorType = (type == null) ? ErrorType.FATAL : type; } /* ******* */ /* GETTERS */ /* ******* */ + /** + * Get the HTTP error code that should be returned. + * + * @return The corresponding HTTP error code. + */ public int getHttpErrorCode(){ return httpErrorCode; } + /** + * Get the type of this error (from the UWS point of view ; FATAL or TRANSIENT). + * + * @return Type of this error. + */ public ErrorType getUWSErrorType(){ return errorType; } diff --git a/src/uws/UWSExceptionFactory.java b/src/uws/UWSExceptionFactory.java index c2812736eccedb340ef7cfbc5d252a79aed5f8b5..c88fff2d2239b65b025ebc48e455387cf3519106 100644 --- a/src/uws/UWSExceptionFactory.java +++ b/src/uws/UWSExceptionFactory.java @@ -16,18 +16,18 @@ package uws; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . 
* - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import uws.job.ExecutionPhase; - import uws.job.user.JobOwner; /** * Let's creating the common exceptions of a UWS service. * - * @author Grégory Mantelet (CDS) - * @version 05/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (09/2014) * * @see UWSException */ @@ -49,164 +49,47 @@ public final class UWSExceptionFactory { return ((consequence == null || consequence.trim().length() > 0) ? "" : " => " + consequence); } - public final static UWSException missingJobListName(){ - return missingJobListName(null); - } - - public final static UWSException missingJobListName(final String consequence){ - return new UWSException(UWSException.BAD_REQUEST, "Missing job list name !" + appendMessage(consequence)); - } - - public final static UWSException incorrectJobListName(final String jlName){ - return incorrectJobListName(jlName, null); - } - - public final static UWSException incorrectJobListName(final String jlName, final String consequence){ - return new UWSException(UWSException.NOT_FOUND, "Incorrect job list name ! The jobs list " + jlName + " does not exist." + appendMessage(consequence)); - } - - public final static UWSException missingJobID(){ - return missingJobID(null); - } - - public final static UWSException missingJobID(final String consequence){ - return new UWSException(UWSException.BAD_REQUEST, "Missing job ID !" + appendMessage(consequence)); - } - - public final static UWSException incorrectJobID(String jobListName, String jobID){ - return incorrectJobID(jobListName, jobID, null); - } - - public final static UWSException incorrectJobID(final String jobListName, final String jobID, final String consequence){ - return new UWSException(UWSException.NOT_FOUND, "Incorrect job ID ! 
The job " + jobID + " does not exist in the jobs list " + jobListName + appendMessage(consequence)); - } - - public final static UWSException missingSerializer(final String mimeTypes){ - return missingSerializer(null); - } - - public final static UWSException missingSerializer(final String mimeTypes, final String consequence){ - return new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Missing UWS serializer for the MIME types: " + mimeTypes + " !" + appendMessage(consequence)); - } - - public final static UWSException incorrectJobParameter(final String jobID, final String paramName){ - return incorrectJobParameter(jobID, paramName, null); - } - - public final static UWSException incorrectJobParameter(final String jobID, final String paramName, final String consequence){ - return new UWSException(UWSException.NOT_FOUND, "Incorrect job parameter ! The parameter " + paramName + " does not exist in the job " + jobID + "." + appendMessage(consequence)); - } - - public final static UWSException incorrectJobResult(final String jobID, final String resultID){ - return incorrectJobResult(jobID, resultID, null); - } - - public final static UWSException incorrectJobResult(final String jobID, final String resultID, final String consequence){ - return new UWSException(UWSException.NOT_FOUND, "Incorrect result ID ! There is no result " + resultID + " in the job " + jobID + "." + appendMessage(consequence)); - } - - public final static UWSException noErrorSummary(final String jobID){ - return noErrorSummary(jobID, null); - } - - public final static UWSException noErrorSummary(final String jobID, final String consequence){ - return new UWSException(UWSException.NOT_FOUND, "There is no error summary in the job " + jobID + " !" 
+ appendMessage(consequence)); - } - - public final static UWSException incorrectPhaseTransition(final String jobID, final ExecutionPhase fromPhase, final ExecutionPhase toPhase){ + public final static String incorrectPhaseTransition(final String jobID, final ExecutionPhase fromPhase, final ExecutionPhase toPhase){ return incorrectPhaseTransition(jobID, fromPhase, toPhase, null); } - public final static UWSException incorrectPhaseTransition(final String jobID, final ExecutionPhase fromPhase, final ExecutionPhase toPhase, final String consequence){ - return new UWSException(UWSException.BAD_REQUEST, "Incorrect phase transition ! => the job " + jobID + " is in the phase " + fromPhase + ". It can not go to " + toPhase + "." + appendMessage(consequence)); - } - - public final static UWSException missingOutputStream(){ - return missingOutputStream(null); + public final static String incorrectPhaseTransition(final String jobID, final ExecutionPhase fromPhase, final ExecutionPhase toPhase, final String consequence){ + return "Incorrect phase transition ! => the job " + jobID + " is in the phase " + fromPhase + ". It can not go to " + toPhase + "." + appendMessage(consequence); } - public final static UWSException missingOutputStream(final String consequence){ - return new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Missing output stream !" + appendMessage(consequence)); - } - - public final static UWSException incorrectSerialization(final String serializationValue, final String serializationTarget){ - return incorrectSerialization(serializationValue, serializationTarget, null); - } - - public final static UWSException incorrectSerialization(final String serializationValue, final String serializationTarget, final String consequence){ - return new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Incorrect serialization value (=" + serializationValue + ") ! => impossible to serialize " + serializationTarget + "." 
+ appendMessage(consequence)); - } - - public final static UWSException readPermissionDenied(final JobOwner user, final boolean jobList, final String containerName){ + public final static String readPermissionDenied(final JobOwner user, final boolean jobList, final String containerName){ return readPermissionDenied(user, jobList, containerName, null); } - public final static UWSException readPermissionDenied(final JobOwner user, final boolean jobList, final String containerName, final String consequence){ - return new UWSException(UWSException.PERMISSION_DENIED, user.getID() + ((user.getPseudo() == null) ? "" : (" (alias " + user.getPseudo() + ")")) + " is not allowed to read the content of the " + (jobList ? "jobs list" : "job") + " \"" + containerName + "\" !" + appendMessage(consequence)); + public final static String readPermissionDenied(final JobOwner user, final boolean jobList, final String containerName, final String consequence){ + return user.getID() + ((user.getPseudo() == null) ? "" : (" (alias " + user.getPseudo() + ")")) + " is not allowed to read the content of the " + (jobList ? "jobs list" : "job") + " \"" + containerName + "\" !" + appendMessage(consequence); } - public final static UWSException writePermissionDenied(final JobOwner user, final boolean jobList, final String containerName){ + public final static String writePermissionDenied(final JobOwner user, final boolean jobList, final String containerName){ return writePermissionDenied(user, jobList, containerName, null); } - public final static UWSException writePermissionDenied(final JobOwner user, final boolean jobList, final String containerName, final String consequence){ - return new UWSException(UWSException.PERMISSION_DENIED, user.getID() + ((user.getPseudo() == null) ? "" : (" (alias " + user.getPseudo() + ")")) + " is not allowed to update the content of the " + (jobList ? "jobs list" : "job") + " \"" + containerName + "\" !" 
+ appendMessage(consequence)); + public final static String writePermissionDenied(final JobOwner user, final boolean jobList, final String containerName, final String consequence){ + return user.getID() + ((user.getPseudo() == null) ? "" : (" (alias " + user.getPseudo() + ")")) + " is not allowed to update the content of the " + (jobList ? "jobs list" : "job") + " \"" + containerName + "\" !" + appendMessage(consequence); } - public final static UWSException executePermissionDenied(final JobOwner user, final String jobID){ + public final static String executePermissionDenied(final JobOwner user, final String jobID){ return executePermissionDenied(user, jobID, null); } - public final static UWSException executePermissionDenied(final JobOwner user, final String jobID, final String consequence){ - return new UWSException(UWSException.PERMISSION_DENIED, user.getID() + ((user.getPseudo() == null) ? "" : (" (alias " + user.getPseudo() + ")")) + " is not allowed to execute/abort the job \"" + jobID + "\" !" + appendMessage(consequence)); + public final static String executePermissionDenied(final JobOwner user, final String jobID, final String consequence){ + return user.getID() + ((user.getPseudo() == null) ? "" : (" (alias " + user.getPseudo() + ")")) + " is not allowed to execute/abort the job \"" + jobID + "\" !" + appendMessage(consequence); } - public final static UWSException restoreJobImpossible(final Throwable t, final String cause){ - return restoreJobImpossible(t, cause, null); - } - - public final static UWSException restoreJobImpossible(final Throwable t, final String cause, final String consequence){ - return new UWSException(UWSException.INTERNAL_SERVER_ERROR, t, ((cause == null) ? "" : cause) + " Impossible to restore a job from the backup file(s)." 
+ appendMessage(consequence)); - } - - public final static UWSException restoreUserImpossible(final String cause){ - return restoreUserImpossible(null, cause, null); - } - - public final static UWSException restoreUserImpossible(final Throwable t, final String cause){ - return restoreUserImpossible(t, cause, null); - } - - public final static UWSException restoreUserImpossible(final Throwable t, final String cause, final String consequence){ - return new UWSException(UWSException.INTERNAL_SERVER_ERROR, t, ((cause == null) ? "" : cause) + " Impossible to restore a user from the backup file(s)." + appendMessage(consequence)); - } - - public final static UWSException jobModificationForbidden(final String jobId, final ExecutionPhase phase, final String parameter){ + public final static String jobModificationForbidden(final String jobId, final ExecutionPhase phase, final String parameter){ return jobModificationForbidden(jobId, phase, parameter, null); } - public final static UWSException jobModificationForbidden(final String jobId, final ExecutionPhase phase, final String parameter, final String consequence){ + public final static String jobModificationForbidden(final String jobId, final ExecutionPhase phase, final String parameter, final String consequence){ if (parameter != null && !parameter.trim().isEmpty()) - return new UWSException(UWSException.NOT_ALLOWED, "Impossible to change the parameter \"" + parameter + "\" of the job " + jobId + ((phase != null) ? (" (phase: " + phase + ")") : "") + " !" + appendMessage(consequence)); + return "Impossible to change the parameter \"" + parameter + "\" of the job " + jobId + ((phase != null) ? (" (phase: " + phase + ")") : "") + " !" + appendMessage(consequence); else - return new UWSException(UWSException.NOT_ALLOWED, "Impossible to change the parameters of the job " + jobId + ((phase != null) ? (" (phase: " + phase + ")") : "") + " !" 
+ appendMessage(consequence)); - } - - public final static UWSException badFormat(final String jobId, final String paramName, final String paramValue, final String valueClass, final String expectedFormat){ - return badFormat(jobId, paramName, paramValue, valueClass, expectedFormat, null); - } - - public final static UWSException badFormat(final String jobId, final String paramName, final String paramValue, final String valueClass, final String expectedFormat, final String consequence){ - String strExpected = ((expectedFormat != null && !expectedFormat.trim().isEmpty()) ? (" Expected: " + expectedFormat) : ""); - String strClass = ((valueClass != null && !valueClass.trim().isEmpty()) ? (" {an instance of " + valueClass + "}") : ""); - - if (paramName != null && !paramName.trim().isEmpty()){ - if (jobId != null && !jobId.trim().isEmpty()) - return new UWSException(UWSException.BAD_REQUEST, "Bad format for the parameter " + paramName.toUpperCase() + " of the job " + jobId + ": \"" + paramValue + "\"" + strClass + "." + strExpected + appendMessage(consequence)); - else - return new UWSException(UWSException.BAD_REQUEST, "Bad format for " + paramName + ": \"" + paramValue + "\"" + strClass + "." + strExpected + appendMessage(consequence)); - }else - return new UWSException(UWSException.BAD_REQUEST, "Bad format: \"" + paramValue + "\"" + strClass + "." + strExpected + appendMessage(consequence)); + return "Impossible to change the parameters of the job " + jobId + ((phase != null) ? (" (phase: " + phase + ")") : "") + " !" + appendMessage(consequence); } } diff --git a/src/uws/UWSToolBox.java b/src/uws/UWSToolBox.java index f6dc4f55373acd251c029d468050ac11427129d8..34bb1556a57cdadd65f2ac294de619bd3c5c03ba 100644 --- a/src/uws/UWSToolBox.java +++ b/src/uws/UWSToolBox.java @@ -16,7 +16,8 @@ package uws; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . 
* - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.File; @@ -24,36 +25,44 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.PrintWriter; - +import java.io.UnsupportedEncodingException; import java.net.MalformedURLException; import java.net.URL; - +import java.net.URLDecoder; +import java.net.URLEncoder; import java.util.Date; import java.util.Enumeration; import java.util.HashMap; import java.util.Map; import javax.servlet.ServletOutputStream; - import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import uws.job.ErrorSummary; import uws.job.UWSJob; - +import uws.job.user.JobOwner; +import uws.service.UWS; import uws.service.UWSUrl; - +import uws.service.UserIdentifier; import uws.service.log.DefaultUWSLog; import uws.service.log.UWSLog; +import uws.service.request.RequestParser; +import uws.service.request.UploadFile; /** * Some useful functions for the managing of a UWS service. * - * @author Grégory Mantelet (CDS) - * @version 05/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (04/2015) */ public class UWSToolBox { + /** + * Default character encoding for all HTTP response sent by this library. + * @since 4.1 */ + public final static String DEFAULT_CHAR_ENCODING = "UTF-8"; + private static UWSLog defaultLogger = null; /** THIS CLASS CAN'T BE INSTANTIATED ! */ @@ -111,9 +120,15 @@ public class UWSToolBox { /** *

Builds a map of strings with all parameters of the given HTTP request.

* - *

NOTE: - * it converts the Map<String, String[]> returned by {@link HttpServletRequest#getParameterMap()} - * into a Map<String, String> (the key is put in lower case). + *

Note: + * If the request attribute {@link UWS#REQ_ATTRIBUTE_PARAMETERS} has been already set by the UWS library, + * this map (after conversion into a Map) is returned. + * Otherwise, the parameters identified automatically by the Servlet are returned (just the last occurrence of each parameter is kept). + *

+ * + *

WARNING: + * This function does not extract directly the parameters from the request content. It is just returning those already extracted + * either by the Servlet or by a {@link RequestParser}. *

* * @param req The HTTP request which contains the parameters to extract. @@ -121,25 +136,58 @@ public class UWSToolBox { * @return The corresponding map of string. */ @SuppressWarnings("unchecked") - public static final HashMap getParamsMap(HttpServletRequest req){ - HashMap params = new HashMap(req.getParameterMap().size()); + public static final HashMap getParamsMap(final HttpServletRequest req){ + HashMap map = new HashMap(); + + /* If the attribute "PARAMETERS" has been already set by the UWS library, + * return it by casting it from Map into Map: */ + if (req.getAttribute(UWS.REQ_ATTRIBUTE_PARAMETERS) != null){ + try{ + // Get the extracted parameters: + Map params = (Map)req.getAttribute(UWS.REQ_ATTRIBUTE_PARAMETERS); + + // Transform the map of Objects into a map of Strings: + for(Map.Entry e : params.entrySet()){ + if (e.getValue() != null) + map.put(e.getKey(), e.getValue().toString()); + } - Enumeration e = req.getParameterNames(); - while(e.hasMoreElements()){ - String name = e.nextElement(); - params.put(name.toLowerCase(), req.getParameter(name)); + // Return the fetched map: + return map; + + }catch(Exception ex){ + map.clear(); + } } - return params; + /* If there is no "PARAMETERS" attribute or if an error occurs while reading it, + * return all the parameters fetched by the Servlet: */ + Enumeration names = req.getParameterNames(); + int i; + String n; + String[] values; + while(names.hasMoreElements()){ + n = names.nextElement(); + values = req.getParameterValues(n); + // search for the last non-null occurrence: + i = values.length - 1; + while(i >= 0 && values[i] == null) + i--; + // if there is one, keep it: + if (i >= 0) + map.put(n.toLowerCase(), values[i]); + } + return map; } /** * Converts map of UWS parameters into a string corresponding to the query part of a HTTP-GET URL (i.e. ?EXECUTIONDURATION=60&DESTRUCTION=2010-09-01T13:58:00:000-0200). * * @param parameters A Map of parameters. 
+ * * @return The corresponding query part of an HTTP-GET URL (all keys have been set in upper case). */ - public final static String getQueryPart(Map parameters){ + public final static String getQueryPart(final Map parameters){ if (parameters == null || parameters.isEmpty()) return ""; @@ -154,8 +202,10 @@ public class UWSToolBox { val = val.trim(); if (key != null && !key.isEmpty() && val != null && !val.isEmpty()){ - queryPart.append(e.getKey() + "=" + val); - queryPart.append("&"); + try{ + queryPart.append(URLEncoder.encode(e.getKey(), "UTF-8") + "=" + URLEncoder.encode(val, "UTF-8")); + queryPart.append("&"); + }catch(UnsupportedEncodingException uee){} } } @@ -166,6 +216,7 @@ public class UWSToolBox { * Converts the given query part of a HTTP-GET URL to a map of parameters. * * @param queryPart A query part of a HTTP-GET URL. + * * @return The corresponding map of parameters (all keys have been set in lower case). */ public final static Map getParameters(String queryPart){ @@ -180,8 +231,11 @@ public class UWSToolBox { if (keyValue.length == 2){ keyValue[0] = keyValue[0].trim().toLowerCase(); keyValue[1] = keyValue[1].trim(); - if (!keyValue[0].isEmpty() && !keyValue[1].isEmpty()) - parameters.put(keyValue[0].trim(), keyValue[1].trim()); + if (!keyValue[0].isEmpty() && !keyValue[1].isEmpty()){ + try{ + parameters.put(URLDecoder.decode(keyValue[0], "UTF-8"), URLDecoder.decode(keyValue[1], "UTF-8")); + }catch(UnsupportedEncodingException uee){} + } } } } @@ -190,6 +244,200 @@ public class UWSToolBox { return parameters; } + /** + *

Extract only the GET parameters from the given HTTP request and add them inside the given map.

+ * + *

Warning: + * If entries with the same key already exist in the map, they will overwritten. + *

+ * + * @param req The HTTP request whose the GET parameters must be extracted. + * @param parameters List of parameters to update. + * + * @return The same given parameters map (but updated with all found GET parameters). + * + * @since 4.1 + */ + public static final Map addGETParameters(final HttpServletRequest req, final Map parameters){ + String queryString = req.getQueryString(); + if (queryString != null){ + String[] params = queryString.split("&"); + int indSep; + for(String p : params){ + indSep = p.indexOf('='); + if (indSep >= 0){ + try{ + parameters.put(URLDecoder.decode(p.substring(0, indSep), "UTF-8"), URLDecoder.decode(p.substring(indSep + 1), "UTF-8")); + }catch(UnsupportedEncodingException uee){} + } + } + } + return parameters; + } + + /** + * Get the number of parameters submitted in the given HTTP request. + * + * @param request An HTTP request; + * + * @return The number of submitted parameters. + * + * @since 4.1 + */ + @SuppressWarnings("unchecked") + public static final int getNbParameters(final HttpServletRequest request){ + if (request == null) + return 0; + try{ + return ((Map)request.getAttribute(UWS.REQ_ATTRIBUTE_PARAMETERS)).size(); + }catch(Exception ex){ + return request.getParameterMap().size(); + } + } + + /** + * Check whether a parameter has been submitted with the given name. + * + * @param name Name of the parameter to search. The case is important! + * @param request HTTP request in which the specified parameter must be searched. + * @param caseSensitive true to perform the research case-sensitively, + * false for a case INsensitive research. + * + * @return true if the specified parameter has been found, false otherwise. 
+ * + * @since 4.1 + */ + public static final boolean hasParameter(final String name, final HttpServletRequest request, final boolean caseSensitive){ + return getParameter(name, request, caseSensitive) != null; + } + + /** + * Check whether the parameter specified with the given pair (name,value) exists in the given HTTP request. + * + * @param name Name of the parameter to search. + * @param value Expected value of the parameter. + * @param request HTTP request in which the given pair must be searched. + * @param caseSensitive true to perform the research (on name AND value) case-sensitively, + * false for a case INsensitive research. + * + * @return true if the specified parameter has been found with the given value in the given HTTP request, + * false otherwise. + * + * @since 4.1 + */ + public static final boolean hasParameter(final String name, final String value, final HttpServletRequest request, final boolean caseSensitive){ + Object found = getParameter(name, request, caseSensitive); + if (value == null) + return found != null; + else{ + if (found == null || !(found instanceof String)) + return false; + else + return (caseSensitive && ((String)found).equals(value)) || (!caseSensitive && ((String)found).equalsIgnoreCase(value)); + } + } + + /** + * Get the parameter specified by the given name from the given HTTP request. + * + * @param name Name of the parameter to search. + * @param request HTTP request in which the given pair must be searched. + * @param caseSensitive true to perform the research case-sensitively, + * false for a case INsensitive research. + * + * @return Value of the parameter. 
+ * + * @since 4.1 + */ + @SuppressWarnings("unchecked") + public static final Object getParameter(final String name, final HttpServletRequest request, final boolean caseSensitive){ + try{ + // Get the extracted parameters: + Map params = (Map)request.getAttribute(UWS.REQ_ATTRIBUTE_PARAMETERS); + + // Search case IN-sensitively the given pair (name, value): + for(Map.Entry e : params.entrySet()){ + if ((!caseSensitive && e.getKey().equalsIgnoreCase(name)) || (caseSensitive && e.getKey().equals(name))) + return (e.getValue() != null) ? e.getValue() : null; + } + }catch(Exception ex){} + return null; + } + + /** + *

Delete all unused uploaded files of the given request.

+ * + *

+ * These files have been stored on the file system + * if there is a request attribute named {@link UWS#REQ_ATTRIBUTE_PARAMETERS}. + *

+ * + * @param req Request in which files have been uploaded. + * + * @return The number of deleted files. + * + * @see UploadFile#isUsed() + * + * @since 4.1 + */ + @SuppressWarnings("unchecked") + public static final int deleteUploads(final HttpServletRequest req){ + int cnt = 0; + Object attribute = req.getAttribute(UWS.REQ_ATTRIBUTE_PARAMETERS); + // If there is the request attribute "UWS_PARAMETERS": + if (attribute != null && attribute instanceof Map){ + Map params = (Map)attribute; + // For each parameter... + for(Map.Entry e : params.entrySet()){ + // ...delete physically the uploaded file ONLY IF not used AND IF it is an uploaded file: + if (e.getValue() != null && e.getValue() instanceof UploadFile && !((UploadFile)e.getValue()).isUsed()){ + try{ + ((UploadFile)e.getValue()).deleteFile(); + cnt++; + }catch(IOException ioe){} + } + } + } + return cnt; + } + + /* *************** */ + /* USER EXTRACTION */ + /* *************** */ + /** + *

Extract the user/job owner from the given HTTP request.

+ * + * Two cases are supported: + *
    + *
  1. The user has already been identified and is stored in the HTTP attribute {@link UWS#REQ_ATTRIBUTE_USER} => the stored value is returned.
  2. + *
  3. No HTTP attribute and a {@link UserIdentifier} is provided => the user is identified with the given {@link UserIdentifier} and stored in the HTTP attribute {@link UWS#REQ_ATTRIBUTE_USER} before being returned.
  4. + *
+ * + *

In any other case, NULL is returned.

+ * + * @param request The HTTP request from which the user must be extracted. note: if NULL, NULL will be returned. + * @param userIdentifier The method to use in order to extract a user from the given request. note: if NULL, NULL is returned IF no HTTP attribute {@link UWS#REQ_ATTRIBUTE_USER} can be found. + * + * @return The identified user. MAY be NULL + * + * @throws NullPointerException If an error occurs while extracting a {@link UWSUrl} from the given {@link HttpServletRequest}. + * @throws UWSException If any error occurs while extracting a user from the given {@link HttpServletRequest}. + * + * @since 4.1 + */ + public static final JobOwner getUser(final HttpServletRequest request, final UserIdentifier userIdentifier) throws NullPointerException, UWSException{ + if (request == null) + return null; + else if (request.getAttribute(UWS.REQ_ATTRIBUTE_USER) != null) + return (JobOwner)request.getAttribute(UWS.REQ_ATTRIBUTE_USER); + else if (userIdentifier != null){ + JobOwner user = userIdentifier.extractUserId(new UWSUrl(request), request); + request.setAttribute(UWS.REQ_ATTRIBUTE_USER, user); + return user; + }else + return null; + } + /* **************************** */ /* DIRECTORY MANAGEMENT METHODS */ /* **************************** */ @@ -220,6 +468,30 @@ public class UWSToolBox { /* *************************** */ /* RESPONSE MANAGEMENT METHODS */ /* *************************** */ + + /** + *

Flush the buffer of the given {@link PrintWriter}.

+ * + *

+ * This function aims to be used if the given {@link PrintWriter} has been provided by an {@link HttpServletResponse}. + * In such case, a call to its flush() function may generate a silent error which could only mean that + * the connection with the HTTP client has been closed. + *

+ * + * @param writer The writer to flush. + * + * @throws ClientAbortException If the connection with the HTTP client is closed. + * + * @see PrintWriter#flush() + * + * @since 4.1 + */ + public static final void flush(final PrintWriter writer) throws ClientAbortException{ + writer.flush(); + if (writer.checkError()) + throw new ClientAbortException(); + } + /** * Copies the content of the given input stream in the given HTTP response. * @@ -237,6 +509,9 @@ public class UWSToolBox { if (mimeType != null) response.setContentType(mimeType); + // Set the character encoding: + response.setCharacterEncoding(UWSToolBox.DEFAULT_CHAR_ENCODING); + // Set the HTTP content length: if (contentSize > 0) response.setContentLength((int)contentSize); @@ -246,7 +521,7 @@ public class UWSToolBox { byte[] buffer = new byte[1024]; int length; while((length = input.read(buffer)) > 0) - output.print(new String(buffer, 0, length)); + output.write(buffer, 0, length); }finally{ if (output != null) output.flush(); @@ -256,7 +531,7 @@ public class UWSToolBox { /** * Writes the stack trace of the given exception in the file whose the name and the parent directory are given in parameters. * If the specified file already exists, it will be overwritten if the parameter overwrite is equal to true, otherwise - * no file will not be changed (default behavior of {@link UWSToolBox#writeErrorFile(Exception, String, String)}). + * no file will not be changed (default behavior of {@link UWSToolBox#writeErrorFile(Exception, ErrorSummary, UWSJob, OutputStream)}). * * @param ex The exception which has to be used to generate the error file. * @param error The error description. @@ -385,7 +660,7 @@ public class UWSToolBox { /** * Gets the file extension corresponding to the given MIME type. * - * @param MIME type A MIME type (i.e. text/plain, application/json, application/xml, text/xml, application/x-votable+xml, ....) + * @param mimeType A MIME type (i.e. 
text/plain, application/json, application/xml, text/xml, application/x-votable+xml, ....) * * @return The corresponding file extension or null if not known. */ diff --git a/src/uws/job/ErrorSummary.java b/src/uws/job/ErrorSummary.java index cc2c983434dea58939f652afdedf746f10634961..607dbff03f9b60d2b4b804677c258104d97937c6 100644 --- a/src/uws/job/ErrorSummary.java +++ b/src/uws/job/ErrorSummary.java @@ -16,21 +16,20 @@ package uws.job; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import uws.UWSException; - import uws.job.serializer.UWSSerializer; - import uws.job.user.JobOwner; /** * This class gives a short description of the occurred error (if any) during a job execution. * A fuller representation of the error may be retrieved from {jobs}/(job-id)/error. 
* - * @author Grégory Mantelet (CDS) - * @version 02/2011 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (08/2014) */ public class ErrorSummary extends SerializableUWSObject { private static final long serialVersionUID = 1L; @@ -132,7 +131,7 @@ public class ErrorSummary extends SerializableUWSObject { /* INHERITED METHODS */ /* ***************** */ @Override - public String serialize(UWSSerializer serializer, JobOwner owner) throws UWSException{ + public String serialize(UWSSerializer serializer, JobOwner owner) throws UWSException, Exception{ return serializer.getErrorSummary(this, true); } diff --git a/src/uws/job/ErrorType.java b/src/uws/job/ErrorType.java index 7ee038c277ed02bdf82e282df0138371698012ba..7e80d9d4ecf2e69567557924b20db074f6bf5175 100644 --- a/src/uws/job/ErrorType.java +++ b/src/uws/job/ErrorType.java @@ -16,7 +16,8 @@ package uws.job; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ /** @@ -24,9 +25,14 @@ package uws.job; * * @see ErrorSummary * - * @author Grégory Mantelet (CDS) - * @version 09/2010 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (09/2014) */ public enum ErrorType{ - FATAL, TRANSIENT + FATAL, TRANSIENT; + + @Override + public String toString(){ + return super.toString().toLowerCase(); + } } diff --git a/src/uws/job/JobList.java b/src/uws/job/JobList.java index 9c10ee54fcea4eaa28a5e6b9ad1f3b2ae1c2f3a3..90e43f405b1bbd2e5c920aeff1007b2546484ad3 100644 --- a/src/uws/job/JobList.java +++ b/src/uws/job/JobList.java @@ -16,7 +16,8 @@ package uws.job; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . 
* - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.util.ArrayList; @@ -24,25 +25,22 @@ import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; -import java.lang.IllegalStateException; - import uws.UWSException; import uws.UWSExceptionFactory; import uws.UWSToolBox; - import uws.job.manager.DefaultDestructionManager; import uws.job.manager.DefaultExecutionManager; import uws.job.manager.DestructionManager; import uws.job.manager.ExecutionManager; import uws.job.serializer.UWSSerializer; import uws.job.user.JobOwner; - -import uws.service.UWSService; import uws.service.UWS; +import uws.service.UWSService; import uws.service.UWSUrl; import uws.service.UserIdentifier; import uws.service.backup.UWSBackupManager; import uws.service.log.UWSLog; +import uws.service.log.UWSLog.LogLevel; /** *

General description

@@ -94,12 +92,11 @@ import uws.service.log.UWSLog; *

* *

- * To use a custom destruction manager, you can use the method {@link #setDestructionManager(DestructionManager)} - * if the jobs list is not managed by a UWS or {@link UWSService#setDestructionManager(DestructionManager)} otherwise. + * To use a custom destruction manager, you can use the method {@link #setDestructionManager(DestructionManager)}. *

* - * @author Grégory Mantelet (CDS) - * @version 06/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (11/2014) * * @see UWSJob */ @@ -132,12 +129,12 @@ public class JobList extends SerializableUWSObject implements Iterable { * * @param jobListName The jobs list name. * - * @throws UWSException If the given name is null or empty. + * @throws NullPointerException If the given job list name is NULL. * * @see #JobList(String, ExecutionManager) */ - public JobList(String jobListName) throws UWSException{ - this(jobListName, new DefaultExecutionManager(), new DefaultDestructionManager()); + public JobList(String jobListName) throws NullPointerException{ + this(jobListName, null, new DefaultDestructionManager()); } /** @@ -145,10 +142,10 @@ public class JobList extends SerializableUWSObject implements Iterable { * * @param jobListName The jobs list name. * @param executionManager The object which will manage the execution of all jobs of this list. - * - * @throws UWSException If the given name is null or empty, or if the given execution manager is null. + * + * @throws NullPointerException If the given job list name is NULL or empty or if no execution manager is provided. */ - public JobList(String jobListName, ExecutionManager executionManager) throws UWSException{ + public JobList(String jobListName, ExecutionManager executionManager) throws NullPointerException{ this(jobListName, executionManager, new DefaultDestructionManager()); } @@ -157,10 +154,10 @@ public class JobList extends SerializableUWSObject implements Iterable { * * @param jobListName The jobs list name. * @param destructionManager The object which manages the automatic destruction of jobs when they have reached their destruction date. - * - * @throws UWSException If the given name is null or empty, or if the given destruction manager is null. + * + * @throws NullPointerException If the given job list name is NULL or empty or if no destruction manager is provided. 
*/ - public JobList(String jobListName, DestructionManager destructionManager) throws UWSException{ + public JobList(String jobListName, DestructionManager destructionManager) throws NullPointerException{ this(jobListName, new DefaultExecutionManager(), destructionManager); } @@ -170,29 +167,26 @@ public class JobList extends SerializableUWSObject implements Iterable { * @param jobListName The jobs list name. * @param executionManager The object which will manage the execution of all jobs of this list. * @param destructionManager The object which manages the automatic destruction of jobs when they have reached their destruction date. - * - * @throws UWSException If the given name is null or empty, or if the given execution or destruction manager is null. + * + * @throws NullPointerException If the given job list name is NULL or empty or if no execution manager and destruction manager are provided. */ - public JobList(String jobListName, ExecutionManager executionManager, DestructionManager destructionManager) throws UWSException{ + public JobList(String jobListName, ExecutionManager executionManager, DestructionManager destructionManager) throws NullPointerException{ if (jobListName == null) - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Missing job list name ! => Impossible to build the job list."); + throw new NullPointerException("Missing job list name ! => Impossible to build the job list."); else{ jobListName = jobListName.trim(); if (jobListName.length() == 0) - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Missing job list name ! => Impossible to build the job list."); + throw new NullPointerException("Missing job list name ! => Impossible to build the job list."); } name = jobListName; jobsList = new LinkedHashMap(); ownerJobs = new LinkedHashMap>(); - if (executionManager == null) - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Missing execution manager ! 
=> Impossible to build the job list."); - else - this.executionManager = executionManager; + this.executionManager = executionManager; if (destructionManager == null) - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Missing destruction manager ! => Impossible to build the job list."); + throw new NullPointerException("Missing destruction manager ! => Impossible to build the job list."); else this.destructionManager = destructionManager; } @@ -236,7 +230,7 @@ public class JobList extends SerializableUWSObject implements Iterable { * @return A logger. * * @see #getUWS() - * @see UWS#getLogger(); + * @see UWS#getLogger() * @see UWSToolBox#getDefaultLogger() */ public UWSLog getLogger(){ @@ -289,6 +283,12 @@ public class JobList extends SerializableUWSObject implements Iterable { * @return The used execution manager. */ public final ExecutionManager getExecutionManager(){ + if (executionManager == null){ + if (uws == null) + executionManager = new DefaultExecutionManager(); + else + executionManager = new DefaultExecutionManager(uws.getLogger()); + } return executionManager; } @@ -302,17 +302,19 @@ public class JobList extends SerializableUWSObject implements Iterable { * @see ExecutionManager#remove(UWSJob) * @see ExecutionManager#execute(UWSJob) */ - public synchronized final void setExecutionManager(final ExecutionManager manager) throws UWSException{ + public synchronized final void setExecutionManager(final ExecutionManager manager){ if (manager == null) return; ExecutionManager oldManager = executionManager; executionManager = manager; - for(UWSJob job : this){ - if (job.getPhase() != ExecutionPhase.PENDING && !job.isFinished()){ - oldManager.remove(job); - executionManager.execute(job); + if (oldManager != null){ + for(UWSJob job : this){ + if (job.getPhase() != ExecutionPhase.PENDING && !job.isFinished()){ + oldManager.remove(job); + executionManager.execute(job); + } } } } @@ -356,7 +358,7 @@ public class JobList extends SerializableUWSObject 
implements Iterable { * Gets the job whose the ID is given in parameter ONLY IF it is the one of the specified user OR IF the specified job is owned by an anonymous user. * * @param jobID ID of the job to get. - * @param userID ID of the user who asks this job (null means no particular owner => cf {@link #getJob(String)}). + * @param user The user who asks this job (null means no particular owner => cf {@link #getJob(String)}). * * @return The requested job or null if there is no job with the given ID or if the user is not allowed to get the given job. * @@ -364,7 +366,7 @@ public class JobList extends SerializableUWSObject implements Iterable { */ public UWSJob getJob(String jobID, JobOwner user) throws UWSException{ if (user != null && !user.hasReadPermission(this)) - throw UWSExceptionFactory.readPermissionDenied(user, true, getName()); + throw new UWSException(UWSException.PERMISSION_DENIED, UWSExceptionFactory.readPermissionDenied(user, true, getName())); // Get the specified job: UWSJob job = jobsList.get(jobID); @@ -373,7 +375,7 @@ public class JobList extends SerializableUWSObject implements Iterable { if (user != null && job != null && job.getOwner() != null){ JobOwner owner = job.getOwner(); if (!owner.equals(user) && !user.hasReadPermission(job)) - throw UWSExceptionFactory.readPermissionDenied(user, false, job.getJobId()); + throw new UWSException(UWSException.PERMISSION_DENIED, UWSExceptionFactory.readPermissionDenied(user, false, job.getJobId())); } return job; @@ -393,7 +395,7 @@ public class JobList extends SerializableUWSObject implements Iterable { /** * Gets an iterator on the jobs list of the specified user. * - * @param ownerId The ID of the owner/user (may be null). + * @param user The owner/user who asks for this operation (may be null). 
* * @return An iterator on all jobs which have been created by the specified owner/user * or a NullIterator if the specified owner/user has no job @@ -407,14 +409,17 @@ public class JobList extends SerializableUWSObject implements Iterable { return ownerJobs.get(user).values().iterator(); else return new Iterator(){ + @Override public boolean hasNext(){ return false; } + @Override public UWSJob next(){ return null; } + @Override public void remove(){ ; } @@ -427,6 +432,7 @@ public class JobList extends SerializableUWSObject implements Iterable { * * @see java.lang.Iterable#iterator() */ + @Override public final Iterator iterator(){ return jobsList.values().iterator(); } @@ -510,7 +516,6 @@ public class JobList extends SerializableUWSObject implements Iterable { * * @throws UWSException If the owner of the given job is not allowed to add any job into this jobs list. * - * @see UWSJob#loadAdditionalParams() * @see UWSJob#setJobList(JobList) * @see UWSService#getBackupManager() * @see UWSBackupManager#saveOwner(JobOwner) @@ -518,46 +523,44 @@ public class JobList extends SerializableUWSObject implements Iterable { * @see UWSJob#applyPhaseParam(JobOwner) */ public synchronized String addNewJob(final UWSJob j) throws UWSException{ - if (j == null || jobsList.containsKey(j.getJobId())){ + if (uws == null) + throw new IllegalStateException("Jobs can not be added to this job list until this job list is linked to a UWS!"); + else if (j == null || jobsList.containsKey(j.getJobId())){ return null; }else{ JobOwner owner = j.getOwner(); // Check the WRITE permission of the owner of this job: if (owner != null && !owner.hasWritePermission(this)) - throw UWSExceptionFactory.writePermissionDenied(owner, true, getName()); - - try{ - // Set its job list: - j.setJobList(this); - - // Add the job to the jobs list: - jobsList.put(j.getJobId(), j); - if (owner != null){ - // Index also this job in function of its owner: - if (!ownerJobs.containsKey(owner)) - ownerJobs.put(owner, new 
LinkedHashMap()); - ownerJobs.get(owner).put(j.getJobId(), j); - } + throw new UWSException(UWSException.PERMISSION_DENIED, UWSExceptionFactory.writePermissionDenied(owner, true, getName())); + + // Set its job list: + j.setJobList(this); + + // Add the job to the jobs list: + jobsList.put(j.getJobId(), j); + if (owner != null){ + // Index also this job in function of its owner: + if (!ownerJobs.containsKey(owner)) + ownerJobs.put(owner, new LinkedHashMap()); + ownerJobs.get(owner).put(j.getJobId(), j); + } - // Save the owner jobs list: - if (owner != null && uws.getBackupManager() != null && j.getRestorationDate() == null) - uws.getBackupManager().saveOwner(j.getOwner()); + // Save the owner jobs list: + if (owner != null && uws.getBackupManager() != null && j.getRestorationDate() == null) + uws.getBackupManager().saveOwner(j.getOwner()); - // Add it to the destruction manager: - destructionManager.update(j); + // Add it to the destruction manager: + destructionManager.update(j); - // Execute the job if asked in the additional parameters: - j.applyPhaseParam(null); + // Execute the job if asked in the additional parameters: + j.applyPhaseParam(null); // Note: can not throw an exception since no user is specified (so, no permission check is done). - // Log the "creation" of the job: - if (j.getRestorationDate() == null) - getLogger().jobCreated(j); + // Log the "creation" of the job: + if (j.getRestorationDate() == null) + getLogger().logJob(LogLevel.INFO, j, "CREATED", "Job \"" + j.getJobId() + "\" successfully created and added in the job list \"" + getName() + "\".", null); - return j.getJobId(); - }catch(IllegalStateException e){ - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, e, "Impossible to add the job " + j.getJobId() + " into the jobs list " + getName() + " !"); - } + return j.getJobId(); } } @@ -589,13 +592,6 @@ public class JobList extends SerializableUWSObject implements Iterable { UWSJob removedJob = (jobId == null) ? 
null : jobsList.remove(jobId); if (removedJob != null){ - // Delete completely their association: - /*try{ - removedJob.setJobList(null); - }catch(IllegalStateException ue){ - getLogger().error("Impossible to set the job list of the removed job to NULL !", ue); - }*/ - // Clear its owner index: JobOwner owner = removedJob.getOwner(); if (owner != null && ownerJobs.containsKey(owner)){ @@ -638,7 +634,7 @@ public class JobList extends SerializableUWSObject implements Iterable { uws.getBackupManager().saveOwner(destroyedJob.getOwner()); // Log this job destruction: - getLogger().jobDestroyed(destroyedJob, this); + getLogger().logJob(LogLevel.INFO, destroyedJob, "DESTROY", "The job \"" + destroyedJob.getJobId() + "\" has been removed from the job list \"" + name + "\".", null); return true; } @@ -660,10 +656,10 @@ public class JobList extends SerializableUWSObject implements Iterable { public boolean destroyJob(final String jobId, final JobOwner user) throws UWSException{ if (user != null){ if (!user.hasWritePermission(this)) - throw UWSExceptionFactory.writePermissionDenied(user, true, getName()); + throw new UWSException(UWSException.PERMISSION_DENIED, UWSExceptionFactory.writePermissionDenied(user, true, getName())); UWSJob job = getJob(jobId); if (job != null && job.getOwner() != null && !user.equals(job.getOwner()) && !user.hasWritePermission(job)) - throw UWSExceptionFactory.writePermissionDenied(user, false, job.getJobId()); + throw new UWSException(UWSException.PERMISSION_DENIED, UWSExceptionFactory.writePermissionDenied(user, false, jobId)); } return destroyJob(jobId); } @@ -693,7 +689,7 @@ public class JobList extends SerializableUWSObject implements Iterable { /** * Destroys all jobs owned by the specified user. * - * @param ownerId The ID of the owner/user. + * @param owner The owner/user who asks for this operation. * * @throws UWSException If the given user is not allowed to update of the content of this jobs list. 
* @@ -704,7 +700,7 @@ public class JobList extends SerializableUWSObject implements Iterable { if (owner == null) clear(); else if (!owner.hasWritePermission(this)) - throw UWSExceptionFactory.writePermissionDenied(owner, true, getName()); + throw new UWSException(UWSException.PERMISSION_DENIED, UWSExceptionFactory.writePermissionDenied(owner, true, getName())); else{ if (ownerJobs.containsKey(owner)){ ArrayList jobIDs = new ArrayList(ownerJobs.get(owner).keySet()); @@ -719,9 +715,9 @@ public class JobList extends SerializableUWSObject implements Iterable { /* INHERITED METHODS */ /* ***************** */ @Override - public String serialize(UWSSerializer serializer, JobOwner user) throws UWSException{ + public String serialize(UWSSerializer serializer, JobOwner user) throws UWSException, Exception{ if (user != null && !user.hasReadPermission(this)) - throw UWSExceptionFactory.readPermissionDenied(user, true, getName()); + throw new UWSException(UWSException.PERMISSION_DENIED, UWSExceptionFactory.writePermissionDenied(user, true, getName())); return serializer.getJobList(this, user, true); } diff --git a/src/uws/job/JobPhase.java b/src/uws/job/JobPhase.java index cf0a270fb25dc19c1cd85013e6fbeea434fafc36..1de05bbbd0b4428b265fdcc59ba75db0b54c2882 100644 --- a/src/uws/job/JobPhase.java +++ b/src/uws/job/JobPhase.java @@ -16,7 +16,8 @@ package uws.job; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.Serializable; @@ -28,8 +29,8 @@ import uws.UWSExceptionFactory; * An instance of this class represents the current execution phase of a given job, * and it describes the transitions between the different phases. 
* - * @author Grégory Mantelet (CDS) - * @version 05/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (08/2014) * * @see ExecutionPhase * @see UWSJob @@ -48,11 +49,11 @@ public class JobPhase implements Serializable { * * @param j The job whose the execution phase must be represented by the built JobPhase instance. * - * @throws UWSException If the given job is null. + * @throws NullPointerException If the given job is null. */ - public JobPhase(UWSJob j) throws UWSException{ + public JobPhase(UWSJob j) throws NullPointerException{ if (j == null) - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Missing job instance ! => impossible to build a JobPhase instance."); + throw new NullPointerException("Missing job instance ! => impossible to build a JobPhase instance."); job = j; } @@ -88,12 +89,16 @@ public class JobPhase implements Serializable { } /** - * Lets changing the current phase of the associated job considering or not the order of execution phases. + *

Lets changing the current phase of the associated job considering or not the order of execution phases.

+ * + *

Note: + * If the given phase is null, nothing is done. + *

* * @param p The new phase. * @param force true to ignore the phases order, false otherwise. * - * @throws UWSException If the given phase is null or if the phase transition is forbidden. + * @throws UWSException If the phase transition is forbidden. * * @see #setPendingPhase(boolean) * @see #setQueuedPhase(boolean) @@ -107,7 +112,7 @@ public class JobPhase implements Serializable { */ public void setPhase(ExecutionPhase p, boolean force) throws UWSException{ if (p == null) - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Incorrect phase ! => The phase of a job can not be set to NULL !"); + return; // Check that the given phase follows the imposed phases order: switch(p){ @@ -151,7 +156,7 @@ public class JobPhase implements Serializable { */ protected void setPendingPhase(boolean force) throws UWSException{ if (!force && phase != ExecutionPhase.PENDING && phase != ExecutionPhase.UNKNOWN) - throw UWSExceptionFactory.incorrectPhaseTransition(job.getJobId(), phase, ExecutionPhase.PENDING); + throw new UWSException(UWSException.BAD_REQUEST, UWSExceptionFactory.incorrectPhaseTransition(job.getJobId(), phase, ExecutionPhase.PENDING)); phase = ExecutionPhase.PENDING; } @@ -168,7 +173,7 @@ public class JobPhase implements Serializable { phase = ExecutionPhase.QUEUED; else{ if (phase != ExecutionPhase.QUEUED && phase != ExecutionPhase.HELD && phase != ExecutionPhase.PENDING && phase != ExecutionPhase.UNKNOWN) - throw UWSExceptionFactory.incorrectPhaseTransition(job.getJobId(), phase, ExecutionPhase.QUEUED); + throw new UWSException(UWSException.BAD_REQUEST, UWSExceptionFactory.incorrectPhaseTransition(job.getJobId(), phase, ExecutionPhase.QUEUED)); phase = ExecutionPhase.QUEUED; } @@ -186,7 +191,7 @@ public class JobPhase implements Serializable { phase = ExecutionPhase.EXECUTING; else{ if (phase != ExecutionPhase.EXECUTING && phase != ExecutionPhase.SUSPENDED && phase != ExecutionPhase.PENDING && phase != ExecutionPhase.QUEUED && phase != 
ExecutionPhase.UNKNOWN) - throw UWSExceptionFactory.incorrectPhaseTransition(job.getJobId(), phase, ExecutionPhase.EXECUTING); + throw new UWSException(UWSException.BAD_REQUEST, UWSExceptionFactory.incorrectPhaseTransition(job.getJobId(), phase, ExecutionPhase.EXECUTING)); phase = ExecutionPhase.EXECUTING; } @@ -204,7 +209,7 @@ public class JobPhase implements Serializable { phase = ExecutionPhase.COMPLETED; else{ if (phase != ExecutionPhase.COMPLETED && phase != ExecutionPhase.EXECUTING && phase != ExecutionPhase.UNKNOWN) - throw UWSExceptionFactory.incorrectPhaseTransition(job.getJobId(), phase, ExecutionPhase.COMPLETED); + throw new UWSException(UWSException.BAD_REQUEST, UWSExceptionFactory.incorrectPhaseTransition(job.getJobId(), phase, ExecutionPhase.COMPLETED)); phase = ExecutionPhase.COMPLETED; } @@ -222,7 +227,7 @@ public class JobPhase implements Serializable { phase = ExecutionPhase.ABORTED; else{ if (phase == ExecutionPhase.COMPLETED || phase == ExecutionPhase.ERROR) - throw UWSExceptionFactory.incorrectPhaseTransition(job.getJobId(), phase, ExecutionPhase.ABORTED); + throw new UWSException(UWSException.BAD_REQUEST, UWSExceptionFactory.incorrectPhaseTransition(job.getJobId(), phase, ExecutionPhase.ABORTED)); phase = ExecutionPhase.ABORTED; } @@ -240,7 +245,7 @@ public class JobPhase implements Serializable { phase = ExecutionPhase.ERROR; else{ if (phase == ExecutionPhase.COMPLETED || phase == ExecutionPhase.ABORTED) - throw UWSExceptionFactory.incorrectPhaseTransition(job.getJobId(), phase, ExecutionPhase.ERROR); + throw new UWSException(UWSException.BAD_REQUEST, UWSExceptionFactory.incorrectPhaseTransition(job.getJobId(), phase, ExecutionPhase.ERROR)); phase = ExecutionPhase.ERROR; } @@ -255,7 +260,7 @@ public class JobPhase implements Serializable { */ protected void setHeldPhase(boolean force) throws UWSException{ if (!force && phase != ExecutionPhase.HELD && phase != ExecutionPhase.PENDING && phase != ExecutionPhase.UNKNOWN) - throw 
UWSExceptionFactory.incorrectPhaseTransition(job.getJobId(), phase, ExecutionPhase.HELD); + throw new UWSException(UWSException.BAD_REQUEST, UWSExceptionFactory.incorrectPhaseTransition(job.getJobId(), phase, ExecutionPhase.HELD)); phase = ExecutionPhase.HELD; } diff --git a/src/uws/job/JobThread.java b/src/uws/job/JobThread.java index 0b80bd60c1be6fc663e1dbcd5d394ab5c842a1e0..28e9dcfc44cab313a967cced64734c009c9c119d 100644 --- a/src/uws/job/JobThread.java +++ b/src/uws/job/JobThread.java @@ -16,7 +16,8 @@ package uws.job; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.IOException; @@ -25,7 +26,10 @@ import java.util.Date; import uws.UWSException; import uws.UWSToolBox; +import uws.service.error.ServiceErrorWriter; import uws.service.file.UWSFileManager; +import uws.service.log.UWSLog; +import uws.service.log.UWSLog.LogLevel; /** *

An instance of this class is a thread dedicated to a job execution.

@@ -55,8 +59,8 @@ import uws.service.file.UWSFileManager; *
  • an {@link InterruptedException}: the method {@link UWSJob#abort()} is called.
  • * * - * @author Grégory Mantelet (CDS) - * @version 05/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (04/2015) * * @see UWSJob#start() * @see UWSJob#abort() @@ -71,44 +75,91 @@ public abstract class JobThread extends Thread { /** The last error which has occurred during the execution of this thread. */ protected UWSException lastError = null; - /** Indicates whether the {@link UWSJob#jobWork()} has been called and finished, or not. */ + /** Indicate whether the exception stored in the attribute {@link #lastError} should be considered as a grave error or not. + * By default, {@link #lastError} is a "normal" error. + * @since 4.1 */ + protected boolean fatalError = false; + + /** Indicates whether the {@link #jobWork()} has been called and finished, or not. */ protected boolean finished = false; /** Description of what is done by this thread. */ protected final String taskDescription; + /** + * Object to use in order to write the content of an error/exception in any output stream. + * If NULL, the content will be written by {@link UWSToolBox#writeErrorFile(Exception, ErrorSummary, UWSJob, OutputStream)} + * (in text/plain with stack-trace). + * Otherwise the content and the MIME type are determined by the error writer. + * @since 4.1 + */ + protected final ServiceErrorWriter errorWriter; + + /** Group of threads in which this job thread will run. */ public final static ThreadGroup tg = new ThreadGroup("UWS_GROUP"); /** * Builds the JobThread instance which will be used by the given job to execute its task. * * @param j The associated job. - * @param fileManager An object to get access to UWS files (particularly: error and results file). * - * @throws UWSException If the given job or the given file manager is null. + * @throws NullPointerException If the given job or the given file manager is null. 
* * @see #getDefaultTaskDescription(UWSJob) */ - public JobThread(UWSJob j) throws UWSException{ - this(j, getDefaultTaskDescription(j)); + public JobThread(final UWSJob j) throws NullPointerException{ + this(j, getDefaultTaskDescription(j), null); + } + + /** + * Builds the JobThread instance which will be used by the given job to execute its task. + * + * @param j The associated job. + * @param errorWriter Object to use in case of error in order to format the details of the error for the .../error/details parameter. + * + * @throws NullPointerException If the given job is null. + * + * @see #getDefaultTaskDescription(UWSJob) + * + * @since 4.1 + */ + public JobThread(final UWSJob j, final ServiceErrorWriter errorWriter) throws NullPointerException{ + this(j, getDefaultTaskDescription(j), errorWriter); + } + + /** + * Builds the JobThread instance which will be used by the given job to execute its task. + * + * @param j The associated job. + * @param task Description of the task executed by this thread. + * + * @throws NullPointerException If the given job is null. + */ + public JobThread(final UWSJob j, final String task) throws NullPointerException{ + super(tg, j.getJobId()); + + job = j; + taskDescription = task; + errorWriter = null; } /** * Builds the JobThread instance which will be used by the given job to execute its task. * * @param j The associated job. - * @param fileManager An object to get access to UWS files (particularly: error and results file). * @param task Description of the task executed by this thread. + * @param errorWriter Object to use in case of error in order to format the details of the error for the .../error/details parameter. + * + * @throws NullPointerException If the given job is null. * - * @throws UWSException If the given job or the given file manager is null. 
+ * @since 4.1 */ - public JobThread(UWSJob j, String task) throws UWSException{ + public JobThread(final UWSJob j, final String task, final ServiceErrorWriter errorWriter) throws NullPointerException{ super(tg, j.getJobId()); - if (j == null) - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Missing job instance ! => impossible to build a JobThread instance."); job = j; taskDescription = task; + this.errorWriter = errorWriter; } /** @@ -158,7 +209,7 @@ public abstract class JobThread extends Thread { } /** - * Indicates whether the {@link UWSJob#jobWork()} method has been called or not. + * Indicates whether the {@link #jobWork()} method has been called or not. * * @return true if the job work is done, false otherwise. */ @@ -189,7 +240,7 @@ public abstract class JobThread extends Thread { * * @throws UWSException If there is an error while publishing the error. * - * @see {@link UWSJob#error(ErrorSummary)} + * @see UWSJob#error(ErrorSummary) */ public void setError(final ErrorSummary error) throws UWSException{ job.error(error); @@ -210,22 +261,33 @@ public abstract class JobThread extends Thread { * * @throws UWSException If there is an error while publishing the given exception. * - * {@link UWSToolBox#writeErrorFile(Exception, ErrorSummary, UWSJob, OutputStream)} + * @see #setError(ErrorSummary) + * @see UWSToolBox#writeErrorFile(Exception, ErrorSummary, UWSJob, OutputStream) */ public void setError(final UWSException ue) throws UWSException{ if (ue == null) return; try{ + // Set the error summary: ErrorSummary error = new ErrorSummary(ue, ue.getUWSErrorType(), job.getUrl() + "/" + UWSJob.PARAM_ERROR_SUMMARY + "/details"); + + // Prepare the output stream: OutputStream output = getFileManager().getErrorOutput(error, job); - UWSToolBox.writeErrorFile(ue, error, job, output); + // Format and write the error... 
+ // ...using the error writer, if any: + if (errorWriter != null) + errorWriter.writeError(ue, error, job, output); + // ...or write a default output: + else + UWSToolBox.writeErrorFile(ue, error, job, output); + // Set the error summary inside the job: setError(error); }catch(IOException ioe){ - job.getLogger().error("The stack trace of a UWSException (job ID: " + job.getJobId() + " ; error message: \"" + ue.getMessage() + "\") had not been written !", ioe); + job.getLogger().logThread(LogLevel.ERROR, this, "SET_ERROR", "The stack trace of a UWSException had not been written!", ioe); setError(new ErrorSummary(ue.getMessage(), ue.getUWSErrorType())); } } @@ -233,8 +295,6 @@ public abstract class JobThread extends Thread { /** * Creates a default result description. * - * @param job The job which will contains this result. - * * @return The created result. * * @see #createResult(String) @@ -256,7 +316,6 @@ public abstract class JobThread extends Thread { /** * Creates a default result description but by precising its name/ID. * - * @param job The job which will contains this result. * @param name The name/ID of the result to create. * * @return The created result. @@ -305,7 +364,7 @@ public abstract class JobThread extends Thread { * * @throws IOException If there is an error while getting the result file size. * - * @see {@link UWSFileManager#getResultSize(Result, UWSJob)} + * @see UWSFileManager#getResultSize(Result, UWSJob) */ public long getResultSize(final Result result) throws IOException{ return getFileManager().getResultSize(result, job); @@ -318,14 +377,14 @@ public abstract class JobThread extends Thread { *
      *
    • This method does the job work but it MUST also fill the associated job with the execution results and/or errors.
    • *
    • Do not forget to check the interrupted flag of the thread ({@link Thread#isInterrupted()}) and then to send an {@link InterruptedException}. - * Otherwise the {@link UWSJob#stop()} method will have no effect, as for {@link #abort()} and {@link #error(ErrorSummary)}.
    • + * Otherwise the {@link UWSJob#stop()} method will have no effect, as for {@link UWSJob#abort()} and {@link #setError(ErrorSummary)}. *

    * *

    Notes: *

      *
    • The "setPhase(COMPLETED)" and the "endTime=new Date()" are automatically applied just after the call to jobWork.
    • - *
    • If an {@link UWSException} is thrown the {@link JobThread} will automatically publish the exception in this job - * thanks to the {@link UWSJob#error(UWSException)} method or the {@link #setErrorSummary(ErrorSummary)} method, + *
    • If a {@link UWSException} is thrown the {@link JobThread} will automatically publish the exception in this job + * thanks to the {@link UWSJob#error(ErrorSummary)} method or the {@link UWSJob#setErrorSummary(ErrorSummary)} method, * and so it will set its phase to {@link ExecutionPhase#ERROR}.
    • *
    • If an {@link InterruptedException} is thrown the {@link JobThread} will automatically set the phase to {@link ExecutionPhase#ABORTED}
    • *

    @@ -338,19 +397,19 @@ public abstract class JobThread extends Thread { /** *
      *
    1. Tests the execution phase of the job: if not {@link ExecutionPhase#EXECUTING EXECUTING}, nothing is done...the thread ends immediately.
    2. - *
    3. Calls the {@link UWSJob#jobWork()} method.
    4. + *
    5. Calls the {@link #jobWork()} method.
    6. *
    7. Sets the finished flag to true.
    8. *
    9. Changes the job phase to {@link ExecutionPhase#COMPLETED COMPLETED} if not interrupted, else {@link ExecutionPhase#ABORTED ABORTED}. *
    *

    If any {@link InterruptedException} occurs the job phase is only set to {@link ExecutionPhase#ABORTED ABORTED}.

    *

    If any {@link UWSException} occurs while the phase is {@link ExecutionPhase#EXECUTING EXECUTING} the job phase * is set to {@link ExecutionPhase#ERROR ERROR} and an error summary is created.

    - *

    Whatever is the exception, it will always be available thanks to the {@link JobThread#getError()} after execution.

    + *

    Whatever is the exception, it will always be available thanks to the {@link #getError()} after execution.

    * - * @see UWSJob#jobWork() + * @see #jobWork() * @see UWSJob#setPhase(ExecutionPhase) - * @see UWSJob#publishExecutionError(UWSException) - * @see UWSToolBox#publishErrorSummary(UWSJob, String, ErrorType) + * @see #setError(UWSException) + * @see #setError(ErrorSummary) */ @Override public final void run(){ @@ -361,63 +420,79 @@ public abstract class JobThread extends Thread { finished = false; } + UWSLog logger = job.getLogger(); + // Log the start of this thread: - job.getLogger().threadStarted(this, taskDescription); + logger.logThread(LogLevel.INFO, this, "START", "Thread \"" + getName() + "\" started.", null); try{ - try{ - // Execute the task: - jobWork(); - - // Change the phase to COMPLETED: - finished = true; - complete(); - }catch(InterruptedException ex){ - // Abort: - finished = true; - if (!job.stopping) + // Execute the task: + jobWork(); + + // Change the phase to COMPLETED: + finished = true; + complete(); + logger.logThread(LogLevel.INFO, this, "END", "Thread \"" + getName() + "\" successfully ended.", null); + + }catch(InterruptedException ex){ + /* CASE: ABORTION + * In case of abortion, the thread just stops normally, just logging an INFO saying that the thread has been cancelled. + * Since it is not an abnormal behavior there is no reason to keep a trace of the interrupted exception. */ + finished = true; + // Abort: + if (!job.stopping){ + try{ job.abort(); - // Log the abortion: - job.getLogger().threadInterrupted(this, taskDescription, ex); + }catch(UWSException ue){ + /* Should not happen since the reason of a such exception would be that the thread can not be stopped... + * ...but we are already in the thread and it is stopping. 
*/ + logger.logJob(LogLevel.WARNING, job, "ABORT", "Can not put the job in its ABORTED phase!", ue); + } } - return; + // Log the abortion: + logger.logThread(LogLevel.INFO, this, "END", "Thread \"" + getName() + "\" cancelled.", null); }catch(UWSException ue){ - // Save the error: + /* CASE: ERROR for a known reason + * A such error is just a "normal" error, in the sense its cause is known and in a way supported or expected in + * a special configuration or parameters. Thus, the error is kept and will logged with a stack trace afterwards.*/ lastError = ue; }catch(Throwable t){ - // Build the error: - if (t.getMessage() == null || t.getMessage().trim().isEmpty()) - lastError = new UWSException(UWSException.INTERNAL_SERVER_ERROR, t.getClass().getName(), ErrorType.FATAL); + /* DEFAULT: FATAL error + * Any other error is considered as FATAL because it was not expected or supported at a given point. + * It is generally a bug or a forgiven thing in the code of the library. As for "normal" errors, this error + * is kept and will logged with stack trace afterwards. */ + fatalError = true; + if (t instanceof Error) + lastError = new UWSException(UWSException.INTERNAL_SERVER_ERROR, t, "A FATAL DEEP ERROR OCCURED WHILE EXECUTING THIS QUERY! This error is reported in the service logs.", ErrorType.FATAL); + else if (t.getMessage() == null || t.getMessage().trim().isEmpty()) + lastError = new UWSException(UWSException.INTERNAL_SERVER_ERROR, t, t.getClass().getName(), ErrorType.FATAL); else lastError = new UWSException(UWSException.INTERNAL_SERVER_ERROR, t, ErrorType.FATAL); }finally{ finished = true; - // Publish the error if any has occurred: + /* PUBLISH THE ERROR if any has occurred */ if (lastError != null){ // Log the error: - job.getLogger().threadInterrupted(this, taskDescription, lastError); + LogLevel logLevel = fatalError ? LogLevel.FATAL : LogLevel.ERROR; + logger.logJob(logLevel, job, "END", "The following " + (fatalError ? 
"GRAVE" : "") + " error interrupted the execution of the job " + job.getJobId() + ".", lastError); + logger.logThread(logLevel, this, "END", "Thread \"" + getName() + "\" ended with an error.", null); // Set the error into the job: try{ setError(lastError); }catch(UWSException ue){ try{ - job.getLogger().error("[JobThread] LEVEL 1 -> Problem in JobThread.setError(UWSException), while setting the execution error of the job " + job.getJobId(), ue); + logger.logThread(logLevel, this, "SET_ERROR", "[1st Attempt] Problem in JobThread.setError(UWSException), while setting the execution error of the job " + job.getJobId() + ". A last attempt will be done.", ue); setError(new ErrorSummary((lastError.getCause() != null) ? lastError.getCause().getMessage() : lastError.getMessage(), lastError.getUWSErrorType())); }catch(UWSException ue2){ - job.getLogger().error("[JobThread] LEVEL 2 -> Problem in JobThread.setError(ErrorSummary), while setting the execution error of the job " + job.getJobId(), ue2); - try{ - setError(new ErrorSummary(lastError.getMessage(), ErrorType.FATAL)); - }catch(UWSException ue3){ - job.getLogger().error("[JobThread] LEVEL 3 -> Problem in JobThread.setError(ErrorSummary), while setting the execution error of the job " + job.getJobId(), ue3); - } + logger.logThread(logLevel, this, "SET_ERROR", "[2nd and last Attempt] Problem in JobThread.setError(ErrorSummary), while setting the execution error of the job " + job.getJobId() + ". This error can not be reported to the user, but it will be reported in the log in the JOB context.", ue2); + // Note: no need of a level 3: if the second attempt fails, it means the job is in a wrong phase and no error summary can never be set ; further attempt won't change anything! 
} } - }else - job.getLogger().threadFinished(this, taskDescription); + } } } } diff --git a/src/uws/job/Result.java b/src/uws/job/Result.java index 1361bac8fb86e5b5d91f8d0c9d899a9c14bb0dae..fa45a754a5f481b0bbbd3bcde737f21154ad44e9 100644 --- a/src/uws/job/Result.java +++ b/src/uws/job/Result.java @@ -16,21 +16,20 @@ package uws.job; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import uws.UWSException; - import uws.job.serializer.UWSSerializer; import uws.job.user.JobOwner; - import uws.service.UWSUrl; /** * This class gives a short description (mainly an ID and a URL) of a job result. * - * @author Grégory Mantelet (CDS) - * @version 06/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (08/2014) */ public class Result extends SerializableUWSObject { private static final long serialVersionUID = 1L; @@ -245,9 +244,7 @@ public class Result extends SerializableUWSObject { } /** - * Sets the size of the corresponding result file. - * - * @return size Result file size (in bytes). + * Sets the size (in bytes) of the corresponding result file. 
*/ public final void setSize(long size){ this.size = size; @@ -257,7 +254,7 @@ public class Result extends SerializableUWSObject { /* INHERITED METHODS */ /* ***************** */ @Override - public String serialize(UWSSerializer serializer, JobOwner owner) throws UWSException{ + public String serialize(UWSSerializer serializer, JobOwner owner) throws UWSException, Exception{ return serializer.getResult(this, true); } diff --git a/src/uws/job/SerializableUWSObject.java b/src/uws/job/SerializableUWSObject.java index 3fc7c493be3a0ad5783089f27232821b16401a29..878d9cead44a03fa77f3e106317f886ac9b4332a 100644 --- a/src/uws/job/SerializableUWSObject.java +++ b/src/uws/job/SerializableUWSObject.java @@ -16,7 +16,8 @@ package uws.job; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.IOException; @@ -25,7 +26,6 @@ import java.io.Serializable; import javax.servlet.ServletOutputStream; import uws.UWSException; -import uws.UWSExceptionFactory; import uws.job.serializer.UWSSerializer; import uws.job.serializer.XMLSerializer; import uws.job.user.JobOwner; @@ -36,8 +36,8 @@ import uws.job.user.JobOwner; *

    The {@link SerializableUWSObject#serialize(UWSSerializer, JobOwner)} method must be implemented. It is the most important method of this class * because it returns a serialized representation of this UWS object.

    * - * @author Grégory Mantelet (CDS) - * @version 01/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (08/2014) */ public abstract class SerializableUWSObject implements Serializable { private static final long serialVersionUID = 1L; @@ -49,11 +49,11 @@ public abstract class SerializableUWSObject implements Serializable { * * @return The serialized representation of this object. * - * @throws UWSException If there is an error during the serialization. + * @throws Exception If there is an unexpected error during the serialization. * - * @see #serialize(UWSSerializer, String) + * @see #serialize(UWSSerializer, JobOwner) */ - public String serialize(UWSSerializer serializer) throws UWSException{ + public String serialize(UWSSerializer serializer) throws Exception{ return serialize(serializer, null); } @@ -66,9 +66,10 @@ public abstract class SerializableUWSObject implements Serializable { * * @return The serialized representation of this object. * - * @throws UWSException If there is an error during the serialization. + * @throws UWSException If the owner is not allowed to see the content of the serializable object. + * @throws Exception If there is any other error during the serialization. */ - public abstract String serialize(UWSSerializer serializer, JobOwner owner) throws UWSException; + public abstract String serialize(UWSSerializer serializer, JobOwner owner) throws UWSException, Exception; /** * Serializes the whole object in the given output stream and thanks to the given serializer. @@ -78,9 +79,9 @@ public abstract class SerializableUWSObject implements Serializable { * * @throws UWSException If there is an error during the serialization. 
* - * @see #serialize(ServletOutputStream, UWSSerializer, String) + * @see #serialize(ServletOutputStream, UWSSerializer, JobOwner) */ - public void serialize(ServletOutputStream output, UWSSerializer serializer) throws UWSException{ + public void serialize(ServletOutputStream output, UWSSerializer serializer) throws Exception{ serialize(output, serializer, null); } @@ -90,27 +91,23 @@ public abstract class SerializableUWSObject implements Serializable { * * @param output The ouput stream in which this object must be serialized. * @param serializer The serializer to use. - * @param ownerId The ID of the current ID. + * @param owner The user who asks for the serialization. * - * @throws UWSException If the given ouput stream is null, - * or if there is an error during the serialization, - * or if there is an error while writing in the given stream. + * @throws UWSException If the owner is not allowed to see the content of the serializable object. + * @throws IOException If there is an error while writing in the given stream. + * @throws Exception If there is any other error during the serialization. 
* - * @see #serialize(UWSSerializer, String) + * @see #serialize(UWSSerializer, JobOwner) */ - public void serialize(ServletOutputStream output, UWSSerializer serializer, JobOwner owner) throws UWSException{ + public void serialize(ServletOutputStream output, UWSSerializer serializer, JobOwner owner) throws UWSException, IOException, Exception{ if (output == null) - throw UWSExceptionFactory.missingOutputStream("impossible to serialize {" + toString() + "}."); + throw new NullPointerException("Missing serialization output stream!"); - try{ - String serialization = serialize(serializer, owner); - if (serialization != null){ - output.print(serialization); - output.flush(); - }else - throw UWSExceptionFactory.incorrectSerialization("NULL", "{" + toString() + "}"); - }catch(IOException ex){ - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, ex, "IOException => impossible to serialize {" + toString() + "} !"); - } + String serialization = serialize(serializer, owner); + if (serialization != null){ + output.print(serialization); + output.flush(); + }else + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Incorrect serialization value (=NULL) ! => impossible to serialize " + toString() + "."); } } diff --git a/src/uws/job/UWSJob.java b/src/uws/job/UWSJob.java index 9fe509a7a9f47a6f3f9cc0a8fb588c96160018f0..b2a28c9f367b13560925b963c2bdec220571e065 100644 --- a/src/uws/job/UWSJob.java +++ b/src/uws/job/UWSJob.java @@ -17,7 +17,7 @@ package uws.job; * along with UWSLibrary. If not, see . 
* * Copyright 2012-2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), - * Astronomisches Rechen Institute (ARI) + * Astronomisches Rechen Institut (ARI) */ import java.io.IOException; @@ -33,6 +33,7 @@ import java.util.Vector; import javax.servlet.ServletOutputStream; +import uws.ISO8601Format; import uws.UWSException; import uws.UWSExceptionFactory; import uws.UWSToolBox; @@ -45,6 +46,8 @@ import uws.service.UWSFactory; import uws.service.UWSUrl; import uws.service.file.UWSFileManager; import uws.service.log.UWSLog; +import uws.service.log.UWSLog.LogLevel; +import uws.service.request.UploadFile; /** *

    Brief description

    @@ -56,8 +59,8 @@ import uws.service.log.UWSLog; *
      *
    • * The job attributes startTime and endTime are automatically managed by {@link UWSJob}. You don't have to do anything ! - * However you can customize the used date/time format thanks to the function {@link #setDateFormat(DateFormat)}. The default date/time format is: - * yyyy-MM-dd'T'HH:mm:ss.SSSZ + * The date/time format is managed automatically by the library and can not be customized since it is imposed by the UWS + * protocol definition: ISO-8601. *
    • *
      *
    • Once set, the destruction and the executionDuration attributes are automatically managed. That is to say: @@ -90,13 +93,6 @@ import uws.service.log.UWSLog; *
    • *
      *
    • - * {@link #loadAdditionalParams()}: - * All parameters that are not managed by default are automatically stored in the job attribute {@link #additionalParameters} (a map). - * However if you want manage yourself some or all of these additional parameters (i.e. task parameters), you must override this method. - * (By default nothing is done.) - *
    • - *
      - *
    • * {@link #clearResources()}: * This method is called only at the destruction of the job. * By default, the job is stopped (if running), thread resources are freed, @@ -117,7 +113,7 @@ import uws.service.log.UWSLog; *
    * * @author Grégory Mantelet (CDS;ARI) - * @version 4.1 (04/2014) + * @version 4.1 (12/2014) */ public class UWSJob extends SerializableUWSObject { private static final long serialVersionUID = 1L; @@ -176,7 +172,9 @@ public class UWSJob extends SerializableUWSObject { /** Default value of {@link #owner} if no ID are given at the job creation. */ public final static String ANONYMOUS_OWNER = "anonymous"; - /** Default date format pattern. */ + /** Default date format pattern. + * @deprecated Replaced by {@link ISO8601Format}.*/ + @Deprecated public static final String DEFAULT_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSSZ"; /** The quote value that indicates the quote of this job is not known. */ @@ -189,7 +187,7 @@ public class UWSJob extends SerializableUWSObject { /* VARIABLES */ /* ********* */ /** The last generated job ID. It SHOULD be used ONLY by the function {@link #generateJobId()} ! */ - protected static String lastId = null; + protected static String lastId = System.currentTimeMillis() + "A"; /** The identifier of the job (it MUST be different from any other job).
    * Note: It is assigned automatically at the job creation in any job constructor @@ -205,11 +203,6 @@ public class UWSJob extends SerializableUWSObject { /** The jobs list which is supposed to managed this job. */ private JobList myJobList = null; - /* The name/label that the job creator uses to identify this job.
    - * Note: this is distinct from the Job Identifier that the UWS system itself - * assigns to each job ({@link #jobId}). It may not be unique ! * - protected String runId = null;*/ - /** *

    The current phase of the job.

    * Remember: A job is treated as a state machine thanks to this attribute. @@ -226,7 +219,9 @@ public class UWSJob extends SerializableUWSObject { */ private JobPhase phase; - /** The used date formatter. */ + /** The used date formatter. + * @deprecated Replaced by {@link ISO8601Format}. */ + @Deprecated public static final DateFormat dateFormat = new SimpleDateFormat(DEFAULT_DATE_FORMAT); /** @@ -242,37 +237,11 @@ public class UWSJob extends SerializableUWSObject { /** The time at which the job execution ended. */ private Date endTime = null; - /* - *

    This is the duration (in seconds) for which the job shall run.

    - * Notes: - *
      - *
    • An execution duration of 0 ({@link #UNLIMITED_DURATION}) implies unlimited execution duration.
    • - *
    • When a job is created, the service sets the initial execution duration.
    • - *
    • When the execution duration has been exceeded the service should automatically abort the job, - * which has the same effect as when a manual "Abort" is requested.
    • - *
    * - private long executionDuration = UNLIMITED_DURATION; - - /*

    This represents the instant when the job shall be destroyed.

    - * Notes: Destroying a job implies: - *
      - *
    • if the job is still executing, the execution is aborted
    • - *
    • any results from the job are destroyed and storage reclaimed
    • - *
    • the service forgets that the job existed.
    • - *
    - *

    The Destruction time should be viewed as a measure of the amount of time - * that a service is prepared to allocated storage for a job - typically this will be a longer duration - * that the amount of CPU time that a service would allocate.

    * - private Date destructionTime = null;*/ - /**

    This error summary gives a human-readable error message for the underlying job.

    * Note: This object is intended to be a detailed error message, and consequently, * might be a large piece of text such as a stack trace. */ protected ErrorSummary errorSummary = null; - /* This is an enumeration of the other Job parameters (given in POST queries). * - protected Map additionalParameters;*/ - /** This is a list of all results of this job. */ protected Map results; @@ -300,13 +269,11 @@ public class UWSJob extends SerializableUWSObject { *

    Note: if the parameter {@link UWSJob#PARAM_PHASE} (phase) is given with the value {@link UWSJob#PHASE_RUN} * the job execution starts immediately after the job has been added to a job list or after {@link #applyPhaseParam(JobOwner)} is called.

    * - * @param params UWS standard and non-standard parameters. - * - * @throws UWSException If a parameter is incorrect. + * @param params UWS standard and non-standard parameters. * - * @see UWSJob#AbstractJob(String, Map) + * @see UWSJob#UWSJob(JobOwner, UWSParameters) */ - public UWSJob(final UWSParameters params) throws UWSException{ + public UWSJob(final UWSParameters params){ this(null, params); } @@ -316,32 +283,31 @@ public class UWSJob extends SerializableUWSObject { *

    Note: if the parameter {@link #PARAM_PHASE} (phase) is given with the value {@link #PHASE_RUN} * the job execution starts immediately after the job has been added to a job list or after {@link #applyPhaseParam(JobOwner)} is called.

    * - * @param owner Job.owner ({@link #PARAM_OWNER}). - * @param params UWS standard and non-standard parameters. - * - * @throws UWSException If a parameter is incorrect. + * @param owner Job.owner ({@link #PARAM_OWNER}). + * @param params UWS standard and non-standard parameters. * - * @see #loadDefaultParams(Map) - * @see #loadAdditionalParams() + * @see UWSParameters#init() */ - public UWSJob(JobOwner owner, final UWSParameters params) throws UWSException{ + public UWSJob(JobOwner owner, final UWSParameters params){ this.owner = owner; phase = new JobPhase(this); - //additionalParameters = new HashMap(); results = new HashMap(); - /*Map others = loadDefaultParams(lstParam); - if (others != null){ - additionalParameters.putAll(others); - loadAdditionalParams(); - }*/ inputParams = params; inputParams.init(); jobId = generateJobId(); restorationDate = null; + + // Move all uploaded files in a location related with this job: + Iterator files = inputParams.getFiles(); + while(files.hasNext()){ + try{ + files.next().move(this); + }catch(IOException ioe){} + } } /** @@ -363,18 +329,16 @@ public class UWSJob extends SerializableUWSObject { * @param results Its results (if phase=COMPLETED). * @param error Its error (if phase=ERROR). * - * @throws UWSException If the given ID is null or if another error occurs while building this job. + * @throws NullPointerException If the given ID is NULL. 
*/ - public UWSJob(final String jobID, final JobOwner owner, final UWSParameters params, final long quote, final long startTime, final long endTime, final List results, final ErrorSummary error) throws UWSException{ + public UWSJob(final String jobID, final JobOwner owner, final UWSParameters params, final long quote, final long startTime, final long endTime, final List results, final ErrorSummary error) throws NullPointerException{ if (jobID == null) - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Missing job ID => impossible to build a Job without a valid ID !"); + throw new NullPointerException("Missing job ID => impossible to build a Job without a valid ID!"); this.jobId = jobID; this.owner = owner; this.quote = quote; - /*this.destructionTime = destruction; - this.executionDuration = (maxDuration<0)?UNLIMITED_DURATION:maxDuration;*/ if (startTime > 0) this.startTime = new Date(startTime); @@ -396,13 +360,6 @@ public class UWSJob extends SerializableUWSObject { inputParams = params; params.init(); - /*this.additionalParameters = new HashMap(); - Map others = loadDefaultParams(lstParams); - if (others != null){ - additionalParameters.putAll(others); - loadAdditionalParams(); - }*/ - ExecutionPhase p = ExecutionPhase.PENDING; if (startTime > 0 && endTime > 0){ if (this.results.isEmpty() && this.errorSummary == null) @@ -412,8 +369,13 @@ public class UWSJob extends SerializableUWSObject { else if (this.errorSummary != null) p = ExecutionPhase.ERROR; } - if (phase != null) - setPhase(p, true); + if (phase != null){ + try{ + setPhase(p, true); + }catch(UWSException ue){ + // Can never append because the "force" parameter is true! + } + } restorationDate = new Date(); } @@ -429,143 +391,18 @@ public class UWSJob extends SerializableUWSObject { * * @return A unique job identifier. 
*/ - protected String generateJobId() throws UWSException{ - String generatedId = System.currentTimeMillis() + "A"; - if (lastId != null){ - while(lastId.equals(generatedId)) - generatedId = generatedId.substring(0, generatedId.length() - 1) + (char)(generatedId.charAt(generatedId.length() - 1) + 1); + protected String generateJobId(){ + synchronized(lastId){ + String generatedId = System.currentTimeMillis() + "A"; + if (lastId != null){ + while(lastId.equals(generatedId)) + generatedId = generatedId.substring(0, generatedId.length() - 1) + (char)(generatedId.charAt(generatedId.length() - 1) + 1); + } + lastId = generatedId; + return generatedId; } - lastId = generatedId; - return generatedId; } - /* - *

    Loads the given parameters: all known parameters (with write access) are updated - * whereas others are returned in a new map in which all keys are in lower case.

    - * - *

    Important: The phase parameter is NEVER managed here and is ALWAYS added immediately in the additional parameters attribute !

    - * - *

    Note: UWS parameters with write access are: - *

      - *
    • {@link UWSJob#PARAM_RUN_ID RUN_ID}
    • - *
    • {@link UWSJob#PARAM_EXECUTION_DURATION EXECUTION_DURATION}
    • - *
    • {@link UWSJob#PARAM_DESTRUCTION_TIME DESTRUCTION_TIME}
    • - *
    • {@link UWSJob#PARAM_PHASE PHASE} if equals to {@link UWSJob#PHASE_RUN} or {@link UWSJob#PHASE_ABORT}
    • - *

    - * - *

    Note: To check more DEFAULT parameters you just have to: - *

      - *
    1. override the function {@link UWSJob#loadDefaultParams(Map)}
    2. - *
    3. call super.loadParams(Map)
    4. - *
    5. add your own checking (do not forget to update the returned map and to return it).
    6. - *

    - * - * @param lstParam The list of parameters to load (UWS - included PHASE - and additional parameters). - * - * @return
      - *
    • a new map with all the parameters that have not been loaded (additional parameters and/or not known UWS parameter and/or the PHASE parameter)
    • - *
    • or an empty map
    • - *
    • or null if the job is executing or is ended (actually: all phase except PENDING)
    • - *
    - * - * @throws UWSException If a given UWS parameter is not correct. - * - @SuppressWarnings("unchecked") - protected Map loadDefaultParams(final Map lstParam) throws UWSException { - if (lstParam == null) - return new HashMap(); - - // Forbids the parameter modification if the job is already finished: - if (isFinished()) - throw UWSExceptionFactory.jobModificationForbidden(getJobId(), getPhase(), null); - - // Build a new map for all the ignored parameters (that's to say all non UWS parameters): - HashMap otherParams = new HashMap(); - - Set> paramSet = lstParam.entrySet(); - String paramName = null; - Object paramValue = null; - for(Map.Entry param : paramSet){ - paramName = param.getKey(); - paramValue = param.getValue(); - - if (paramName == null || paramValue == null) - continue; - - // PHASE: - if (paramName.equalsIgnoreCase(PARAM_PHASE)){ - if (!phase.isFinished()) - otherParams.put(PARAM_PHASE, paramValue); - - }// PARAMETERS: - else if (paramName.equalsIgnoreCase(PARAM_PARAMETERS)){ - if (paramValue instanceof Map){ - Map m = (Map)paramValue; - for(Map.Entry entry : (Set)m.entrySet()){ - if (entry.getKey() instanceof String) - otherParams.put((String)entry.getKey(), entry.getValue()); - } - } - - }// RUN ID: - else if (paramName.equalsIgnoreCase(PARAM_RUN_ID)){ - if (paramValue instanceof String) - setRunId((String)paramValue); - else - throw UWSExceptionFactory.badFormat(getJobId(), "RUN ID", paramValue.toString(), paramValue.getClass().getName(), "A String instance"); - - }// EXECUTION DURATION: - else if (paramName.equalsIgnoreCase(PARAM_EXECUTION_DURATION)){ - if (isRunning()) - throw UWSExceptionFactory.jobModificationForbidden(getJobId(), getPhase(), "EXECUTION DURATION"); - - if (!(paramValue instanceof String) && !(paramValue instanceof Long)) - throw UWSExceptionFactory.badFormat(getJobId(), "EXECUTION DURATION", paramValue.toString(), paramValue.getClass().getName(), "A Long or a String instance."); - - try{ - 
setExecutionDuration((paramValue instanceof String) ? Long.parseLong((String)paramValue) : (Long)paramValue); - }catch(NumberFormatException ex){ - setExecutionDuration(0); - throw UWSExceptionFactory.badFormat(getJobId(), "EXECUTION DURATION", paramValue.toString(), paramValue.getClass().getName(), "A long integer value"); - } - - }// DESTRUCTION TIME: - else if (paramName.equalsIgnoreCase(PARAM_DESTRUCTION_TIME)){ - if (isRunning()){ - try{ - throw UWSExceptionFactory.jobModificationForbidden(getJobId(), getPhase(), "DESTRUCTION TIME"); - }catch(UWSException ue){ - ue.printStackTrace(); - System.out.println(" => PARAM NAME = \""+paramName+"\" ; PARAM VALUE = "+paramValue); - throw ue; - } - } - - if (!(paramValue instanceof String) && !(paramValue instanceof Date)) - throw UWSExceptionFactory.badFormat(getJobId(), paramName, paramValue.toString(), paramValue.getClass().getName(), "A Date or a String instance."); - - try { - if (paramValue instanceof String){ - String time = (String)paramValue; - if (time != null && !time.trim().isEmpty()) - setDestructionTime(dateFormat.parse(time)); - }else - setDestructionTime((Date)paramValue); - } catch (ParseException e) { - throw UWSExceptionFactory.badFormat(getJobId(), paramName, paramValue.toString(), null, ((dateFormat instanceof SimpleDateFormat)?(((SimpleDateFormat)dateFormat).toPattern()):"A valid date (format: ???).")); - } - - }// READ-ONLY PARAMETERS: - else if (paramName.equalsIgnoreCase(PARAM_JOB_ID) && paramName.equalsIgnoreCase(PARAM_QUOTE) && paramName.equalsIgnoreCase(PARAM_START_TIME) && paramName.equalsIgnoreCase(PARAM_END_TIME) && paramName.equalsIgnoreCase(PARAM_RESULTS) && paramName.equalsIgnoreCase(PARAM_ERROR_SUMMARY)){ - continue; - - }// ADDITIONAL PARAMETERS - else - otherParams.put(paramName, paramValue); - } - return otherParams; - }*/ - /** *

    Gets the value of the specified parameter.

    * @@ -598,36 +435,17 @@ public class UWSJob extends SerializableUWSObject { return inputParams.get(name); } - /* - *

    Method called when updating one or several parameters using the functions {@link #addOrUpdateParameter(String, String)} and - * {@link #addOrUpdateParameters(Map)} or at the job creation.

    - * - *

    It is useful if you need to check or to process all or a part of the additional parameters stored in {@link #additionalParameters}.

    - * - *

    By default this function does nothing and always return true.

    - * - * @return true if all required additional parameters have been successfully updated, false otherwise. - * - * @throws UWSException If an error occurred during the updating of one parameter. - * - * @see #addOrUpdateParameter(String, String) - * @see #addOrUpdateParameters(Map) - * - protected boolean loadAdditionalParams() throws UWSException { - return true; - }*/ - /** *

    Looks for an additional parameters which corresponds to the Execution Phase. If it exists and:

    *
      - *
    • is equals to {@link UWSJob#PHASE_RUN RUN} => remove it from the attribute {@link #additionalParameters} and start the job.
    • - *
    • is equals to {@link UWSJob#PHASE_ABORT ABORT} => remove it from the attribute {@link #additionalParameters} and abort the job.
    • - *
    • is another value => the attribute stays in the attribute {@link #additionalParameters} and nothing is done.
    • + *
    • is equals to {@link UWSJob#PHASE_RUN RUN} => remove it from the attribute {@link #inputParams} and start the job.
    • + *
    • is equals to {@link UWSJob#PHASE_ABORT ABORT} => remove it from the attribute {@link #inputParams} and abort the job.
    • + *
    • is another value => the attribute is though removed from the attribute {@link #inputParams} but nothing is done.
    • *
    * * @param user The user who asks to apply the phase parameter (start/abort). (may be NULL) * - * @throws UWSException If it is impossible to change the Execution Phase + * @throws UWSException If it is impossible the state of this job (into EXECUTING or ABORTED) * or if the given user is not allowed to execute this job. * * @see UWSParameters#hasInputPhase() @@ -642,12 +460,12 @@ public class UWSJob extends SerializableUWSObject { if (inputPhase.equalsIgnoreCase(PHASE_RUN)){ // Forbids the execution if the user has not the required permission: if (user != null && !user.equals(owner) && !user.hasExecutePermission(this)) - throw UWSExceptionFactory.executePermissionDenied(user, getJobId()); + throw new UWSException(UWSException.PERMISSION_DENIED, UWSExceptionFactory.executePermissionDenied(user, jobId)); start(); }else if (inputPhase.equalsIgnoreCase(PHASE_ABORT)){ // Forbids the execution if the user has not the required permission: if (user != null && !user.equals(owner) && !user.hasExecutePermission(this)) - throw UWSExceptionFactory.executePermissionDenied(user, getJobId()); + throw new UWSException(UWSException.PERMISSION_DENIED, UWSExceptionFactory.executePermissionDenied(user, jobId)); abort(); } } @@ -765,6 +583,9 @@ public class UWSJob extends SerializableUWSObject { ExecutionPhase oldPhase = phase.getPhase(); phase.setPhase(p, force); + if (!force) + getLogger().logJob(LogLevel.INFO, this, "CHANGE_PHASE", "The job \"" + getJobId() + "\" goes from " + oldPhase + " to " + p, null); + // Notify the execution manager: if (phase.isFinished() && getJobList() != null) getJobList().getExecutionManager().remove(this); @@ -840,7 +661,7 @@ public class UWSJob extends SerializableUWSObject { getJobList().getUWS().getBackupManager().saveOwner(owner); // Log the end of this job: - getLogger().jobFinished(this); + getLogger().logJob(LogLevel.INFO, this, "END", "Job \"" + jobId + "\" ended with the status " + phase, null); } /** @@ -890,19 +711,19 @@ public class 
UWSJob extends SerializableUWSObject { * If known the jobs list is notify of this destruction time update. *

    * - * @param destructionTime The destruction time of this job. + * @param destructionTime The destruction time of this job. MUST NOT be NULL * * @see JobList#updateDestruction(UWSJob) * @see UWSParameters#set(String, Object) */ public final void setDestructionTime(Date destructionTime){ - if (phase.isJobUpdatable()){ + if (destructionTime != null && phase.isJobUpdatable()){ try{ inputParams.set(PARAM_DESTRUCTION_TIME, destructionTime); if (myJobList != null) myJobList.updateDestruction(this); }catch(UWSException ue){ - ; + getLogger().logJob(LogLevel.WARNING, this, "SET_DESTRUCTION", "Can not set the destruction time of the job \"" + getJobId() + "\" to \"" + destructionTime + "\"!", ue); } } } @@ -922,17 +743,21 @@ public class UWSJob extends SerializableUWSObject { *

    IMPORTANT: This function will have no effect if the job is finished, that is to say if the current phase is * {@link ExecutionPhase#ABORTED ABORTED}, {@link ExecutionPhase#ERROR ERROR} or {@link ExecutionPhase#COMPLETED COMPLETED}..

    * - * @param errorSummary A summary of the error. + * @param errorSummary A summary of the error. MUST NOT be NULL * * @throws UWSException If the job execution is finished that is to say if the phase is ABORTED, ERROR or COMPLETED. * * @see #isFinished() */ public final void setErrorSummary(ErrorSummary errorSummary) throws UWSException{ - if (!isFinished()) + if (errorSummary == null) + return; + else if (!isFinished()) this.errorSummary = errorSummary; - else - throw UWSExceptionFactory.jobModificationForbidden(getJobId(), getPhase(), "ERROR SUMMARY"); + else{ + getLogger().logJob(LogLevel.ERROR, this, "SET_ERROR", "Can not set an error summary when the job is finished (or not yet started)! The current phase is: " + getPhase() + " ; the summary of the error to set is: \"" + errorSummary.message + "\".", null); + throw new UWSException(UWSException.NOT_ALLOWED, UWSExceptionFactory.jobModificationForbidden(jobId, getPhase(), "ERROR SUMMARY")); + } } /** @@ -1049,11 +874,51 @@ public class UWSJob extends SerializableUWSObject { * @throws UWSException If a parameter value is incorrect. * * @see JobPhase#isJobUpdatable() - * @see UWSJob#addOrUpdateParameters(Map) */ public final boolean addOrUpdateParameter(String paramName, Object paramValue) throws UWSException{ - if (!phase.isFinished()){ + return addOrUpdateParameter(paramName, paramValue, null); + } + + /** + * Adds or updates the specified parameter with the given value ONLY IF the job can be updated (considering its current execution phase, see {@link JobPhase#isJobUpdatable()}). + * + * @param paramName The name of the parameter to add or to update. + * @param paramValue The (new) value of the specified parameter. + * @param user The user who asks for this update. + * + * @return
    • true if the parameter has been successfully added/updated,
    • + *
    • false otherwise (particularly if paramName=null or paramName="" or paramValue=null).
    + * + * @throws UWSException If a parameter value is incorrect. + * + * @since 4.1 + * + * @see JobPhase#isJobUpdatable() + */ + public final boolean addOrUpdateParameter(String paramName, Object paramValue, final JobOwner user) throws UWSException{ + if (paramValue != null && !phase.isFinished()){ + + // Set the parameter: inputParams.set(paramName, paramValue); + + // If it is a file or an array containing files, they must be moved in a location related to this job: + try{ + if (paramValue instanceof UploadFile) + ((UploadFile)paramValue).move(this); + else if (paramValue.getClass().isArray()){ + for(Object o : (Object[])paramValue){ + if (o != null && o instanceof UploadFile) + ((UploadFile)o).move(this); + } + } + }catch(IOException ioe){ + getLogger().logJob(LogLevel.WARNING, this, "MOVE_UPLOAD", "Can not move an uploaded file in the job \"" + jobId + "\"!", ioe); + return false; + } + + // Apply the retrieved phase: + applyPhaseParam(user); + return true; }else return false; @@ -1062,11 +927,11 @@ public class UWSJob extends SerializableUWSObject { /** *

    Adds or updates the given parameters ONLY IF the job can be updated (considering its current execution phase, see {@link JobPhase#isJobUpdatable()}).

    * - *

    Whatever is the result of {@link #loadDefaultParams(Map)} the method {@link #applyPhaseParam()} is called so that if there is an additional parameter {@link #PARAM_PHASE} with the value: + *

    At the end of this function, the method {@link #applyPhaseParam(JobOwner)} is called so that if there is an additional parameter {@link #PARAM_PHASE} with the value: *

      *
    • {@link UWSJob#PHASE_RUN RUN} then the job is starting and the phase goes to {@link ExecutionPhase#EXECUTING EXECUTING}.
    • *
    • {@link UWSJob#PHASE_ABORT ABORT} then the job is aborting.
    • - *
    • otherwise the parameter {@link UWSJob#PARAM_PHASE PARAM_PHASE} remains in the {@link UWSJob#additionalParameters additionalParameters} list.
    • + *
    • otherwise the parameter {@link UWSJob#PARAM_PHASE PARAM_PHASE} is removed from {@link UWSJob#inputParams inputParams} and nothing is done.
    • *

    * * @param params A list of parameters to add/update. @@ -1075,7 +940,7 @@ public class UWSJob extends SerializableUWSObject { * * @throws UWSException If a parameter value is incorrect. * - * @see #addOrUpdateParameters(Map) + * @see #addOrUpdateParameters(UWSParameters, JobOwner) */ public boolean addOrUpdateParameters(UWSParameters params) throws UWSException{ return addOrUpdateParameters(params, null); @@ -1084,38 +949,55 @@ public class UWSJob extends SerializableUWSObject { /** *

    Adds or updates the given parameters ONLY IF the job can be updated (considering its current execution phase, see {@link JobPhase#isJobUpdatable()}).

    * - *

    Whatever is the result of {@link #loadDefaultParams(Map)} the method {@link #applyPhaseParam()} is called so that if there is an additional parameter {@link #PARAM_PHASE} with the value: + *

    At the end of this function, the method {@link #applyPhaseParam(JobOwner)} is called so that if there is an additional parameter {@link #PARAM_PHASE} with the value: *

      *
    • {@link UWSJob#PHASE_RUN RUN} then the job is starting and the phase goes to {@link ExecutionPhase#EXECUTING EXECUTING}.
    • *
    • {@link UWSJob#PHASE_ABORT ABORT} then the job is aborting.
    • - *
    • otherwise the parameter {@link UWSJob#PARAM_PHASE PARAM_PHASE} remains in the {@link UWSJob#additionalParameters additionalParameters} list.
    • + *
    • otherwise the parameter {@link UWSJob#PARAM_PHASE PARAM_PHASE} is removed from {@link UWSJob#inputParams inputParams} and nothing is done.
    • *

    * * @param params The UWS parameters to update. + * @param user The user who asks for this update. + * * @return
    • true if all the given parameters have been successfully added/updated,
    • *
    • false if some parameters have not been managed.
    * * @throws UWSException If a parameter value is incorrect or if the given user can not update or execute this job. * - * @see #loadDefaultParams(Map) * @see JobPhase#isJobUpdatable() - * @see #loadAdditionalParams() - * @see #applyPhaseParam() + * @see #applyPhaseParam(JobOwner) */ public boolean addOrUpdateParameters(UWSParameters params, final JobOwner user) throws UWSException{ + // The job can be modified ONLY IF in PENDING phase: + if (!phase.isJobUpdatable()) + throw new UWSException(UWSException.FORBIDDEN, "Forbidden parameters modification: the job is not any more in the PENDING phase!"); + // Forbids the update if the user has not the required permission: if (user != null && !user.equals(owner) && !user.hasWritePermission(this)) - throw UWSExceptionFactory.writePermissionDenied(user, false, getJobId()); + throw new UWSException(UWSException.PERMISSION_DENIED, UWSExceptionFactory.writePermissionDenied(user, false, getJobId())); // Load all parameters: String[] updated = inputParams.update(params); // If the destruction time has been updated, the modification must be propagated to the jobs list: + Object newValue; for(String updatedParam : updated){ + // CASE DESTRUCTION_TIME: update the thread dedicated to the destruction: if (updatedParam.equals(PARAM_DESTRUCTION_TIME)){ if (myJobList != null) myJobList.updateDestruction(this); - break; + } + // DEFAULT: test whether the parameter is a file, and if yes, move it in a location related to this job: + else{ + newValue = inputParams.get(updatedParam); + if (newValue != null && newValue instanceof UploadFile){ + try{ + ((UploadFile)newValue).move(this); + }catch(IOException ioe){ + getLogger().logJob(LogLevel.WARNING, this, "MOVE_UPLOAD", "Can not move an uploaded file in the job \"" + jobId + "\"!", ioe); + inputParams.remove(updatedParam); + } + } } } @@ -1139,7 +1021,16 @@ public class UWSJob extends SerializableUWSObject { if (phase.isFinished() || paramName == null) return false; else{ - 
inputParams.remove(paramName); + // Remove the parameter from the map: + Object removed = inputParams.remove(paramName); + // If the parameter value was an uploaded file, delete it physically: + if (removed != null && removed instanceof UploadFile){ + try{ + ((UploadFile)removed).deleteFile(); + }catch(IOException ioe){ + getLogger().logJob(LogLevel.WARNING, this, "MOVE_UPLOAD", "Can not delete the uploaded file \"" + paramName + "\" of the job \"" + jobId + "\"!", ioe); + } + } return true; } } @@ -1189,9 +1080,11 @@ public class UWSJob extends SerializableUWSObject { public boolean addResult(Result res) throws UWSException{ if (res == null) return false; - else if (isFinished()) - throw UWSExceptionFactory.jobModificationForbidden(getJobId(), getPhase(), "RESULT"); - else{ + else if (isFinished()){ + UWSException ue = new UWSException(UWSException.NOT_ALLOWED, UWSExceptionFactory.jobModificationForbidden(getJobId(), getPhase(), "RESULT")); + getLogger().logJob(LogLevel.ERROR, this, "ADD_RESULT", "Can not add the result \"" + res.getId() + "\" to the job \"" + getJobId() + "\": this job is already finished (or not yet started). Current phase: " + getPhase(), ue); + throw ue; + }else{ synchronized(results){ if (results.containsKey(res.getId())) return false; @@ -1228,7 +1121,7 @@ public class UWSJob extends SerializableUWSObject { *

    note 2: this job is removed from its previous job list, if there is one.

    *

    note 3: this job is NOT automatically added into the new jobs list. Indeed, this function should be called by {@link JobList#addNewJob(UWSJob)}.

    * - * @param jobList Its new jobs list. note: if NULL, nothing is done ! + * @param jobList Its new jobs list. note: if NULL, nothing is done ! * * @throws IllegalStateException If this job is not PENDING. * @@ -1302,7 +1195,8 @@ public class UWSJob extends SerializableUWSObject { * * @param useManager true to let the execution manager deciding whether the job starts immediately or whether it must be put in a queue until enough resources are available, false to start the execution immediately. * - * @throws UWSException If there is an error while changing the execution phase or when starting the corresponding thread. + * @throws NullPointerException If this job is not associated with a job list or the associated job list is not part of a UWS service or if no thread is created. + * @throws UWSException If there is an error while changing the execution phase or when starting the corresponding thread. * * @see #isRunning() * @see UWSFactory#createJobThread(UWSJob) @@ -1314,7 +1208,7 @@ public class UWSJob extends SerializableUWSObject { public void start(boolean useManager) throws UWSException{ // This job must know its jobs list and this jobs list must know its UWS: if (myJobList == null || myJobList.getUWS() == null) - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "A UWSJob can not start if it is not part of a job list or if its job list is not part of a UWS."); + throw new IllegalStateException("A UWSJob can not start if it is not linked to a job list or if its job list is not linked to a UWS."); // If already running do nothing: else if (isRunning()) @@ -1326,24 +1220,32 @@ public class UWSJob extends SerializableUWSObject { }// Otherwise start directly the execution: else{ - // Try to change the phase: - setPhase(ExecutionPhase.EXECUTING); - - // Create and run its corresponding thread: + // Create its corresponding thread: thread = getFactory().createJobThread(this); if (thread == null) - throw new 
UWSException(UWSException.INTERNAL_SERVER_ERROR, "Missing job work ! The thread created by the factory is NULL => The job can't be executed !"); - thread.start(); - (new JobTimeOut()).start(); + throw new NullPointerException("Missing job work! The thread created by the factory is NULL => The job can't be executed!"); + + // Change the job phase: + setPhase(ExecutionPhase.EXECUTING); // Set the start time: setStartTime(new Date()); + // Run the job: + thread.start(); + (new JobTimeOut()).start(); + // Log the start of this job: - getLogger().jobStarted(this); + getLogger().logJob(LogLevel.INFO, this, "START", "Job \"" + jobId + "\" started.", null); } } + /** + * Stop/Cancel this job when its maximum execution duration has been reached. + * + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (09/2014) + */ protected final class JobTimeOut extends Thread { public JobTimeOut(){ super(JobThread.tg, "TimeOut_" + jobId); @@ -1358,9 +1260,9 @@ public class UWSJob extends SerializableUWSObject { if (!isFinished()) UWSJob.this.abort(); }catch(InterruptedException ie){ - getLogger().error("Unexpected InterruptedException while waiting the end of the execution of the job \"" + jobId + "\" (thread ID: " + thread.getId() + ") !", ie); + /* Not needed to report any interruption while waiting. */ }catch(UWSException ue){ - getLogger().error("Unexpected UWSException while waiting the end of the execution of the job \"" + jobId + "\" (thread ID: " + thread.getId() + ") !", ue); + getLogger().logJob(LogLevel.WARNING, UWSJob.this, "EXECUTING", "Unexpected error while waiting the end of the execution of the job \"" + jobId + "\" (thread ID: " + thread.getId() + ")!", ue); } } } @@ -1398,7 +1300,7 @@ public class UWSJob extends SerializableUWSObject { *

    Stops immediately the job, sets its phase to {@link ExecutionPhase#ABORTED ABORTED} and sets its end time.

    * *

    IMPORTANT: If the thread does not stop immediately the phase and the end time are not modified. However it can be done by calling one more time {@link #abort()}. - * Besides you should check that you test regularly the interrupted flag of the thread in {@link #jobWork()} !

    + * Besides you should check that you test regularly the interrupted flag of the thread in {@link JobThread#jobWork()} !

    * * @throws UWSException If there is an error while changing the execution phase. * @@ -1419,8 +1321,9 @@ public class UWSJob extends SerializableUWSObject { // Set the end time: setEndTime(new Date()); }else if (thread == null || (thread != null && !thread.isAlive())) - throw UWSExceptionFactory.incorrectPhaseTransition(getJobId(), phase.getPhase(), ExecutionPhase.ABORTED); - } + throw new UWSException(UWSException.BAD_REQUEST, UWSExceptionFactory.incorrectPhaseTransition(getJobId(), phase.getPhase(), ExecutionPhase.ABORTED)); + }else + getLogger().logJob(LogLevel.WARNING, this, "ABORT", "Abortion of the job \"" + getJobId() + "\" asked but not yet effective (after having waited " + waitForStop + "ms)!", null); } /** @@ -1428,7 +1331,7 @@ public class UWSJob extends SerializableUWSObject { * *

    IMPORTANT: If the thread does not stop immediately the phase, the error summary and the end time are not modified. * However it can be done by calling one more time {@link #error(ErrorSummary)}. - * Besides you should check that you test regularly the interrupted flag of the thread in {@link #jobWork()} !

    + * Besides you should check that you test regularly the interrupted flag of the thread in {@link JobThread#jobWork()} !

    * * @param error The error that has interrupted this job. * @@ -1456,8 +1359,9 @@ public class UWSJob extends SerializableUWSObject { // Set the end time: setEndTime(new Date()); }else if (thread != null && !thread.isAlive()) - throw UWSExceptionFactory.incorrectPhaseTransition(jobId, phase.getPhase(), ExecutionPhase.ERROR); - } + throw new UWSException(UWSException.BAD_REQUEST, UWSExceptionFactory.incorrectPhaseTransition(jobId, phase.getPhase(), ExecutionPhase.ERROR)); + }else + getLogger().logJob(LogLevel.WARNING, this, "ERROR", "Stopping of the job \"" + getJobId() + "\" with error asked but not yet effective (after having waited " + waitForStop + "ms)!", null); } /** Used by the thread to known whether the {@link #stop()} method has already been called, and so, that the job is stopping. */ @@ -1479,7 +1383,7 @@ public class UWSJob extends SerializableUWSObject { try{ thread.join(waitForStop); }catch(InterruptedException ie){ - getLogger().error("Unexpected InterruptedException while waiting the end of the execution of the job \"" + jobId + "\" (thread ID: " + thread.getId() + ") !", ie); + getLogger().logJob(LogLevel.WARNING, this, "END", "Unexpected InterruptedException while waiting for the end of the execution of the job \"" + jobId + "\" (thread ID: " + thread.getId() + ")!", ie); } } } @@ -1499,7 +1403,7 @@ public class UWSJob extends SerializableUWSObject { *

    Stops the job if running, removes the job from the execution manager, stops the timer for the execution duration * and may clear all files or any other resources associated to this job.

    * - *

    By default the job is aborted, only the {@link UWSJob#thread} attribute is set to null and the timers are stopped; no other operations (i.e. clear result files and error files) is done.

    + *

    By default the job is aborted, the {@link UWSJob#thread} attribute is set to null, the timers are stopped and uploaded files, results and the error summary are deleted.

    */ public void clearResources(){ // If still running, abort/stop the job: @@ -1507,27 +1411,35 @@ public class UWSJob extends SerializableUWSObject { try{ abort(); }catch(UWSException e){ - getLogger().error("Impossible to abort the job" + jobId + " => trying to stop it...", e); + getLogger().logJob(LogLevel.WARNING, this, "CLEAR_RESOURCES", "Impossible to abort the job \"" + jobId + "\" => trying to stop it...", e); stop(); } } // Remove this job from its execution manager: - try{ - if (getJobList() != null) - getJobList().getExecutionManager().remove(this); - }catch(UWSException ue){ - getLogger().error("Impossible to remove the job " + jobId + " from its execution manager !", ue); - } + if (getJobList() != null) + getJobList().getExecutionManager().remove(this); thread = null; + // Clear all uploaded files: + Iterator files = inputParams.getFiles(); + UploadFile upl; + while(files.hasNext()){ + upl = files.next(); + try{ + upl.deleteFile(); + }catch(IOException ioe){ + getLogger().logJob(LogLevel.ERROR, this, "CLEAR_RESOURCES", "Impossible to delete the file uploaded as parameter \"" + upl.paramName + "\" (" + upl.getLocation() + ") of the job \"" + jobId + "\"!", null); + } + } + // Clear all results file: for(Result r : results.values()){ try{ getFileManager().deleteResult(r, this); }catch(IOException ioe){ - getLogger().error("Impossible to delete the file associated with the result '" + r.getId() + "' of the job " + jobId + " !", ioe); + getLogger().logJob(LogLevel.ERROR, this, "CLEAR_RESOURCES", "Impossible to delete the file associated with the result '" + r.getId() + "' of the job \"" + jobId + "\"!", ioe); } } @@ -1536,9 +1448,11 @@ public class UWSJob extends SerializableUWSObject { try{ getFileManager().deleteError(errorSummary, this); }catch(IOException ioe){ - getLogger().error("Impossible to delete the file associated with the error '" + errorSummary.message + "' of the job " + jobId + " !", ioe); + getLogger().logJob(LogLevel.ERROR, this, 
"CLEAR_RESOURCES", "Impossible to delete the file associated with the error '" + errorSummary.message + "' of the job \"" + jobId + "\"!", ioe); } } + + getLogger().logJob(LogLevel.INFO, this, "CLEAR_RESOURCES", "Resources associated with the job \"" + getJobId() + "\" have been successfully freed.", null); } /* ******************* */ @@ -1629,7 +1543,7 @@ public class UWSJob extends SerializableUWSObject { } if (errors != null) - getLogger().error("Some observers of \"" + jobId + "\" can not have been updated:\n" + errors); + getLogger().logJob(LogLevel.WARNING, this, "NOTIFY", "Some observers of the job \"" + jobId + "\" can not have been updated:\n" + errors, null); } /* **************** */ @@ -1652,9 +1566,9 @@ public class UWSJob extends SerializableUWSObject { /* SERIALIZATION */ /* ************* */ @Override - public String serialize(UWSSerializer serializer, JobOwner user) throws UWSException{ + public String serialize(UWSSerializer serializer, JobOwner user) throws UWSException, Exception{ if (user != null && !user.equals(getOwner()) && !user.hasReadPermission(this)) - throw UWSExceptionFactory.readPermissionDenied(user, false, getJobId()); + throw new UWSException(UWSException.PERMISSION_DENIED, UWSExceptionFactory.readPermissionDenied(user, false, getJobId())); return serializer.getJob(this, true); } @@ -1667,11 +1581,11 @@ public class UWSJob extends SerializableUWSObject { * * @return The serialized job attribute (or the whole job if attributes is an empty array or is null). * - * @throws UWSException If there is an error during the serialization. + * @throws Exception If there is an unexpected error during the serialization. 
* * @see UWSSerializer#getJob(UWSJob, String[], boolean) */ - public String serialize(String[] attributes, UWSSerializer serializer) throws UWSException{ + public String serialize(String[] attributes, UWSSerializer serializer) throws Exception{ return serializer.getJob(this, attributes, true); } @@ -1682,30 +1596,27 @@ public class UWSJob extends SerializableUWSObject { * @param attributes The name of the attribute to serialize (if null, the whole job will be serialized). * @param serializer The serializer to use. * - * @throws UWSException If there is an error during the serialization. + * @throws Exception If there is an unexpected error during the serialization. * * @see #serialize(String[], UWSSerializer) */ - public void serialize(ServletOutputStream output, String[] attributes, UWSSerializer serializer) throws UWSException{ + public void serialize(ServletOutputStream output, String[] attributes, UWSSerializer serializer) throws UWSException, IOException, Exception{ String errorMsgPart = null; if (attributes == null || attributes.length <= 0) - errorMsgPart = "the job " + toString(); + errorMsgPart = "the job \"" + getJobId() + "\""; else - errorMsgPart = "the given attribute \"" + errorMsgPart + "\" of {" + toString() + "}"; + errorMsgPart = "the given attribute \"" + attributes[0] + "\" of the job \"" + getJobId() + "\""; if (output == null) - throw UWSExceptionFactory.missingOutputStream("impossible to serialize " + errorMsgPart + "."); - - try{ - String serialization = serialize(attributes, serializer); - if (serialization == null) - throw UWSExceptionFactory.incorrectSerialization("NULL", errorMsgPart); - else{ - output.print(serialization); - output.flush(); - } - }catch(IOException ex){ - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, ex, "IOException => impossible to serialize " + errorMsgPart + "."); + throw new NullPointerException("Missing serialization output stream when serializing " + errorMsgPart + "!"); + + String serialization = 
serialize(attributes, serializer); + if (serialization == null){ + getLogger().logJob(LogLevel.ERROR, this, "SERIALIZE", "Error while serializing " + errorMsgPart + ": NULL was returned.", null); + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Incorrect serialization value (=NULL) ! => impossible to serialize " + errorMsgPart + "."); + }else{ + output.print(serialization); + output.flush(); } } diff --git a/src/uws/job/manager/AbstractQueuedExecutionManager.java b/src/uws/job/manager/AbstractQueuedExecutionManager.java index a2e1d4082504debb196779d716f43293605f2b88..41f84a8a8819db5d51a472f6c6ecaf50b522e6e3 100644 --- a/src/uws/job/manager/AbstractQueuedExecutionManager.java +++ b/src/uws/job/manager/AbstractQueuedExecutionManager.java @@ -16,7 +16,8 @@ package uws.job.manager; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.util.Iterator; @@ -26,28 +27,34 @@ import java.util.Vector; import uws.UWSException; import uws.UWSToolBox; - -import uws.job.ErrorType; import uws.job.ExecutionPhase; import uws.job.UWSJob; import uws.service.log.UWSLog; +import uws.service.log.UWSLog.LogLevel; /** *

    Abstract implementation of the interface {@link ExecutionManager} which lets managing an execution queue.

    + * *

    * When calling {@link #execute(UWSJob)}, ALL jobs are put into the list of queued jobs (so their phase is changed * to {@link ExecutionPhase#QUEUED}). A call to {@link #refresh()}, reads this list and tries to execute the first job of the list. * The function {@link #isReadyForExecution(UWSJob)} decides whether the first job of the queue can be executed NOW or not. *

    - *

    - * NOTE: The order of queued jobs is preserved: it is implemented by a FIFO queue. - *

    * - * @author Grégory Mantelet (CDS) - * @version 05/2012 + *

    Note: + * The order of queued jobs is preserved: it is implemented by a FIFO queue. + *

    + * + *

    Note: + * After a call to {@link #stopAll()}, this manager is still able to execute new jobs. + * Except if it was not possible to stop them properly, stopped jobs could be executed again by calling + * afterwards {@link #execute(UWSJob)} with these jobs in parameter. + *

    + * + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (12/2014) */ public abstract class AbstractQueuedExecutionManager implements ExecutionManager { - private static final long serialVersionUID = 1L; /** List of running jobs. */ protected Map runningJobs; @@ -72,18 +79,22 @@ public abstract class AbstractQueuedExecutionManager implements ExecutionManager /* ***************** */ /* GETTERS & SETTERS */ /* ***************** */ + @Override public final Iterator getRunningJobs(){ return runningJobs.values().iterator(); } + @Override public final int getNbRunningJobs(){ return runningJobs.size(); } + @Override public final Iterator getQueuedJobs(){ return queuedJobs.iterator(); } + @Override public final int getNbQueuedJobs(){ return queuedJobs.size(); } @@ -102,6 +113,7 @@ public abstract class AbstractQueuedExecutionManager implements ExecutionManager * of the result of this function, the given job will be put in the queue or it will be executed. * * @param jobToExecute + * * @return true if the given job can be executed NOW (=> it will be executed), false otherwise (=> it will be put in the queue). */ public abstract boolean isReadyForExecution(UWSJob jobToExecute); @@ -111,11 +123,15 @@ public abstract class AbstractQueuedExecutionManager implements ExecutionManager /* **************************** */ /** *

    Removes the first queued job(s) from the queue and executes it (them) - * ONLY IF it (they) can be executed (see {@link #isReadyForExecution(AbstractJob)}).

    + * ONLY IF it (they) can be executed (see {@link #isReadyForExecution(UWSJob)}).

    * - *

    Note: Nothing is done if there is no queue.

    + *

    Note: + * Nothing is done if there is no queue. + *

    * - * @throws UWSException If there is an error during the phase transition of one or more jobs. + *

    Note: + * If any error occurs while refreshing this manager, it SHOULD be logged using the service logger. + *

    * * @see #hasQueue() * @see #isReadyForExecution(UWSJob) @@ -123,25 +139,22 @@ public abstract class AbstractQueuedExecutionManager implements ExecutionManager * * @see uws.job.manager.ExecutionManager#refresh() */ - public synchronized final void refresh() throws UWSException{ + @Override + public synchronized final void refresh(){ // Return immediately if no queue: if (!hasQueue()) return; - String allMsg = null; // the concatenation of all errors which may occur - // Start the first job of the queue while it can be executed: + UWSJob jobToStart; while(!queuedJobs.isEmpty() && isReadyForExecution(queuedJobs.firstElement())){ + jobToStart = queuedJobs.remove(0); try{ - startJob(queuedJobs.remove(0)); + startJob(jobToStart); }catch(UWSException ue){ - allMsg = ((allMsg == null) ? "ERRORS THAT OCCURED WHILE REFRESHING THE EXECUTION MANAGER:" : allMsg) + "\n\t- " + ue.getMessage(); + logger.logJob(LogLevel.ERROR, jobToStart, "START", "Can not start the job \"" + jobToStart.getJobId() + "\"! This job is not any more part of its execution manager.", ue); } } - - // Throw one error for all jobs that can not have been executed: - if (allMsg != null) - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, allMsg, ErrorType.TRANSIENT); } /** @@ -165,28 +178,28 @@ public abstract class AbstractQueuedExecutionManager implements ExecutionManager /** *

    Refreshes this manager and then put the given job into the queue (if it is not already into it).

    * + *

    Note: + * If any error occurs while executing the given job, it SHOULD be logged using the service logger. + *

    + * * @param jobToExecute The job to execute. - * @return The resulting execution phase of the given job ({@link ExecutionPhase#EXECUTING EXECUTING} or {@link ExecutionPhase#QUEUED QUEUED} or null if the given job is null). * - * @throws UWSException If there is an error while changing the execution phase of the given job or if the job is already finished. + * @return The resulting execution phase of the given job ({@link ExecutionPhase#EXECUTING EXECUTING} or {@link ExecutionPhase#QUEUED QUEUED} or null if the given job is null). * * @see #refresh() - * @see AbstractJob#isRunning() + * @see UWSJob#isRunning() * @see #isReadyForExecution(UWSJob) * @see UWSJob#setPhase(ExecutionPhase) * - * @see uws.job.manager.ExecutionManager#execute(AbstractJob) + * @see uws.job.manager.ExecutionManager#execute(UWSJob) */ - public synchronized final ExecutionPhase execute(final UWSJob jobToExecute) throws UWSException{ + @Override + public synchronized final ExecutionPhase execute(final UWSJob jobToExecute){ if (jobToExecute == null) return null; // Refresh the list of running jobs before all: - try{ - refresh(); - }catch(UWSException ue){ - logger.error("Impossible to refresh the execution manager !", ue); - } + refresh(); // If the job is already running, ensure it is in the list of running jobs: if (jobToExecute.isRunning()) @@ -199,12 +212,16 @@ public abstract class AbstractQueuedExecutionManager implements ExecutionManager }// Otherwise, change the phase to QUEUED, put it into the queue and then refresh the queue: else{ - if (jobToExecute.getPhase() != ExecutionPhase.QUEUED) - jobToExecute.setPhase(ExecutionPhase.QUEUED); + try{ + if (jobToExecute.getPhase() != ExecutionPhase.QUEUED) + jobToExecute.setPhase(ExecutionPhase.QUEUED); - if (!queuedJobs.contains(jobToExecute)){ - queuedJobs.add(jobToExecute); - refresh(); + if (!queuedJobs.contains(jobToExecute)){ + queuedJobs.add(jobToExecute); + refresh(); + } + }catch(UWSException ue){ + logger.logJob(LogLevel.ERROR, 
jobToExecute, "QUEUE", "Can not set the job \"" + jobToExecute.getJobId() + "\" in the QUEUED phase!", ue); } } @@ -212,15 +229,52 @@ public abstract class AbstractQueuedExecutionManager implements ExecutionManager } /** - * Removes the given job from the lists of queued and running jobs and then refreshes the manager. + *

    Removes the given job from the lists of queued and running jobs and then refreshes the manager.

    + * + *

    Note: + * If any error occurs while removing a job from this manager, it SHOULD be logged using the service logger. + *

    * * @see uws.job.manager.ExecutionManager#remove(uws.job.UWSJob) */ - public final synchronized void remove(final UWSJob jobToRemove) throws UWSException{ + @Override + public final synchronized void remove(final UWSJob jobToRemove){ if (jobToRemove != null){ runningJobs.remove(jobToRemove.getJobId()); queuedJobs.remove(jobToRemove); refresh(); } } + + @Override + public final synchronized void stopAll(){ + // Set back all queued jobs to the PENDING phase: + for(UWSJob qj : queuedJobs){ + try{ + qj.setPhase(ExecutionPhase.PENDING, true); + }catch(UWSException ue){ + if (logger != null) + logger.logJob(LogLevel.WARNING, qj, "ABORT", "Can not set back the job to the PENDING phase.", ue); + } + } + + // Empty the queue: + queuedJobs.clear(); + + // Stop all running jobs and set them back to the PENDING phase: + for(UWSJob rj : runningJobs.values()){ + try{ + // Stop the job: + rj.abort(); + // Set its phase back to PENDING: + rj.setPhase(ExecutionPhase.PENDING, true); + }catch(UWSException ue){ + if (logger != null) + logger.logJob(LogLevel.WARNING, rj, "ABORT", "Can not stop the job nicely. The thread may continue to run until its end.", ue); + } + } + + // Empty the list of running jobs: + runningJobs.clear(); + } } diff --git a/src/uws/job/manager/DefaultDestructionManager.java b/src/uws/job/manager/DefaultDestructionManager.java index 24bff3e6e1ae765a08ef33f8f9a65b50e0056d34..8a0314c2129b75a7edafabd0d100c61f457b8544 100644 --- a/src/uws/job/manager/DefaultDestructionManager.java +++ b/src/uws/job/manager/DefaultDestructionManager.java @@ -16,11 +16,11 @@ package uws.job.manager; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . 
* - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.Serializable; - import java.util.Comparator; import java.util.Date; import java.util.Timer; @@ -34,19 +34,27 @@ import uws.job.UWSJob; * The default implementation of the {@link DestructionManager} interface. * Its goal is to manage the automatic destruction any given jobs. *

    + * *

    * Jobs can be added thanks to {@link #update(UWSJob)} and removed with {@link #remove(UWSJob)}. * All added jobs are stored in a {@link TreeSet} which sorts them by ascending destruction time. * The job which must be destroyed in first is used to start a timer. * This one will destroyed the job once its destruction time is reached. *

    + * *

    * The list of jobs to destroy is supposed to be updated each time the destruction time of a job is changed. This update works only if * the job knows its jobs list ({@link UWSJob#getJobList()} != null) and its jobs list has a destruction manager. *

    * - * @author Grégory Mantelet (CDS) - * @version 05/2012 + *

    Note: + * The {@link #stop()} function lets stop this manager to watch for destructions of job until {@link #refresh()} or + * {@link #update(UWSJob)} or {@link #remove(UWSJob)} is called. When stopped, the inner timer is canceled and set + * to NULL ; no more thread resources is used. + *

    + * + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (12/2014) */ public class DefaultDestructionManager implements DestructionManager { private static final long serialVersionUID = 1L; @@ -83,7 +91,8 @@ public class DefaultDestructionManager implements DestructionManager { /** * Stops the timer if running and set to null {@link #timDestruction}, {@link #currentDate} and {@link #currentJob}. */ - protected synchronized final void stop(){ + @Override + public synchronized final void stop(){ if (timDestruction != null) timDestruction.cancel(); timDestruction = null; @@ -111,18 +120,22 @@ public class DefaultDestructionManager implements DestructionManager { /** *

    Returns true if {@link #currentDate} is different from null.

    */ + @Override public final boolean isRunning(){ return currentDate != null; } + @Override public final Date getNextDestruction(){ return currentDate; } + @Override public final String getNextJobToDestroy(){ return (currentJob == null) ? null : currentJob.getJobId(); } + @Override public final int getNbJobsToDestroy(){ return jobsToDestroy.size() + (isRunning() ? 1 : 0); } @@ -147,6 +160,7 @@ public class DefaultDestructionManager implements DestructionManager { * @see #stop() * @see #destroyJob(UWSJob) */ + @Override public synchronized void refresh(){ // Finish the current timer if... if (isRunning()){ @@ -196,6 +210,7 @@ public class DefaultDestructionManager implements DestructionManager { * @see #destroyJob(UWSJob) * @see #refresh() */ + @Override public synchronized void update(UWSJob job){ if (job != null && job.getJobList() != null && job.getDestructionTime() != null){ if (job.getDestructionTime().before(new Date())) @@ -217,6 +232,7 @@ public class DefaultDestructionManager implements DestructionManager { * @see #stop() * @see #refresh() */ + @Override public synchronized void remove(UWSJob job){ if (job == null) return; diff --git a/src/uws/job/manager/DefaultExecutionManager.java b/src/uws/job/manager/DefaultExecutionManager.java index e00bda0af82d9ef37c49eb428b8156ba306e9979..7049aaf6a76690c24a2babd1db7c2905c79b6ff2 100644 --- a/src/uws/job/manager/DefaultExecutionManager.java +++ b/src/uws/job/manager/DefaultExecutionManager.java @@ -16,7 +16,8 @@ package uws.job.manager; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . 
* - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.util.Iterator; @@ -24,38 +25,53 @@ import java.util.LinkedHashMap; import java.util.Map; import uws.UWSException; -import uws.UWSExceptionFactory; - +import uws.UWSToolBox; import uws.job.ExecutionPhase; import uws.job.UWSJob; +import uws.service.log.UWSLog; +import uws.service.log.UWSLog.LogLevel; /** *

    Default implementation of the ExecutionManager interface.

    * *

    This manager does not have a queue. That is to say that all jobs are always immediately starting. * Consequently this manager is just used to gather all running jobs.

    + * + *

    Note: + * After a call to {@link #stopAll()}, this manager is still able to execute new jobs. + * Except if it was not possible to stop them properly, stopped jobs could be executed again by calling + * afterwards {@link #execute(UWSJob)} with these jobs in parameter. + *

    * - * @author Grégory Mantelet (CDS) - * @version 05/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (12/2014) */ public class DefaultExecutionManager implements ExecutionManager { - private static final long serialVersionUID = 1L; /** List of running jobs. */ protected Map runningJobs; + protected final UWSLog logger; + public DefaultExecutionManager(){ + this(null); + } + + public DefaultExecutionManager(final UWSLog logger){ runningJobs = new LinkedHashMap(10); + this.logger = (logger == null) ? UWSToolBox.getDefaultLogger() : logger; } /* ******* */ /* GETTERS */ /* ******* */ + @Override public final Iterator getRunningJobs(){ return runningJobs.values().iterator(); } + @Override public final int getNbRunningJobs(){ return runningJobs.size(); } @@ -65,6 +81,7 @@ public class DefaultExecutionManager implements ExecutionManager { * * @see uws.job.manager.ExecutionManager#getQueuedJobs() */ + @Override public final Iterator getQueuedJobs(){ return new Iterator(){ @Override @@ -89,6 +106,7 @@ public class DefaultExecutionManager implements ExecutionManager { * * @see uws.job.manager.ExecutionManager#getNbQueuedJobs() */ + @Override public final int getNbQueuedJobs(){ return 0; } @@ -98,11 +116,13 @@ public class DefaultExecutionManager implements ExecutionManager { * * @see uws.job.manager.ExecutionManager#refresh() */ - public final void refresh() throws UWSException{ + @Override + public final void refresh(){ ; } - public synchronized ExecutionPhase execute(final UWSJob jobToExecute) throws UWSException{ + @Override + public synchronized ExecutionPhase execute(final UWSJob jobToExecute){ if (jobToExecute == null) return null; @@ -113,19 +133,42 @@ public class DefaultExecutionManager implements ExecutionManager { // If the job is already finished, ensure it is not any more in the list of running jobs: else if (jobToExecute.isFinished()){ runningJobs.remove(jobToExecute); - throw 
UWSExceptionFactory.incorrectPhaseTransition(jobToExecute.getJobId(), jobToExecute.getPhase(), ExecutionPhase.EXECUTING); + logger.logJob(LogLevel.WARNING, jobToExecute, "START", "Job \"" + jobToExecute.getJobId() + "\" already finished!", null); // Otherwise start it: }else{ - jobToExecute.start(false); - runningJobs.put(jobToExecute.getJobId(), jobToExecute); + try{ + jobToExecute.start(false); + runningJobs.put(jobToExecute.getJobId(), jobToExecute); + }catch(UWSException ue){ + logger.logJob(LogLevel.ERROR, jobToExecute, "START", "Can not start the job \"" + jobToExecute.getJobId() + "\"! This job is not any more part of its execution manager.", ue); + } } return jobToExecute.getPhase(); } - public synchronized void remove(final UWSJob jobToRemove) throws UWSException{ + @Override + public synchronized void remove(final UWSJob jobToRemove){ if (jobToRemove != null) runningJobs.remove(jobToRemove.getJobId()); } + + @Override + public synchronized void stopAll(){ + // Stop all running jobs: + for(UWSJob rj : runningJobs.values()){ + try{ + // Stop the job: + rj.abort(); + // Set its phase back to PENDING: + rj.setPhase(ExecutionPhase.PENDING, true); + }catch(UWSException ue){ + if (logger != null) + logger.logJob(LogLevel.WARNING, rj, "ABORT", "Can not stop the job nicely. The thread may continue to run until its end.", ue); + } + } + // Empty the list of running jobs: + runningJobs.clear(); + } } diff --git a/src/uws/job/manager/DestructionManager.java b/src/uws/job/manager/DestructionManager.java index 5bb89066a3b998da8e23cb787638b2ff97c61e77..022ecbc41717fe988a3c5ef7f53e1477b68cb97f 100644 --- a/src/uws/job/manager/DestructionManager.java +++ b/src/uws/job/manager/DestructionManager.java @@ -16,16 +16,15 @@ package uws.job.manager; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . 
* - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.Serializable; - import java.util.Date; import uws.job.JobList; import uws.job.UWSJob; - import uws.service.UWS; /** @@ -50,8 +49,8 @@ import uws.service.UWS; *
    *

    * - * @author Grégory Mantelet (CDS) - * @version 05/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (12/2014) * * @see DefaultDestructionManager */ @@ -114,4 +113,15 @@ public interface DestructionManager extends Serializable { * @param job The job to remove. */ public void remove(UWSJob job); + + /** + *

    Stop watching the destruction of jobs.

    + * + *

    Note: + * A subsequent call to {@link #update(UWSJob)} may enable again this manager. + *

    + * + * @since 4.1 + */ + public void stop(); } diff --git a/src/uws/job/manager/ExecutionManager.java b/src/uws/job/manager/ExecutionManager.java index 7232cb529e4770b72140a7f591ee49efdc8e564e..e50d86d9b0a23e771e17d7293bf5102ecf9c0e2e 100644 --- a/src/uws/job/manager/ExecutionManager.java +++ b/src/uws/job/manager/ExecutionManager.java @@ -16,13 +16,12 @@ package uws.job.manager; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.util.Iterator; -import uws.UWSException; - import uws.job.ExecutionPhase; import uws.job.UWSJob; @@ -36,8 +35,8 @@ import uws.job.UWSJob; * and to end ({@link #remove(UWSJob)}). *

    * - * @author Grégory Mantelet (CDS) - * @version 05/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (12/2014) */ public interface ExecutionManager { @@ -70,31 +69,50 @@ public interface ExecutionManager { public int getNbQueuedJobs(); /** - * Refreshes the lists of running and queued jobs. + *

    Refreshes the lists of running and queued jobs.

    * - * @throws UWSException If there is an error while refreshing this manager. + *

    Note: + * If any error occurs while refreshing this manager, it SHOULD be logged using the service logger. + *

    */ - public void refresh() throws UWSException; + public void refresh(); /** *

    Lets deciding whether the given job can start immediately or whether it must be put in the queue.

    * + *

    Note: + * If any error occurs while executing the given job, it SHOULD be logged using the service logger. + *

    + * * @param job The job to execute. * @return The resulting execution phase of the given job. * - * @throws UWSException If there is an error while changing the execution phase of the given job or if any other error occurs. - * * @see UWSJob#start(boolean) * @see UWSJob#setPhase(ExecutionPhase) */ - public ExecutionPhase execute(final UWSJob job) throws UWSException; + public ExecutionPhase execute(final UWSJob job); /** - * Removes the job from this manager whatever is its current execution phase. + *

    Removes the job from this manager whatever is its current execution phase.

    + * + *

    Note: + * If any error occurs while removing a job from this manager, it SHOULD be logged using the service logger. + *

    * * @param jobToRemove The job to remove. + */ + public void remove(final UWSJob jobToRemove); + + /** + *

    Stop all running jobs. No more job, even the queued ones, must be executed after a call to this function. + * All stopped or aborted queued jobs should be set forcedly back to the PENDING status.

    + * + *

    Note: + * A call to {@link #execute(UWSJob)} would re-activate this manager. However jobs stopped or + * aborted using this function might not be starting again. These behaviors at implementation-dependent. + *

    * - * @throws UWSException If there is an error while refreshing the list of running jobs or if any other error occurs. + * @since 4.1 */ - public void remove(final UWSJob jobToRemove) throws UWSException; + public void stopAll(); } diff --git a/src/uws/job/manager/QueuedExecutionManager.java b/src/uws/job/manager/QueuedExecutionManager.java index 9fd9de413af0de36bcbf935f32607e80a6e11112..51402c877d8e097a36b33e4a6c9b9911dfd7c6e1 100644 --- a/src/uws/job/manager/QueuedExecutionManager.java +++ b/src/uws/job/manager/QueuedExecutionManager.java @@ -16,11 +16,11 @@ package uws.job.manager; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import uws.UWSException; - import uws.job.UWSJob; import uws.service.log.UWSLog; @@ -29,11 +29,10 @@ import uws.service.log.UWSLog; * if there are more running jobs than a given number, the jobs to execute are put in the queue until a running job stops. * The order of queued jobs are preserved: it is implemented by a FIFO queue.

    * - * @author Grégory Mantelet (CDS) - * @version 05/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (08/2014) */ public class QueuedExecutionManager extends AbstractQueuedExecutionManager { - private static final long serialVersionUID = 1L; /** The maximum number of running jobs. */ protected int nbMaxRunningJobs = NO_QUEUE; @@ -70,11 +69,7 @@ public class QueuedExecutionManager extends AbstractQueuedExecutionManager { public final void setNoQueue(){ nbMaxRunningJobs = NO_QUEUE; - try{ - refresh(); - }catch(UWSException ue){ - logger.error("Impossible to refresh the execution manager !", ue); - } + refresh(); } /** @@ -89,8 +84,10 @@ public class QueuedExecutionManager extends AbstractQueuedExecutionManager { /** *

    Sets the maximum number of running jobs.

    * - *

    Note: If the new maximum number of running jobs is increasing the list of running jobs is immediately updated - * BUT NOT IF it is decreasing (that is to say, running jobs will not be interrupted to be put in the queue, they continue to run) !

    + *

    Note: + * If the new maximum number of running jobs is increasing the list of running jobs is immediately updated + * BUT NOT IF it is decreasing (that is to say, running jobs will not be interrupted to be put in the queue, they continue to run) ! + *

    * * @param maxRunningJobs The new maximum number of running jobs ({@link #NO_QUEUE} or a negative value means no maximum number of running jobs: there will be no queue any more). * diff --git a/src/uws/job/parameters/DestructionTimeController.java b/src/uws/job/parameters/DestructionTimeController.java index 103b64ce0e5510516ee6e21153c0e7affe7750d9..bf8699919bdd600347494613fe7052ca5afc2c74 100644 --- a/src/uws/job/parameters/DestructionTimeController.java +++ b/src/uws/job/parameters/DestructionTimeController.java @@ -16,38 +16,42 @@ package uws.job.parameters; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.Serializable; - import java.text.ParseException; import java.util.Calendar; import java.util.Date; -import uws.UWSException; -import uws.UWSExceptionFactory; -import uws.job.UWSJob; +import uws.ISO8601Format; +import uws.UWSException; /** *

    - * Let's controlling the destruction time of all jobs managed by a UWS. Thus it is possible to set a default and a maximum value. + * Let controlling the destruction time of all jobs managed by a UWS. Thus it is possible to set a default and a maximum value. * Moreover you can indicate whether the destruction time of jobs can be modified by the user or not. *

    * - *

    - * Notes: - *

      - *
    • By default, the destruction time can be modified by anyone without any limitation. - * There is no default value (that means jobs may stay forever).
    • - *
    • You can specify a destruction time (default or maximum value) in two ways: - * by an exact date-time or by an interval of time from the initialization (expressed in the second, minutes, hours, days, months or years).
    • - *
    - * - *

    + *

    Notes: + *

      + *
    • By default, the destruction time can be modified by anyone without any limitation. + * There is no default value (that means jobs may stay forever).
    • + *
    • You can specify a destruction time (default or maximum value) in two ways: + * by an exact date-time or by an interval of time from the initialization (expressed in the second, minutes, hours, days, months or years).
    • + *
    + *

    * - * @author Grégory Mantelet (CDS) - * @version 05/2012 + *

    The logic of the destruction time is set in this class. Here it is:

    + *
      + *
    • If no value is specified by the UWS client, the default value is returned.
    • + *
    • If no default value is provided, the maximum destruction date is returned.
    • + *
    • If no maximum value is provided, there is no destruction.
    • + *
    + * + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (11/2014) */ public class DestructionTimeController implements InputParamController, Serializable { private static final long serialVersionUID = 1L; @@ -56,7 +60,7 @@ public class DestructionTimeController implements InputParamController, Serializ * Represents a date/time field. * * @author Grégory Mantelet (CDS) - * @version 02/2011 + * @version 4.0 (02/2011) * * @see Calendar */ @@ -95,33 +99,38 @@ public class DestructionTimeController implements InputParamController, Serializ protected boolean allowModification = true; @Override - public Object check(Object value) throws UWSException{ + public Object check(final Object value) throws UWSException{ + // If no value, return the default one: if (value == null) - return null; + return getDefault(); + // Otherwise, parse the date: Date date = null; if (value instanceof Date) date = (Date)value; else if (value instanceof String){ String strValue = (String)value; try{ - date = UWSJob.dateFormat.parse(strValue); + date = ISO8601Format.parseToDate(strValue); }catch(ParseException pe){ - throw UWSExceptionFactory.badFormat(null, UWSJob.PARAM_DESTRUCTION_TIME, strValue, null, "A date not yet expired."); + throw new UWSException(UWSException.BAD_REQUEST, pe, "Wrong date format for the destruction time parameter: \"" + strValue + "\"! Dates must be formatted in ISO8601 (\"yyyy-MM-dd'T'hh:mm:ss[.sss]['Z'|[+|-]hh:mm]\", fields inside brackets are optional)."); } }else - throw UWSExceptionFactory.badFormat(null, UWSJob.PARAM_DESTRUCTION_TIME, value.toString(), value.getClass().getName(), "A date not yet expired."); + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Wrong type for the destruction time parameter: class \"" + value.getClass().getName() + "\"! 
It should be a Date or a string containing a date formatted in IS8601 (\"yyyy-MM-dd'T'hh:mm:ss[.sss]['Z'|[+|-]hh:mm]\", fields inside brackets are optional)."); + // Compare it to the maximum destruction time: if after, set the date to the maximum allowed date: Date maxDate = getMaxDestructionTime(); if (maxDate != null && date.after(maxDate)) - throw new UWSException(UWSException.BAD_REQUEST, "The UWS limits " + ((defaultInterval > NO_INTERVAL) ? ("the DESTRUCTION INTERVAL (since now) to " + maxInterval + " " + maxIntervalField.name().toLowerCase() + "s") : ("the DESTRUCTION TIME to " + maxDate)) + " !"); + date = maxDate; + // Return the parsed date: return date; } @Override public Object getDefault(){ - return getDefaultDestructionTime(); + Date defaultDate = getDefaultDestructionTime(); + return (defaultDate == null) ? getMaxDestructionTime() : defaultDate; } /* ***************** */ @@ -307,9 +316,15 @@ public class DestructionTimeController implements InputParamController, Serializ * @param timeField The unit of the interval (null means the job may stay forever). */ public final void setMaxDestructionInterval(int maxDestructionInterval, DateField timeField){ - this.maxInterval = maxDestructionInterval; - maxIntervalField = timeField; - maxTime = null; + if (maxDestructionInterval <= 0 || timeField == null){ + this.maxInterval = NO_INTERVAL; + maxIntervalField = null; + maxTime = null; + }else{ + this.maxInterval = maxDestructionInterval; + maxIntervalField = timeField; + maxTime = null; + } } /** @@ -317,6 +332,7 @@ public class DestructionTimeController implements InputParamController, Serializ * * @return true if the destruction time can be modified, false otherwise. 
*/ + @Override public final boolean allowModification(){ return allowModification; } diff --git a/src/uws/job/parameters/ExecutionDurationController.java b/src/uws/job/parameters/ExecutionDurationController.java index 7c2d7eedfb5680724b7a39df4c79b4b48d51771f..670d02f9556b9969546ff4f6166fcf19b1477d64 100644 --- a/src/uws/job/parameters/ExecutionDurationController.java +++ b/src/uws/job/parameters/ExecutionDurationController.java @@ -16,48 +16,78 @@ package uws.job.parameters; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.Serializable; import uws.UWSException; -import uws.UWSExceptionFactory; - import uws.job.UWSJob; /** *

    - * Lets controlling the execution duration of all jobs managed by a UWS. Thus it is possible to set a default and a maximum value. + * Let controlling the execution duration of all jobs managed by a UWS. Thus it is possible to set a default and a maximum value. * Moreover you can indicate whether the execution duration of jobs can be modified by the user or not. *

    * - *

    - * Note: - * By default, the execution duration can be modified by anyone without any limitation. - * The default value is {@link UWSJob#UNLIMITED_DURATION}. - * - *

    + *

    Note: the execution duration is always expressed in seconds.

    + * + *

    Note: + * By default, the execution duration can be modified by anyone without any limitation. + * The default and maximum value is {@link UWSJob#UNLIMITED_DURATION}. + *

    * - * @author Grégory Mantelet (CDS) - * @version 05/2012 + *

    The logic of the execution duration is set in this class. Here it is:

    + *
      + *
    • If no value is specified by the UWS client, the default value is returned.
    • + *
    • If no default value is provided, the maximum duration is returned.
    • + *
    • If no maximum value is provided, there is no limit (={@link UWSJob#UNLIMITED_DURATION}).
    • + *
    + * + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (11/2014) */ public class ExecutionDurationController implements InputParamController, Serializable { private static final long serialVersionUID = 1L; - /** The default duration. */ + /** The default duration (in seconds). */ protected long defaultDuration = UWSJob.UNLIMITED_DURATION; - /** The maximum duration. */ + /** The maximum duration (in seconds). */ protected long maxDuration = UWSJob.UNLIMITED_DURATION; /** Indicates whether the execution duration of jobs can be modified. */ protected boolean allowModification = true; - public ExecutionDurationController(){ - ; - } + /** + *

    Create a controller for the execution duration. + * By default, there is no maximum value and the default duration is {@link UWSJob#UNLIMITED_DURATION}.

    + * + *

    + * A default and/or maximum value can be set after creation using {@link #setDefaultExecutionDuration(long)} + * and {@link #setMaxExecutionDuration(long)}. By default this parameter can always be modified, but it can + * be forbidden using {@link #allowModification(boolean)}. + *

    + */ + public ExecutionDurationController(){} + /** + *

    Create a controller for the execution duration. + * The default and the maximum duration are initialized with the given parameters. + * The third parameter allows also to forbid the modification of the execution duration by the user, + * if set to false.

    + * + *

    + * A default and/or maximum value can be modified after creation using {@link #setDefaultExecutionDuration(long)} + * and {@link #setMaxExecutionDuration(long)}. The flag telling whether this parameter can be modified by the user + * can be changed using {@link #allowModification(boolean)}. + *

    + * + * @param defaultDuration Duration (in seconds) set by default to a job, when none is specified. + * @param maxDuration Maximum duration (in seconds) that can be set. If a greater value is provided by the user, an exception will be thrown by {@link #check(Object)}. + * @param allowModification true to allow the user to modify this value when creating a job, false otherwise. + */ public ExecutionDurationController(final long defaultDuration, final long maxDuration, final boolean allowModification){ setDefaultExecutionDuration(defaultDuration); setMaxExecutionDuration(maxDuration); @@ -66,31 +96,38 @@ public class ExecutionDurationController implements InputParamController, Serial @Override public Object getDefault(){ - return defaultDuration; + return (defaultDuration > 0) ? defaultDuration : getMaxExecutionDuration(); } @Override - public Object check(Object value) throws UWSException{ + public Object check(final Object value) throws UWSException{ + // If no value, return the default one: if (value == null) - return null; + return getDefault(); + // Otherwise, parse the given duration: Long duration = null; if (value instanceof Long) duration = (Long)value; + else if (value instanceof Integer) + duration = (long)(Integer)value; else if (value instanceof String){ String strValue = (String)value; try{ duration = Long.parseLong(strValue); }catch(NumberFormatException nfe){ - throw UWSExceptionFactory.badFormat(null, UWSJob.PARAM_EXECUTION_DURATION, strValue, null, "A long value between " + UWSJob.UNLIMITED_DURATION + " and " + maxDuration + " (Default value: " + defaultDuration + ")."); + throw new UWSException(UWSException.BAD_REQUEST, "Wrong format for the maximum duration parameter: \"" + strValue + "\"! 
It should be a long numeric value between " + UWSJob.UNLIMITED_DURATION + " and " + maxDuration + " (Default value: " + defaultDuration + ")."); } }else - throw UWSExceptionFactory.badFormat(null, UWSJob.PARAM_EXECUTION_DURATION, null, value.getClass().getName(), "A long value between " + UWSJob.UNLIMITED_DURATION + " and " + maxDuration + " (Default value: " + defaultDuration + ")."); + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Wrong type for the maximum duration parameter: class \"" + value.getClass().getName() + "\"! It should be long or a string containing only a long value."); - if (duration < UWSJob.UNLIMITED_DURATION) + // If the duration is negative or zero, set it to UNLIMITED: + if (duration <= 0) duration = UWSJob.UNLIMITED_DURATION; - else if (maxDuration > UWSJob.UNLIMITED_DURATION && duration > maxDuration) - throw new UWSException(UWSException.BAD_REQUEST, "The UWS limits the execution duration to maximum " + maxDuration + " seconds !"); + + // Set the maximum duration if the duration is greater than the maximum value: + if (maxDuration > 0 && (duration > maxDuration || duration <= 0)) + duration = maxDuration; return duration; } @@ -101,16 +138,19 @@ public class ExecutionDurationController implements InputParamController, Serial /** * Gets the default execution duration. * - * @return The default execution duration (0 or less mean an unlimited duration). + * @return The default execution duration (in seconds) (0 or less mean an unlimited duration). + * + * @deprecated This function is completely equivalent to {@link #getDefault()}. */ + @Deprecated public final long getDefaultExecutionDuration(){ - return defaultDuration; + return (Long)getDefault(); } /** * Sets the default execution duration. * - * @param defaultExecutionDuration The new default execution duration ({@link UWSJob#UNLIMITED_DURATION}, 0 or a negative value mean an unlimited duration). 
+ * @param defaultExecutionDuration The new default execution duration (in seconds) ({@link UWSJob#UNLIMITED_DURATION}, 0 or a negative value mean an unlimited duration). */ public final boolean setDefaultExecutionDuration(long defaultExecutionDuration){ defaultExecutionDuration = (defaultExecutionDuration <= 0) ? UWSJob.UNLIMITED_DURATION : defaultExecutionDuration; @@ -126,7 +166,7 @@ public class ExecutionDurationController implements InputParamController, Serial /** * Gets the maximum execution duration. * - * @return The maximum execution duration (0 or less mean an unlimited duration). + * @return The maximum execution duration (in seconds) (0 or less mean an unlimited duration). */ public final long getMaxExecutionDuration(){ return maxDuration; @@ -135,7 +175,7 @@ public class ExecutionDurationController implements InputParamController, Serial /** * Sets the maximum execution duration. * - * @param maxExecutionDuration The maximum execution duration ({@link UWSJob#UNLIMITED_DURATION}, 0 or a negative value mean an unlimited duration). + * @param maxExecutionDuration The maximum execution duration (in seconds) ({@link UWSJob#UNLIMITED_DURATION}, 0 or a negative value mean an unlimited duration). */ public final void setMaxExecutionDuration(long maxExecutionDuration){ maxDuration = (maxExecutionDuration <= 0) ? UWSJob.UNLIMITED_DURATION : maxExecutionDuration; @@ -148,6 +188,7 @@ public class ExecutionDurationController implements InputParamController, Serial * * @return true if the execution duration can be modified, false otherwise. 
*/ + @Override public final boolean allowModification(){ return allowModification; } diff --git a/src/uws/job/parameters/InputParamController.java b/src/uws/job/parameters/InputParamController.java index 72e6a33b45580ed3d2fa1320f4ddd6e0265f3c7c..5cf47ee792b4d85f72bc1a1696dafa3bcbdda473 100644 --- a/src/uws/job/parameters/InputParamController.java +++ b/src/uws/job/parameters/InputParamController.java @@ -25,6 +25,7 @@ import uws.UWSException; *

    Lets controlling an input parameter of a UWS job.

    * * @author Grégory Mantelet (CDS) + * @version 4.0 */ public interface InputParamController { diff --git a/src/uws/job/parameters/StringParamController.java b/src/uws/job/parameters/StringParamController.java index 39b42bf2930080f38dd93cd5ec4f8d5ca52d889d..2eecd59327768ed955441981ad5a911a5b29c3e3 100644 --- a/src/uws/job/parameters/StringParamController.java +++ b/src/uws/job/parameters/StringParamController.java @@ -16,17 +16,17 @@ package uws.job.parameters; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import uws.UWSException; -import uws.UWSExceptionFactory; /** - * Lets controlling a String parameter. + * Let controlling a String parameter. * - * @author Grégory Mantelet (CDS) - * @version 06/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (09/2014) */ public class StringParamController implements InputParamController { @@ -133,11 +133,11 @@ public class StringParamController implements InputParamController { if (strValue.equalsIgnoreCase(v)) return v; } - throw UWSExceptionFactory.badFormat(null, paramName, strValue, null, getExpectedFormat()); + throw new UWSException(UWSException.BAD_REQUEST, "Unknown value for the parameter \"" + paramName + "\": \"" + strValue + "\". It should be " + getExpectedFormat()); }else return strValue; }else - throw UWSExceptionFactory.badFormat(null, paramName, value.toString(), value.getClass().getName(), getExpectedFormat()); + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Wrong type for the parameter \"" + paramName + "\": \"" + value.getClass().getName() + "\"! 
It should be a String."); } /** @@ -146,13 +146,13 @@ public class StringParamController implements InputParamController { * @return A string which describes the format expected by this controller. */ protected final String getExpectedFormat(){ - if (possibleValues == null || possibleValues.length == 0){ - StringBuffer buffer = new StringBuffer("A String value among: "); + if (possibleValues != null && possibleValues.length > 0){ + StringBuffer buffer = new StringBuffer("a String value among: "); for(int i = 0; i < possibleValues.length; i++) buffer.append((i == 0) ? "" : ", ").append(possibleValues[i]); return buffer.toString(); }else - return "A String value."; + return "a String value."; } @Override diff --git a/src/uws/job/parameters/UWSParameters.java b/src/uws/job/parameters/UWSParameters.java index 767706ad072510b40f48d4b9f48165b59d543bfb..734a79f73c25bb035797e9098b517c8765bbd9b3 100644 --- a/src/uws/job/parameters/UWSParameters.java +++ b/src/uws/job/parameters/UWSParameters.java @@ -16,69 +16,91 @@ package uws.job.parameters; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ +import java.io.IOException; import java.text.ParseException; +import java.util.ArrayList; import java.util.Collection; +import java.util.Date; import java.util.Enumeration; +import java.util.HashMap; import java.util.Iterator; -import java.util.Date; -import java.util.Set; +import java.util.List; import java.util.Map; -import java.util.HashMap; - import java.util.Map.Entry; +import java.util.Set; import javax.servlet.http.HttpServletRequest; +import uws.ISO8601Format; import uws.UWSException; - import uws.job.UWSJob; - import uws.service.UWS; +import uws.service.request.UploadFile; /** - *

    Lets extracting all UWS standard and non-standard parameters from a map or a {@link HttpServletRequest}.

    + *

    Let extracting all UWS standard and non-standard parameters from a map.

    * *

    Input parameter check

    - *

    It is possible to check the value of some or all parameters by calling the function {@link InputParamController#check(Object)} - * of an {@link InputParamController} associated with the name of the parameter. Input parameter controllers can be - * provided at the creation of a {@link UWSParameters}. If none are given, default ones are used (see {@link #getDefaultUWSParamControllers()}).

    + *

    + * It is possible to check the value of some or all parameters by calling the function {@link InputParamController#check(Object)} + * of an {@link InputParamController} associated with the name of the parameter. Input parameter controllers can be + * provided at the creation of a {@link UWSParameters}. If none are given, default ones are used (see {@link #getDefaultControllers()}) + * for the standard UWS parameters (e.g. destruction time, duration, etc...). + *

    * *

    Default value

    - *

    By calling the function {@link #init()}, you set a default value to any parameter which has an {@link InputParamController} - * and which has not yet a value.

    - *

    The function {@link InputParamController#getDefault()} returns a default value for its associated parameter. - * This value must be obviously different from NULL.

    + *

    + * By calling the function {@link #init()}, you set a default value to any parameter which has an {@link InputParamController} + * and which has not yet a value. + *

    + *

    + * The function {@link InputParamController#getDefault()} returns a default value for its associated parameter. + * This value must be obviously different from NULL. + *

    * *

    Updating a {@link UWSParameters}

    - *

    It is possible to update a {@link UWSParameters} with another {@link UWSParameters} thanks to the function - * {@link #update(UWSParameters)}. In this case, no check is done since the values given by a - * {@link UWSParameters} should be theoretically already correct.

    - *

    In order to forbid the modification of some parameters after their initialization, you must associate an - * {@link InputParamController} with them and override the function {@link InputParamController#allowModification()} - * so that it returns false.

    + *

    + * It is possible to update a {@link UWSParameters} with another {@link UWSParameters} thanks to the function + * {@link #update(UWSParameters)}. In this case, no check is done since the values given by a + * {@link UWSParameters} should be theoretically already correct. + *

    + *

    + * In order to forbid the modification of some parameters after their initialization, you must associate an + * {@link InputParamController} with them and override the function {@link InputParamController#allowModification()} + * so that it returns false. + *

    * *

    Case sensitivity

    - *

    All UWS STANDARD parameters can be provided in any case: they will always be identified and updated. - * However any other parameter will be stored as it is provided: so with the same case. Thus, you must respect - * the case for all UWS additional parameters in your other operations on the parameters.

    - *

    If you want to identify your own parameters without case sensitivity, you must provides a list - * of all the additional parameters you are expected at the creation: see {@link #UWSParameters(HttpServletRequest, Collection, Map)} - * and {@link #UWSParameters(Map, Collection, Map)}.

    + *

    + * All UWS STANDARD parameters can be provided in any case: they will always be identified and updated. + * However any other parameter will be stored as it is provided: so with the same case. Thus, you must respect + * the case for all UWS additional parameters in your other operations on the parameters. + *

    + *

    + * If you want to identify your own parameters without case sensitivity, you must provides a list + * of all the additional parameters you are expected at the creation: see {@link #UWSParameters(HttpServletRequest, Collection, Map)} + * and {@link #UWSParameters(Map, Collection, Map)}. + *

    * *

    Additional parameters case normalization

    - *

    Indeed, the second parameter of these constructors (if != NULL) is used to normalize the name of the additional parameters so - * that they have exactly the given case.

    - *

    For instance, suppose that the given HttpServletRequest has a parameter named "foo" and - * you expect a parameter named "FOO" (only the case changes). By providing a second parameter - * which contains the entry "FOO", all parameters having the same name - even if the case is different - - * will be named "FOO".

    + *

    + * Indeed, the second parameter of these constructors (if != NULL) is used to normalize the name of the additional parameters so + * that they have exactly the given case. + *

    + *

    + * For instance, suppose that the request had a parameter named "foo" and + * you expect a parameter named "FOO" (only the case changes). By providing a second parameter + * which contains the entry "FOO", all parameters having the same name - even if the case is different - + * will be named "FOO". + *

    *

    In brief:

    *
      - *
    • With "FOO" in the second parameter of the constructor: {@link #get(String) get("FOO")} will return something if in the HttpServletRequest there is a parameter named: "foo", "FOO", "Foo", ...
    • - *
    • If the second parameter is empty, NULL or does not contain "FOO": {@link #get(String) get("FOO")} will return something if in the HttpServletRequest there is a parameter named exactly "FOO".
    • + *
    • With "FOO" in the second parameter of the constructor: {@link #get(String) get("FOO")} will return something if in the request there was a parameter named: "foo", "FOO", "Foo", ...
    • + *
    • If the second parameter is empty, NULL or does not contain "FOO": {@link #get(String) get("FOO")} will return something if in the request there was a parameter named exactly "FOO".
    • *
    * *

    UWS standard parameters

    @@ -89,11 +111,12 @@ import uws.service.UWS; *
  • executionDuration ({@link UWSJob#PARAM_EXECUTION_DURATION})
  • *
  • destruction ({@link UWSJob#PARAM_DESTRUCTION_TIME})
  • * - *

    note: All parameters stored under the parameter {@link UWSJob#PARAM_PARAMETERS} (that's to say, additional parameters) + *

    note 1: All parameters stored under the parameter {@link UWSJob#PARAM_PARAMETERS} (that's to say, additional parameters) * are also considered as READ/WRITE parameters !

    + *

    note 2: If several values have been submitted for the same UWS standard parameter, just the last occurrence is taken into account.

    * - * @author Grégory Mantelet (CDS) - * @version 06/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (12/2014) */ public class UWSParameters implements Iterable> { @@ -102,6 +125,10 @@ public class UWSParameters implements Iterable> { *

    Names of the UWS parameters whose the value can be modified by the user.

    */ protected final static String[] UWS_RW_PARAMETERS = new String[]{UWSJob.PARAM_PHASE,UWSJob.PARAM_RUN_ID,UWSJob.PARAM_EXECUTION_DURATION,UWSJob.PARAM_DESTRUCTION_TIME,UWSJob.PARAM_PARAMETERS}; + + /** Regular expression allowing to test which UWS parameters can be set. Actually, only: phase, runID, executionduration and destruction. */ + public final static String UWS_RW_PARAMETERS_REGEXP = ("(" + UWSJob.PARAM_PHASE + "|" + UWSJob.PARAM_RUN_ID + "|" + UWSJob.PARAM_EXECUTION_DURATION + "|" + UWSJob.PARAM_DESTRUCTION_TIME + ")").toLowerCase(); + /** *

    Read-Only parameters.

    *

    Names of the UWS parameters whose the value can NOT be modified by the user. These value are not kept. They are only ignored.

    @@ -119,7 +146,13 @@ public class UWSParameters implements Iterable> { * It is deleted (set to NULL) when there is a modification in the list of all parameters * (so in the function {@link #set(String, Object)}, {@link #update(UWSParameters)} and {@link #init()}).

    */ - private HashMap additionalParams = null; + private Map additionalParams = null; + + /** + * List of all uploaded files among the whole set of parameters. + * @since 4.1 + */ + protected List files = null; /** * List of the expected additional parameters. @@ -144,14 +177,14 @@ public class UWSParameters implements Iterable> { *

    Builds an empty list of UWS parameters.

    * *

    note: Even if no controllers is provided, this constructor sets the default - * input parameter controllers (see {@link #getDefaultUWSParamControllers()}).

    + * input parameter controllers (see {@link #getDefaultControllers()}).

    * * @param expectedAdditionalParams The names of all expected additional parameters (MAY BE NULL). * note: they will be identified with no case sensitivity * and stored with the same case as in this collection. * @param inputParamControllers Controllers of the input parameters (MAY BE NULL). * - * @see #getDefaultUWSParamControllers() + * @see #getDefaultControllers() */ public UWSParameters(final Collection expectedAdditionalParams, final Map inputParamControllers){ // Set the input parameter controllers: @@ -166,7 +199,7 @@ public class UWSParameters implements Iterable> { /** *

    Extracts and identifies all UWS standard and non-standard parameters from the given {@link HttpServletRequest}.

    * - *

    note: The default input parameter controllers are set by default (see {@link #getDefaultUWSParamControllers()}).

    + *

    note: The default input parameter controllers are set by default (see {@link #getDefaultControllers()}).

    * * @param request The request to parse to extract the parameters. * @@ -182,7 +215,7 @@ public class UWSParameters implements Iterable> { *

    Extracts and identifies all UWS standard and non-standard parameters from the given {@link HttpServletRequest}.

    * *

    note: Even if no controllers is provided, this constructor sets the default - * input parameter controllers (see {@link #getDefaultUWSParamControllers()}).

    + * input parameter controllers (see {@link #getDefaultControllers()}).

    * * @param request The request to parse to extract the parameters. * @param expectedAdditionalParams The names of all expected additional parameters. @@ -192,27 +225,66 @@ public class UWSParameters implements Iterable> { * * @throws UWSException If one of the given parameter is incorrect or badly formatted. * - * @see #UWSParameters(Collection, Map) + * @see #UWSParameters(Map, Collection, Map) */ - @SuppressWarnings("unchecked") public UWSParameters(final HttpServletRequest request, final Collection expectedAdditionalParams, final Map inputParamControllers) throws UWSException{ - this(expectedAdditionalParams, inputParamControllers); + this(getParameters(request), expectedAdditionalParams, inputParamControllers); + } - // Load all parameters: - if (request != null){ - Enumeration names = request.getParameterNames(); - String paramName; - while(names.hasMoreElements()){ - paramName = names.nextElement(); - set(paramName, request.getParameter(paramName)); - } + /** + *

    Get the parameters stored in the given HTTP request.

    + * + *

    + * Since the version 4.1, parameters are extracted immediately when the request is received. They are then stored in an attribute + * under the name of {@link UWS#REQ_ATTRIBUTE_PARAMETERS}. Thus, the map of parameters can be got in that way. However, if this attribute + * does not exist, this function will ask for the parameters extracted by {@link HttpServletRequest} ({@link HttpServletRequest#getParameterNames()} + * and {@link HttpServletRequest#getParameter(String)}). In this last case only the last non-null occurrence of any parameter will be kept. + *

    + * + * @param request HTTP request from which the parameters must be got. + * + * @return The extracted parameters. + * + * @since 4.1 + */ + @SuppressWarnings("unchecked") + protected static Map getParameters(final HttpServletRequest request){ + // No request => no parameters: + if (request == null) + return null; + + /* The UWS service has theoretically already extracted all parameters in function of the content-type. + * If so, these parameters can be found as a Map in the request attribute "UWS_PARAMETERS": */ + try{ + if (request.getAttribute(UWS.REQ_ATTRIBUTE_PARAMETERS) != null) + return (Map)request.getAttribute(UWS.REQ_ATTRIBUTE_PARAMETERS); + }catch(Exception e){} // 2 possible exceptions: ClassCastException and NullPointerException + + /* If there is no such attribute or if it is not of the good type, + * extract only application/x-www-form-urlencoded parameters: */ + Map map = new HashMap(request.getParameterMap().size()); + Enumeration names = request.getParameterNames(); + int i; + String n; + String[] values; + while(names.hasMoreElements()){ + n = names.nextElement(); + values = request.getParameterValues(n); + // search for the last non-null occurrence: + i = values.length - 1; + while(i >= 0 && values[i] == null) + i--; + // if there is one, keep it: + if (i >= 0) + map.put(n, values[i]); } + return map; } /** *

    Extracts and identifies all UWS standard and non-standard parameters from the map.

    * - *

    note: The default input parameter controllers are set by default (see {@link #getDefaultUWSParamControllers()}).

    + *

    note: The default input parameter controllers are set by default (see {@link #getDefaultControllers()}).

    * * @param params A map of parameters. * @@ -228,7 +300,7 @@ public class UWSParameters implements Iterable> { *

    Extracts and identifies all UWS standard and non-standard parameters from the map.

    * *

    note: Even if no controllers is provided, this constructor sets the default - * input parameter controllers (see {@link #getDefaultUWSParamControllers()}).

    + * input parameter controllers (see {@link #getDefaultControllers()}).

    * * @param params A map of parameters. * @param expectedAdditionalParams The names of all expected additional parameters. @@ -245,13 +317,11 @@ public class UWSParameters implements Iterable> { // Load all parameters: if (params != null && !params.isEmpty()){ - synchronized(params){ - Iterator> it = params.entrySet().iterator(); - Entry entry; - while(it.hasNext()){ - entry = it.next(); - set(entry.getKey(), entry.getValue()); - } + Iterator> it = params.entrySet().iterator(); + Entry entry; + while(it.hasNext()){ + entry = it.next(); + set(entry.getKey(), entry.getValue()); } } } @@ -271,30 +341,26 @@ public class UWSParameters implements Iterable> { /** *

    Must return the input parameter controller of the specified parameter.

    * - *

    note 1: This function is supposed to be case sensitive !

    - *

    note 2: By default, this function just asks to the {@link UWS} thanks to the function {@link UWS#getInputParamController(String)}.

    + *

    note: This function is supposed to be case sensitive !

    * * @param inputParamName The name of the parameter whose the controller is asked. * - * @return The corresponding controller or null if there is no controller for the specified parameter - * or if this {@link UWSParameters} instance doesn't know a {@link UWS}. + * @return The corresponding controller or null if there is no controller for the specified parameter. */ protected InputParamController getController(final String inputParamName){ return mapParamControllers.get(inputParamName); } /** - *

    Must return the list of all available input parameter controllers.

    + * Must return the list of all available input parameter controllers. * - *

    note: By default, this function just asks to the {@link UWS} thanks to the function {@link UWS#getInputParamControllers()}.

    - * - * @return The list of all available controllers or null if there is no controller - * or if this {@link UWSParameters} instance doesn't know a {@link UWS}. + * @return An iterator over all available controllers. */ protected Iterator> getControllers(){ return mapParamControllers.entrySet().iterator(); } + @Override public final Iterator> iterator(){ return params.entrySet().iterator(); } @@ -358,19 +424,36 @@ public class UWSParameters implements Iterable> { if (newParams != null && !newParams.params.isEmpty()){ synchronized(params){ additionalParams = null; + files = null; String[] updated = new String[newParams.params.size()]; + Object oldValue; int i = 0; for(Entry entry : newParams){ // Test whether this parameter is allowed to be modified after its initialization: InputParamController controller = getController(entry.getKey()); if (controller != null && !controller.allowModification()) - throw new UWSException("The parameter \"" + entry.getKey() + "\" can not be modified after initialization !"); - // If the value is NULL, removes this parameter: - if (entry.getValue() == null) - params.remove(entry.getKey()); - // Else set it: - else - params.put(entry.getKey(), entry.getValue()); + throw new UWSException(UWSException.FORBIDDEN, "The parameter \"" + entry.getKey() + "\" can not be modified after initialization!"); + // Determine whether the value already exists: + if (params.containsKey(entry.getKey()) || entry.getKey().toLowerCase().matches(UWS_RW_PARAMETERS_REGEXP)){ + // If the value is NULL, throw an error (no parameter can be removed after job creation): + if (entry.getValue() == null) + throw new UWSException(UWSException.FORBIDDEN, "Removing a parameter (here: \"" + entry.getKey() + "\") from a job is forbidden!"); + // Else update the parameter value: + else{ + // If the parameter to replace is an uploaded file, it must be physically removed before replacement: + oldValue = params.get(entry.getKey()); + if (oldValue != null && oldValue 
instanceof UploadFile){ + try{ + ((UploadFile)oldValue).deleteFile(); + }catch(IOException ioe){} + } + // Perform the replacement: + params.put(entry.getKey(), entry.getValue()); + } + }else + // No parameter can be added after job creation: + throw new UWSException(UWSException.FORBIDDEN, "Adding a parameter (here: \"" + entry.getKey() + "\") to an existing job is forbidden by the UWS protocol!"); + // Update the list of updated parameters: updated[i++] = entry.getKey(); } return updated; @@ -384,10 +467,12 @@ public class UWSParameters implements Iterable> { * *

    note 1: The case of the parameter name MUST BE correct EXCEPT FOR the standard UWS parameters (i.e. runId, executionDuration, destructionTime).

    *

    note 2: If the name of the parameter is {@link UWSJob#PARAM_PARAMETERS PARAMETERS}, this function will return exactly what {@link #getAdditionalParameters()} returns.

    + *

    note 3: Depending of the way the parameters are fetched from an HTTP request, the returned object may be an array. Each item of this array would then be an occurrence of the parameter in the request (MAYBE in the same order as submitted).

    * * @param name Name of the parameter to get. * - * @return Value of the specified parameter, or null if the given name is null, empty or has no value. + * @return Value of the specified parameter, or null if the given name is null, or an array or {@link Object}s if several values + * have been submitted for the same parameter, empty or has no value. * * @see #normalizeParamName(String) * @see #getAdditionalParameters() @@ -402,6 +487,34 @@ public class UWSParameters implements Iterable> { return params.get(normalizedName); } + /** + * Get the list of all uploaded files. + * + * @return An iterator over the list of uploaded files. + * + * @since 4.1 + */ + public final Iterator getFiles(){ + if (files == null){ + files = new ArrayList(3); + synchronized(params){ + for(Object v : params.values()){ + if (v == null) + continue; + else if (v instanceof UploadFile) + files.add((UploadFile)v); + else if (v.getClass().isArray()){ + for(Object o : (Object[])v){ + if (o instanceof UploadFile) + files.add((UploadFile)o); + } + } + } + } + } + return files.iterator(); + } + /** *

    Sets the given value to the specified parameter. * But if the given value is null, the specified parameter is merely removed.

    @@ -413,7 +526,7 @@ public class UWSParameters implements Iterable> { *

    note 5: If the parameter {@link UWSJob#PARAM_PARAMETERS PARAMETERS} is given, it must be a Map. In this case, the map is read and all its entries are added individually.

    * * @param name Name of the parameter to set (add, update or remove). note: not case sensitive ONLY FOR the standard UWS parameters ! - * @param value The value to set. note: NULL means that the specified parameter must be removed ! + * @param value The value to set. note: NULL means that the specified parameter must be removed ; several values may have been provided using an array of Objects. * * @return The old value of the specified parameter. null may mean that the parameter has just been added, but it may also mean that nothing has been done (because, the given name is null, empty or corresponds to a read-only parameter). * @@ -423,6 +536,10 @@ public class UWSParameters implements Iterable> { */ @SuppressWarnings("unchecked") public final Object set(final String name, Object value) throws UWSException{ + // If the given value is NULL, the parameter must be removed: + if (value == null) + return remove(name); + // Normalize (take into account the case ONLY FOR the non-standard UWS parameters) the given parameter name: String normalizedName = normalizeParamName(name); @@ -432,38 +549,42 @@ public class UWSParameters implements Iterable> { synchronized(params){ additionalParams = null; - - // If the given value is NULL, the parameter must be removed: - if (value == null) - return params.remove(normalizedName); - else{ - // Case of the PARAMETERS parameter: read all parameters and set them individually into this UWSParameters instance: - if (normalizedName.equals(UWSJob.PARAM_PARAMETERS)){ - // the value MUST BE a Map: - if (value instanceof Map){ - try{ - Map otherParams = (Map)value; - HashMap mapOldValues = new HashMap(otherParams.size()); - Object oldValue = null; - for(Entry entry : otherParams.entrySet()){ - oldValue = set(entry.getKey(), entry.getValue()); - mapOldValues.put(entry.getKey(), oldValue); - } - return mapOldValues; - }catch(ClassCastException cce){ - return null; + files = null; + + // Case of the PARAMETERS parameter: read all parameters 
and set them individually into this UWSParameters instance: + if (normalizedName.equals(UWSJob.PARAM_PARAMETERS)){ + // the value MUST BE a Map: + if (value instanceof Map){ + try{ + Map otherParams = (Map)value; + HashMap mapOldValues = new HashMap(otherParams.size()); + Object oldValue = null; + for(Entry entry : otherParams.entrySet()){ + oldValue = set(entry.getKey(), entry.getValue()); + mapOldValues.put(entry.getKey(), oldValue); } - }else + return mapOldValues; + }catch(ClassCastException cce){ return null; - }else{ - // Check the value before setting it: - InputParamController controller = getController(normalizedName); - if (controller != null) - value = controller.check(value); - - // Set the new value: - return params.put(normalizedName, value); + } + }else + return null; + }else{ + // Check the value before setting it: + InputParamController controller = getController(normalizedName); + if (controller != null) + value = controller.check(value); + + // If the parameter already exists and it is an uploaded file, delete it before its replacement: + Object oldValue = params.get(normalizedName); + if (oldValue != null && oldValue instanceof UploadFile){ + try{ + ((UploadFile)oldValue).deleteFile(); + }catch(IOException ioe){} } + + // Set the new value: + return params.put(normalizedName, value); } } } @@ -487,7 +608,17 @@ public class UWSParameters implements Iterable> { synchronized(params){ additionalParams = null; - return params.remove(normalizedName); + files = null; + // Remove the file: + Object removed = params.remove(normalizedName); + // If the removed parameter was a file, remove it from the server: + if (removed != null && removed instanceof UploadFile){ + try{ + ((UploadFile)removed).deleteFile(); + }catch(IOException ioe){} + } + // Return the value of the removed parameter: + return removed; } } @@ -589,7 +720,7 @@ public class UWSParameters implements Iterable> { return (Date)value; else if (value instanceof String){ try{ - Date destruction 
= UWSJob.dateFormat.parse((String)value); + Date destruction = ISO8601Format.parseToDate((String)value); synchronized(params){ params.put(UWSJob.PARAM_DESTRUCTION_TIME, destruction); } diff --git a/src/uws/job/serializer/JSONSerializer.java b/src/uws/job/serializer/JSONSerializer.java index 9ffb8325f1f206f3ee1a7c853f8c36996dddc41a..48dc5dbf5b28df33c7df44ade4199d3863ff6880 100644 --- a/src/uws/job/serializer/JSONSerializer.java +++ b/src/uws/job/serializer/JSONSerializer.java @@ -16,29 +16,27 @@ package uws.job.serializer; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import org.json.JSONException; import org.json.Json4Uws; -import uws.UWSException; - +import uws.ISO8601Format; import uws.job.ErrorSummary; import uws.job.JobList; import uws.job.Result; import uws.job.UWSJob; - import uws.job.user.JobOwner; - import uws.service.UWS; import uws.service.UWSUrl; /** * Lets serializing any UWS resource in JSON. 
* - * @author Grégory Mantelet (CDS) - * @version 05/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (09/2014) * * @see Json4Uws */ @@ -51,178 +49,105 @@ public class JSONSerializer extends UWSSerializer { } @Override - public String getUWS(final UWS uws, final JobOwner user) throws UWSException{ - try{ - return Json4Uws.getJson(uws).toString(); - }catch(JSONException je){ - throw new UWSException(je); - } + public String getUWS(final UWS uws, final JobOwner user) throws JSONException{ + return Json4Uws.getJson(uws).toString(); } @Override - public String getJobList(final JobList jobsList, final JobOwner owner, final boolean root) throws UWSException{ - try{ - return Json4Uws.getJson(jobsList, owner).toString(); - }catch(JSONException je){ - throw new UWSException(je); - } + public String getJobList(final JobList jobsList, final JobOwner owner, final boolean root) throws JSONException{ + return Json4Uws.getJson(jobsList, owner).toString(); } @Override - public String getJob(final UWSJob job, final boolean root) throws UWSException{ - try{ - return Json4Uws.getJson(job, null, false).toString(); - }catch(JSONException je){ - throw new UWSException(je); - } + public String getJob(final UWSJob job, final boolean root) throws JSONException{ + return Json4Uws.getJson(job, null, false).toString(); } @Override - public String getJobRef(final UWSJob job, final UWSUrl jobsListUrl) throws UWSException{ - try{ - return Json4Uws.getJson(job, jobsListUrl, true).toString(); - }catch(JSONException je){ - throw new UWSException(je); - } + public String getJobRef(final UWSJob job, final UWSUrl jobsListUrl) throws JSONException{ + return Json4Uws.getJson(job, jobsListUrl, true).toString(); } @Override - public String getJobID(final UWSJob job, final boolean root) throws UWSException{ - try{ - return Json4Uws.getJson(UWSJob.PARAM_JOB_ID, job.getJobId()).toString(); - }catch(JSONException je){ - throw new UWSException(je); - } + public String getJobID(final UWSJob job, 
final boolean root) throws JSONException{ + return Json4Uws.getJson(UWSJob.PARAM_JOB_ID, job.getJobId()).toString(); } @Override - public String getRunID(final UWSJob job, final boolean root) throws UWSException{ - try{ - return Json4Uws.getJson(UWSJob.PARAM_RUN_ID, job.getRunId()).toString(); - }catch(JSONException je){ - throw new UWSException(je); - } + public String getRunID(final UWSJob job, final boolean root) throws JSONException{ + return Json4Uws.getJson(UWSJob.PARAM_RUN_ID, job.getRunId()).toString(); } @Override - public String getOwnerID(final UWSJob job, final boolean root) throws UWSException{ + public String getOwnerID(final UWSJob job, final boolean root) throws JSONException{ if (job.getOwner() == null) return "{}"; - else{ - try{ - return Json4Uws.getJson(UWSJob.PARAM_OWNER, job.getOwner().getPseudo()).toString(); - }catch(JSONException je){ - throw new UWSException(je); - } - } + else + return Json4Uws.getJson(UWSJob.PARAM_OWNER, job.getOwner().getPseudo()).toString(); } @Override - public String getPhase(final UWSJob job, final boolean root) throws UWSException{ - try{ - return Json4Uws.getJson(UWSJob.PARAM_PHASE, job.getPhase().toString()).toString(); - }catch(JSONException je){ - throw new UWSException(je); - } + public String getPhase(final UWSJob job, final boolean root) throws JSONException{ + return Json4Uws.getJson(UWSJob.PARAM_PHASE, job.getPhase().toString()).toString(); } @Override - public String getQuote(final UWSJob job, final boolean root) throws UWSException{ - try{ - return Json4Uws.getJson(UWSJob.PARAM_QUOTE, job.getQuote()).toString(); - }catch(JSONException je){ - throw new UWSException(je); - } + public String getQuote(final UWSJob job, final boolean root) throws JSONException{ + return Json4Uws.getJson(UWSJob.PARAM_QUOTE, job.getQuote()).toString(); } @Override - public String getExecutionDuration(final UWSJob job, final boolean root) throws UWSException{ - try{ - return Json4Uws.getJson(UWSJob.PARAM_EXECUTION_DURATION, 
job.getExecutionDuration()).toString(); - }catch(JSONException je){ - throw new UWSException(je); - } + public String getExecutionDuration(final UWSJob job, final boolean root) throws JSONException{ + return Json4Uws.getJson(UWSJob.PARAM_EXECUTION_DURATION, job.getExecutionDuration()).toString(); } @Override - public String getDestructionTime(final UWSJob job, final boolean root) throws UWSException{ + public String getDestructionTime(final UWSJob job, final boolean root) throws JSONException{ if (job.getDestructionTime() != null){ - try{ - return Json4Uws.getJson(UWSJob.PARAM_DESTRUCTION_TIME, UWSJob.dateFormat.format(job.getDestructionTime())).toString(); - }catch(JSONException je){ - throw new UWSException(je); - } + return Json4Uws.getJson(UWSJob.PARAM_DESTRUCTION_TIME, ISO8601Format.format(job.getDestructionTime())).toString(); }else return "{}"; } @Override - public String getStartTime(final UWSJob job, final boolean root) throws UWSException{ - if (job.getDestructionTime() != null){ - try{ - return Json4Uws.getJson(UWSJob.PARAM_START_TIME, UWSJob.dateFormat.format(job.getDestructionTime())).toString(); - }catch(JSONException je){ - throw new UWSException(je); - } - }else + public String getStartTime(final UWSJob job, final boolean root) throws JSONException{ + if (job.getDestructionTime() != null) + return Json4Uws.getJson(UWSJob.PARAM_START_TIME, ISO8601Format.format(job.getDestructionTime())).toString(); + else return "{}"; } @Override - public String getEndTime(final UWSJob job, final boolean root) throws UWSException{ - if (job.getDestructionTime() != null){ - try{ - return Json4Uws.getJson(UWSJob.PARAM_END_TIME, UWSJob.dateFormat.format(job.getDestructionTime())).toString(); - }catch(JSONException je){ - throw new UWSException(je); - } - }else + public String getEndTime(final UWSJob job, final boolean root) throws JSONException{ + if (job.getDestructionTime() != null) + return Json4Uws.getJson(UWSJob.PARAM_END_TIME, 
ISO8601Format.format(job.getDestructionTime())).toString(); + else return "{}"; } @Override - public String getErrorSummary(final ErrorSummary error, final boolean root) throws UWSException{ - try{ - return Json4Uws.getJson(error).toString(); - }catch(JSONException je){ - throw new UWSException(je); - } + public String getErrorSummary(final ErrorSummary error, final boolean root) throws JSONException{ + return Json4Uws.getJson(error).toString(); } @Override - public String getAdditionalParameters(final UWSJob job, final boolean root) throws UWSException{ - try{ - return Json4Uws.getJobParamsJson(job).toString(); - }catch(JSONException je){ - throw new UWSException(je); - } + public String getAdditionalParameters(final UWSJob job, final boolean root) throws JSONException{ + return Json4Uws.getJobParamsJson(job).toString(); } @Override - public String getAdditionalParameter(final String paramName, final Object paramValue, final boolean root) throws UWSException{ - try{ - return Json4Uws.getJson(paramName, (paramValue == null) ? null : paramValue.toString()).toString(); - }catch(JSONException je){ - throw new UWSException(je); - } + public String getAdditionalParameter(final String paramName, final Object paramValue, final boolean root) throws JSONException{ + return Json4Uws.getJson(paramName, (paramValue == null) ? 
null : paramValue.toString()).toString(); } @Override - public String getResults(final UWSJob job, final boolean root) throws UWSException{ - try{ - return Json4Uws.getJobResultsJson(job).toString(); - }catch(JSONException je){ - throw new UWSException(je); - } + public String getResults(final UWSJob job, final boolean root) throws JSONException{ + return Json4Uws.getJobResultsJson(job).toString(); } @Override - public String getResult(final Result result, final boolean root) throws UWSException{ - try{ - return Json4Uws.getJobResultJson(result).toString(); - }catch(JSONException je){ - throw new UWSException(je); - } + public String getResult(final Result result, final boolean root) throws JSONException{ + return Json4Uws.getJobResultJson(result).toString(); } } diff --git a/src/uws/job/serializer/UWSSerializer.java b/src/uws/job/serializer/UWSSerializer.java index 3303987621c111d3af67570c640748042f9ac659..6a191d7e598f3cdfac7c370f21e03d8b7eae1da9 100644 --- a/src/uws/job/serializer/UWSSerializer.java +++ b/src/uws/job/serializer/UWSSerializer.java @@ -16,21 +16,19 @@ package uws.job.serializer; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.Serializable; +import uws.ISO8601Format; import uws.UWSException; -import uws.UWSExceptionFactory; - import uws.job.ErrorSummary; import uws.job.JobList; import uws.job.Result; import uws.job.UWSJob; import uws.job.user.JobOwner; - -import uws.service.UWSService; import uws.service.UWS; import uws.service.UWSUrl; @@ -42,8 +40,8 @@ import uws.service.UWSUrl; *
  • JSON by the class {@link JSONSerializer}
  • * * - * @author Grégory Mantelet (CDS) - * @version 05/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (12/2014) * * @see XMLSerializer * @see JSONSerializer @@ -52,7 +50,7 @@ public abstract class UWSSerializer implements Serializable { private static final long serialVersionUID = 1L; /** MIME type for XML: application/xml */ - public static final String MIME_TYPE_XML = "application/xml"; + public static final String MIME_TYPE_XML = "text/xml"; /** MIME type for JSON: application/json */ public static final String MIME_TYPE_JSON = "application/json"; /** MIME type for TEXT: text/plain */ @@ -72,9 +70,9 @@ public abstract class UWSSerializer implements Serializable { * @return The serialization of the given attribute * or the serialization of the whole job if the given attributes array is empty or null. * - * @throws UWSException If the specified attribute/parameter/result does not exist. + * @throws Exception If an error occurs while serializing the specified job/attribute/parameter/result. */ - public String getJob(final UWSJob job, final String[] attributes, final boolean root) throws UWSException{ + public String getJob(final UWSJob job, final String[] attributes, final boolean root) throws Exception{ if (attributes == null || attributes.length <= 0) return getJob(job, root); @@ -97,16 +95,16 @@ public abstract class UWSSerializer implements Serializable { return job.getQuote() + ""; // START TIME: else if (firstAttribute.equalsIgnoreCase(UWSJob.PARAM_START_TIME)) - return (job.getStartTime() == null) ? "" : UWSJob.dateFormat.format(job.getStartTime()); + return (job.getStartTime() == null) ? "" : ISO8601Format.format(job.getStartTime()); // END TIME: else if (firstAttribute.equalsIgnoreCase(UWSJob.PARAM_END_TIME)) - return (job.getEndTime() == null) ? "" : UWSJob.dateFormat.format(job.getEndTime()); + return (job.getEndTime() == null) ? 
"" : ISO8601Format.format(job.getEndTime()); // EXECUTION DURATION: else if (firstAttribute.equalsIgnoreCase(UWSJob.PARAM_EXECUTION_DURATION)) return job.getExecutionDuration() + ""; // DESTRUCTION TIME: else if (firstAttribute.equalsIgnoreCase(UWSJob.PARAM_DESTRUCTION_TIME)) - return (job.getDestructionTime() == null) ? "" : UWSJob.dateFormat.format(job.getDestructionTime()); + return (job.getDestructionTime() == null) ? "" : ISO8601Format.format(job.getDestructionTime()); // PARAMETERS LIST: else if (firstAttribute.equalsIgnoreCase(UWSJob.PARAM_PARAMETERS)){ if (attributes.length <= 1) @@ -115,10 +113,23 @@ public abstract class UWSSerializer implements Serializable { // PARAMETER: String secondAttribute = attributes[1]; Object value = job.getAdditionalParameterValue(secondAttribute); - if (value != null) - return value.toString(); - else - throw UWSExceptionFactory.incorrectJobParameter(job.getJobId(), secondAttribute); + if (value != null){ + // CASE: array value + if (value.getClass().isArray()){ + Object[] items = (Object[])value; + StringBuffer arrayAsString = new StringBuffer(); + for(Object item : items){ + if (arrayAsString.length() > 0) + arrayAsString.append(' ').append(';').append(' '); + arrayAsString.append(item.toString()); + } + return arrayAsString.toString(); + } + // DEFAULT: + else + return value.toString(); + }else + throw new UWSException(UWSException.NOT_FOUND, "No parameter named \"" + secondAttribute + "\" in the job \"" + job.getJobId() + "\"!"); } // RESULTS LIST: }else if (firstAttribute.equalsIgnoreCase(UWSJob.PARAM_RESULTS)){ @@ -131,7 +142,7 @@ public abstract class UWSSerializer implements Serializable { if (r != null) return getResult(r, root); else - throw UWSExceptionFactory.incorrectJobResult(job.getJobId(), secondAttribute); + throw new UWSException(UWSException.NOT_FOUND, "No result named \"" + secondAttribute + "\" in the job \"" + job.getJobId() + "\"!"); } // ERROR DETAILS or ERROR SUMMARY: }else if 
(firstAttribute.equalsIgnoreCase(UWSJob.PARAM_ERROR_SUMMARY)) @@ -141,7 +152,7 @@ public abstract class UWSSerializer implements Serializable { return getErrorSummary(job.getErrorSummary(), root); // OTHERS: else - throw UWSExceptionFactory.incorrectJobParameter(job.getJobId(), firstAttribute); + throw new UWSException(UWSException.NOT_FOUND, "No job attribute named \"" + firstAttribute + "\" in the job \"" + job.getJobId() + "\"!"); } @Override @@ -160,12 +171,14 @@ public abstract class UWSSerializer implements Serializable { * Serializes the given UWS. * * @param uws The UWS to serialize. + * * @return The serialization of the given UWS. - * @throws UWSException If there is an error during the serialization. * - * @see UWSSerializer#getUWS(UWSService, String) + * @throws Exception If there is an error during the serialization. + * + * @see UWSSerializer#getUWS(UWS, JobOwner) */ - public String getUWS(final UWS uws) throws UWSException{ + public String getUWS(final UWS uws) throws Exception{ return getUWS(uws, null); } @@ -176,9 +189,10 @@ public abstract class UWSSerializer implements Serializable { * @param user The user which has asked the serialization of the given UWS. * * @return The serialization of the UWS. - * @throws UWSException If there is an error during the serialization. + * + * @throws Exception If there is an error during the serialization. */ - public abstract String getUWS(final UWS uws, final JobOwner user) throws UWSException; + public abstract String getUWS(final UWS uws, final JobOwner user) throws Exception; /** * Serializes the given jobs list. @@ -188,9 +202,10 @@ public abstract class UWSSerializer implements Serializable { * in a top level serialization (for a jobs list: uws), true otherwise. * * @return The serialization of the given jobs list. - * @throws UWSException If there is an error during the serialization. + * + * @throws Exception If there is an error during the serialization. 
*/ - public String getJobList(final JobList jobsList, final boolean root) throws UWSException{ + public String getJobList(final JobList jobsList, final boolean root) throws Exception{ return getJobList(jobsList, null, root); } @@ -203,10 +218,9 @@ public abstract class UWSSerializer implements Serializable { * in a top level serialization (for a jobs list: uws), true otherwise. * @return The serialization of the given jobs list. * - * - * @throws UWSException If there is an error during the serialization. + * @throws Exception If there is an error during the serialization. */ - public abstract String getJobList(final JobList jobsList, JobOwner owner, final boolean root) throws UWSException; + public abstract String getJobList(final JobList jobsList, JobOwner owner, final boolean root) throws Exception; /** * Serializes the whole given job. @@ -217,9 +231,9 @@ public abstract class UWSSerializer implements Serializable { * * @return The serialization of the given job. * - * @throws UWSException If there is an error during the serialization. + * @throws Exception If there is an error during the serialization. */ - public abstract String getJob(final UWSJob job, final boolean root) throws UWSException; + public abstract String getJob(final UWSJob job, final boolean root) throws Exception; /** * Serializes just a reference on the given job. @@ -229,11 +243,11 @@ public abstract class UWSSerializer implements Serializable { * * @return The serialization of a reference on the given job. * - * @throws UWSException If there is an error during the serialization. + * @throws Exception If there is an error during the serialization. * * @since 3.1 */ - public abstract String getJobRef(final UWSJob job, final UWSUrl jobsListUrl) throws UWSException; + public abstract String getJobRef(final UWSJob job, final UWSUrl jobsListUrl) throws Exception; /** * Serializes the ID of the given job. 
@@ -244,9 +258,9 @@ public abstract class UWSSerializer implements Serializable { * * @return The serialization of the job ID. * - * @throws UWSException If there is an error during the serialization. + * @throws Exception If there is an error during the serialization. */ - public abstract String getJobID(final UWSJob job, final boolean root) throws UWSException; + public abstract String getJobID(final UWSJob job, final boolean root) throws Exception; /** * Serializes the run ID of the given job. @@ -257,9 +271,9 @@ public abstract class UWSSerializer implements Serializable { * * @return The serialization of the run ID. * - * @throws UWSException If there is an error during the serialization. + * @throws Exception If there is an error during the serialization. */ - public abstract String getRunID(final UWSJob job, final boolean root) throws UWSException; + public abstract String getRunID(final UWSJob job, final boolean root) throws Exception; /** * Serializes the owner ID of the given job. @@ -270,9 +284,9 @@ public abstract class UWSSerializer implements Serializable { * * @return The serialization of the owner ID. * - * @throws UWSException If there is an error during the serialization. + * @throws Exception If there is an error during the serialization. */ - public abstract String getOwnerID(final UWSJob job, final boolean root) throws UWSException; + public abstract String getOwnerID(final UWSJob job, final boolean root) throws Exception; /** * Serializes the phase of the given job. @@ -283,9 +297,9 @@ public abstract class UWSSerializer implements Serializable { * * @return The serialization of the phase. * - * @throws UWSException If there is an error during the serialization. + * @throws Exception If there is an error during the serialization. 
*/ - public abstract String getPhase(final UWSJob job, final boolean root) throws UWSException; + public abstract String getPhase(final UWSJob job, final boolean root) throws Exception; /** * Serializes the quote of the given job. @@ -296,9 +310,9 @@ public abstract class UWSSerializer implements Serializable { * * @return The serialization of the quote. * - * @throws UWSException If there is an error during the serialization. + * @throws Exception If there is an error during the serialization. */ - public abstract String getQuote(final UWSJob job, final boolean root) throws UWSException; + public abstract String getQuote(final UWSJob job, final boolean root) throws Exception; /** * Serializes the start time of the given job. @@ -309,9 +323,9 @@ public abstract class UWSSerializer implements Serializable { * * @return The serialization of the start time. * - * @throws UWSException If there is an error during the serialization. + * @throws Exception If there is an error during the serialization. */ - public abstract String getStartTime(final UWSJob job, final boolean root) throws UWSException; + public abstract String getStartTime(final UWSJob job, final boolean root) throws Exception; /** * Serializes the end time of the given job. @@ -322,9 +336,9 @@ public abstract class UWSSerializer implements Serializable { * * @return The serialization of the end time. * - * @throws UWSException If there is an error during the serialization. + * @throws Exception If there is an error during the serialization. */ - public abstract String getEndTime(final UWSJob job, final boolean root) throws UWSException; + public abstract String getEndTime(final UWSJob job, final boolean root) throws Exception; /** * Serializes the execution duration of the given job. @@ -335,9 +349,9 @@ public abstract class UWSSerializer implements Serializable { * * @return The serialization of the execution duration. * - * @throws UWSException If there is an error during the serialization. 
+ * @throws Exception If there is an error during the serialization. */ - public abstract String getExecutionDuration(final UWSJob job, final boolean root) throws UWSException; + public abstract String getExecutionDuration(final UWSJob job, final boolean root) throws Exception; /** * Serializes the destruction time of the given job. @@ -348,9 +362,9 @@ public abstract class UWSSerializer implements Serializable { * * @return The serialization of the destruction time. * - * @throws UWSException If there is an error during the serialization. + * @throws Exception If there is an error during the serialization. */ - public abstract String getDestructionTime(final UWSJob job, final boolean root) throws UWSException; + public abstract String getDestructionTime(final UWSJob job, final boolean root) throws Exception; /** * Serializes the given error summary. @@ -361,9 +375,9 @@ public abstract class UWSSerializer implements Serializable { * * @return The serialization of the error summary. * - * @throws UWSException If there is an error during the serialization. + * @throws Exception If there is an error during the serialization. */ - public abstract String getErrorSummary(final ErrorSummary error, final boolean root) throws UWSException; + public abstract String getErrorSummary(final ErrorSummary error, final boolean root) throws Exception; /** * Serializes the results of the given job. @@ -374,9 +388,9 @@ public abstract class UWSSerializer implements Serializable { * * @return The serialization of the results. * - * @throws UWSException If there is an error during the serialization. + * @throws Exception If there is an error during the serialization. */ - public abstract String getResults(final UWSJob job, final boolean root) throws UWSException; + public abstract String getResults(final UWSJob job, final boolean root) throws Exception; /** * Serializes the given result. 
@@ -387,9 +401,9 @@ public abstract class UWSSerializer implements Serializable { * * @return The serialization of the result. * - * @throws UWSException If there is an error during the serialization. + * @throws Exception If there is an error during the serialization. */ - public abstract String getResult(final Result result, final boolean root) throws UWSException; + public abstract String getResult(final Result result, final boolean root) throws Exception; /** * Serializes the parameters of the given job. @@ -400,9 +414,9 @@ public abstract class UWSSerializer implements Serializable { * * @return The serialization of the parameters. * - * @throws UWSException If there is an error during the serialization. + * @throws Exception If there is an error during the serialization. */ - public abstract String getAdditionalParameters(final UWSJob job, final boolean root) throws UWSException; + public abstract String getAdditionalParameters(final UWSJob job, final boolean root) throws Exception; /** * Serializes the specified parameter. @@ -414,7 +428,7 @@ public abstract class UWSSerializer implements Serializable { * * @return The serialization of the parameter. * - * @throws UWSException If there is an error during the serialization. + * @throws Exception If there is an error during the serialization. 
*/ - public abstract String getAdditionalParameter(final String paramName, final Object paramValue, final boolean root) throws UWSException; + public abstract String getAdditionalParameter(final String paramName, final Object paramValue, final boolean root) throws Exception; } diff --git a/src/uws/job/serializer/XMLSerializer.java b/src/uws/job/serializer/XMLSerializer.java index 2107b54a00fde8422799649e6e9a117df078155a..15fe433e8d66777a78a8544273d9e6e9e6172ef5 100644 --- a/src/uws/job/serializer/XMLSerializer.java +++ b/src/uws/job/serializer/XMLSerializer.java @@ -16,29 +16,29 @@ package uws.job.serializer; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import java.util.Iterator; -import uws.UWSException; - +import uws.ISO8601Format; import uws.job.ErrorSummary; import uws.job.JobList; import uws.job.Result; import uws.job.UWSJob; import uws.job.user.JobOwner; - import uws.service.UWS; import uws.service.UWSUrl; +import uws.service.request.UploadFile; /** * Lets serializing any UWS resource in XML. * - * @author Grégory Mantelet (CDS) - * @version 05/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (02/2015) */ public class XMLSerializer extends UWSSerializer { private static final long serialVersionUID = 1L; @@ -106,10 +106,10 @@ public class XMLSerializer extends UWSSerializer { /** * Gets all UWS namespaces declarations needed for an XML representation of a UWS object. * - * @return The UWS namespaces:
    (i.e. = "xmlns:uws=[...] xmlns:xlink=[...] xmlns:xs=[...] xmlns:xsi=[...]"). + * @return The UWS namespaces:
    (i.e. = "xmlns:uws=[...] xmlns:xlink=[...] xmlns:xs=[...] xmlns:xsi=[...] xsi:schemaLocation=[...]"). */ public String getUWSNamespace(){ - return "xmlns:uws=\"http://www.ivoa.net/xml/UWS/v1.0\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:xs=\"http://www.w3.org/2001/XMLSchema\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\""; + return "xmlns=\"http://www.ivoa.net/xml/UWS/v1.0\" xmlns:xlink=\"http://www.w3.org/1999/xlink\" xmlns:xs=\"http://www.w3.org/2001/XMLSchema\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://www.ivoa.net/xml/UWS/v1.0 http://www.ivoa.net/xml/UWS/v1.0 http://www.w3.org/1999/xlink http://www.w3.org/1999/xlink.xsd http://www.w3.org/2001/XMLSchema http://www.w3.org/2001/XMLSchema.xsd\""; } /** @@ -151,8 +151,8 @@ public class XMLSerializer extends UWSSerializer { for(JobList jobList : uws){ UWSUrl jlUrl = jobList.getUrl(); xml.append("\t\t\n"); } xml.append("\t\n"); @@ -163,13 +163,15 @@ public class XMLSerializer extends UWSSerializer { } @Override - public String getJobList(final JobList jobsList, final JobOwner owner, final boolean root) throws UWSException{ - String name = jobsList.getName(); + public String getJobList(final JobList jobsList, final JobOwner owner, final boolean root){ StringBuffer xml = new StringBuffer(getHeader()); - xml.append(""); UWSUrl jobsListUrl = jobsList.getUrl(); @@ -177,7 +179,7 @@ public class XMLSerializer extends UWSSerializer { while(it.hasNext()) xml.append("\n\t").append(getJobRef(it.next(), jobsListUrl)); - xml.append("\n"); + xml.append("\n"); return xml.toString(); } @@ -188,9 +190,10 @@ public class XMLSerializer extends UWSSerializer { String newLine = "\n\t"; // general information: - xml.append(""); + xml.append(""); xml.append(newLine).append(getJobID(job, false)); - xml.append(newLine).append(getRunID(job, false)); + if (job.getRunId() != null) + xml.append(newLine).append(getRunID(job, false)); xml.append(newLine).append(getOwnerID(job, 
false)); xml.append(newLine).append(getPhase(job, false)); xml.append(newLine).append(getQuote(job, false)); @@ -212,7 +215,7 @@ public class XMLSerializer extends UWSSerializer { xml.append(newLine).append(getErrorSummary(job.getErrorSummary(), false)); tabPrefix = ""; - return xml.append("\n").toString(); + return xml.append("\n").toString(); } @Override @@ -223,160 +226,199 @@ public class XMLSerializer extends UWSSerializer { url = jobsListUrl.getRequestURL(); } - StringBuffer xml = new StringBuffer(" 0) - xml.append("\" runId=\"").append(escapeXMLAttribute(job.getRunId())); + /* NOTE: NO ATTRIBUTE "runId" IN THE XML SCHEMA! + * if (job.getRunId() != null && job.getRunId().length() > 0) + * xml.append("\" runId=\"").append(escapeXMLAttribute(job.getRunId())); + */ xml.append("\" xlink:href=\""); if (url != null) - xml.append(escapeURL(url)); - xml.append("\">").append(getPhase(job, false)).append(""); + xml.append(escapeXMLAttribute(url)); + xml.append("\">\n\t\t").append(getPhase(job, false)).append("\n\t"); return xml.toString(); } @Override public String getJobID(final UWSJob job, final boolean root){ - return (new StringBuffer(root ? getHeader() : "")).append("").append(escapeXMLData(job.getJobId())).append("").toString(); + return (new StringBuffer(root ? getHeader() : "")).append("").append(escapeXMLData(job.getJobId())).append("").toString(); } @Override public String getRunID(final UWSJob job, final boolean root){ - StringBuffer xml = new StringBuffer(root ? getHeader() : ""); - xml.append(""); - else - xml.append(">").append(escapeXMLData(job.getRunId())).append(""); - return xml.toString(); + if (job.getRunId() != null){ + StringBuffer xml = new StringBuffer(root ? getHeader() : ""); + xml.append("").append(escapeXMLData(job.getRunId())).append(""); + return xml.toString(); + }else + return ""; } @Override public String getOwnerID(final UWSJob job, final boolean root){ StringBuffer xml = new StringBuffer(root ? 
getHeader() : ""); - xml.append(""); else - xml.append(">").append(escapeXMLData(job.getOwner().getPseudo())).append(""); + xml.append(">").append(escapeXMLData(job.getOwner().getPseudo())).append(""); return xml.toString(); } @Override public String getPhase(final UWSJob job, final boolean root){ - return (new StringBuffer(root ? getHeader() : "")).append("").append(job.getPhase()).append("").toString(); + return (new StringBuffer(root ? getHeader() : "")).append("").append(job.getPhase()).append("").toString(); } @Override public String getQuote(final UWSJob job, final boolean root){ StringBuffer xml = new StringBuffer(root ? getHeader() : ""); - xml.append(""); else - xml.append(">").append(job.getQuote()).append(""); + xml.append(">").append(job.getQuote()).append(""); return xml.toString(); } @Override public String getStartTime(final UWSJob job, final boolean root){ StringBuffer xml = new StringBuffer(root ? getHeader() : ""); - xml.append(""); else - xml.append(">").append(UWSJob.dateFormat.format(job.getStartTime())).append(""); + xml.append(">").append(ISO8601Format.format(job.getStartTime())).append(""); return xml.toString(); } @Override public String getEndTime(final UWSJob job, final boolean root){ StringBuffer xml = new StringBuffer(root ? getHeader() : ""); - xml.append(""); else - xml.append(">").append(UWSJob.dateFormat.format(job.getEndTime())).append(""); + xml.append(">").append(ISO8601Format.format(job.getEndTime())).append(""); return xml.toString(); } @Override public String getDestructionTime(final UWSJob job, final boolean root){ StringBuffer xml = new StringBuffer(root ? getHeader() : ""); - xml.append(""); else - xml.append(">").append(UWSJob.dateFormat.format(job.getDestructionTime())).append(""); + xml.append(">").append(ISO8601Format.format(job.getDestructionTime())).append(""); return xml.toString(); } @Override public String getExecutionDuration(final UWSJob job, final boolean root){ - return (new StringBuffer(root ? 
getHeader() : "")).append("").append(job.getExecutionDuration()).append("").toString(); + return (new StringBuffer(root ? getHeader() : "")).append("").append(job.getExecutionDuration()).append("").toString(); } @Override public String getErrorSummary(final ErrorSummary error, final boolean root){ - StringBuffer xml = new StringBuffer(root ? getHeader() : ""); - xml.append(tabPrefix).append(""); - xml.append("\n\t").append(tabPrefix).append("").append(escapeXMLData(error.getMessage())).append(""); - xml.append("\n").append(tabPrefix).append(""); + xml.append("\n\t").append(tabPrefix).append("").append(escapeXMLData(error.getMessage())).append(""); + xml.append("\n").append(tabPrefix).append(""); + return xml.toString(); }else - xml.append(" xsi:nil=\"true\" />"); - return xml.toString(); + return ""; } @Override public String getAdditionalParameters(final UWSJob job, final boolean root){ StringBuffer xml = new StringBuffer(root ? getHeader() : ""); - xml.append(tabPrefix).append(""); + xml.append(tabPrefix).append(""); String newLine = "\n\t" + tabPrefix; for(String paramName : job.getAdditionalParameters()) xml.append(newLine).append(getAdditionalParameter(paramName, job.getAdditionalParameterValue(paramName), false)); - xml.append("\n").append(tabPrefix).append(""); + xml.append("\n").append(tabPrefix).append("
    "); return xml.toString(); } @Override public String getAdditionalParameter(final String paramName, final Object paramValue, final boolean root){ if (paramName != null && paramValue != null){ - if (root) - return paramValue.toString(); - else - return (new StringBuffer("").append(escapeXMLData(paramValue.toString())).append("").toString(); - }else + // If ROOT, just the value must be returned: + if (root){ + if (paramValue.getClass().isArray()){ + StringBuffer buf = new StringBuffer(); + for(Object o : (Object[])paramValue){ + if (buf.length() > 0) + buf.append(';'); + buf.append(o.toString()); + } + return buf.toString(); + }else + return paramValue.toString(); + } + // OTHERWISE, return the XML description: + else{ + StringBuffer buf = new StringBuffer(); + // if array (=> multiple occurrences of the parameter), each item must be one individual parameter: + if (paramValue.getClass().isArray()){ + for(Object o : (Object[])paramValue){ + if (buf.length() > 0) + buf.append("\n\t").append(tabPrefix); + buf.append(getAdditionalParameter(paramName, o, root)); + } + } + // otherwise, just return the XML parameter description: + else{ + buf.append("").append(escapeXMLData(paramValue.toString())).append(""); + } + return buf.toString(); + } + } + // If NO VALUE or NO NAME, return an empty string: + else return ""; } @Override public String getResults(final UWSJob job, final boolean root){ StringBuffer xml = new StringBuffer(root ? getHeader() : ""); - xml.append(tabPrefix).append(""); + xml.append(tabPrefix).append(""); Iterator it = job.getResults(); String newLine = "\n\t" + tabPrefix; while(it.hasNext()) xml.append(newLine).append(getResult(it.next(), false)); - xml.append("\n").append(tabPrefix).append(""); + xml.append("\n").append(tabPrefix).append(""); return xml.toString(); } @Override public String getResult(final Result result, final boolean root){ StringBuffer xml = new StringBuffer(root ? 
getHeader() : ""); - xml.append("= 0) - xml.append(" size=\"").append(result.getSize()).append("\""); + + /* NOTE: THE FOLLOWING ATTRIBUTES MAY PROVIDE USEFUL INFORMATION TO USERS, BUT THEY ARE NOT ALLOWED BY THE CURRENT UWS STANDARD. + * HOWEVER, IF, ONE DAY, THEY ARE, THE FOLLOWING LINES SHOULD BE UNCOMNENTED. + * + * if (result.getMimeType() != null) + * xml.append(" mime=\"").append(escapeXMLAttribute(result.getMimeType())).append("\""); + * if (result.getSize() >= 0) + * xml.append(" size=\"").append(result.getSize()).append("\""); + */ + return xml.append(" />").toString(); } @@ -384,16 +426,31 @@ public class XMLSerializer extends UWSSerializer { /* ESCAPE METHODS */ /* ************** */ /** - *

    Escapes the content of a node (data between the open and the close tags).

    - * - *

    By default: surrounds the given data by "<![CDATA[" and "]]>".

    + * Escapes the content of a node (data between the open and the close tags). * * @param data Data to escape. * * @return Escaped data. */ public static String escapeXMLData(final String data){ - return ""; + StringBuffer encoded = new StringBuffer(); + for(int i = 0; i < data.length(); i++){ + char c = data.charAt(i); + switch(c){ + case '&': + encoded.append("&"); + break; + case '<': + encoded.append("<"); + break; + case '>': + encoded.append(">"); + break; + default: + encoded.append(ensureLegalXml(c)); + } + } + return encoded.toString(); } /** @@ -420,11 +477,8 @@ public class XMLSerializer extends UWSSerializer { case '"': encoded.append("""); break; - case '\'': - encoded.append("'"); - break; default: - encoded.append(c); + encoded.append(ensureLegalXml(c)); } } return encoded.toString(); @@ -448,4 +502,22 @@ public class XMLSerializer extends UWSSerializer { } } + /** + *

    Returns a legal XML character corresponding to an input character. + * Certain characters are simply illegal in XML (regardless of encoding). + * If the input character is legal in XML, it is returned; + * otherwise some other weird but legal character + * (currently the inverted question mark, "\u00BF") is returned instead.

    + * + *

    Note: copy of the STILTS VOSerializer.ensureLegalXml(char) function.

    + * + * @param c input character + * @return legal XML character, c if possible + * + * @since 4.1 + */ + public static char ensureLegalXml(char c){ + return ((c >= '\u0020' && c <= '\uD7FF') || (c >= '\uE000' && c <= '\uFFFD') || ((c) == 0x09 || (c) == 0x0A || (c) == 0x0D)) ? c : '\u00BF'; + } + } diff --git a/src/uws/service/AbstractUWSFactory.java b/src/uws/service/AbstractUWSFactory.java index 04fbe8a5e18c9fbe76fef551e03bab0a00edccee..86d1112601db72eacc434cf3238760b137a34544 100644 --- a/src/uws/service/AbstractUWSFactory.java +++ b/src/uws/service/AbstractUWSFactory.java @@ -16,7 +16,8 @@ package uws.service; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.util.ArrayList; @@ -28,26 +29,26 @@ import java.util.Map; import javax.servlet.http.HttpServletRequest; import uws.UWSException; - import uws.job.ErrorSummary; import uws.job.JobThread; import uws.job.Result; import uws.job.UWSJob; - import uws.job.parameters.DestructionTimeController; +import uws.job.parameters.DestructionTimeController.DateField; import uws.job.parameters.ExecutionDurationController; import uws.job.parameters.InputParamController; import uws.job.parameters.UWSParameters; -import uws.job.parameters.DestructionTimeController.DateField; - import uws.job.user.JobOwner; +import uws.service.file.UWSFileManager; +import uws.service.request.RequestParser; +import uws.service.request.UWSRequestParser; /** *

    Abstract implementation of {@link UWSFactory}. * Only the function which creates a {@link JobThread} from a {@link UWSJob} needs to be implemented.

    * - * @author Grégory Mantelet (CDS) - * @version 06/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (11/2014) */ public abstract class AbstractUWSFactory implements UWSFactory { @@ -101,6 +102,11 @@ public abstract class AbstractUWSFactory implements UWSFactory { return new UWSParameters(req, expectedAdditionalParams, inputParamControllers); } + @Override + public RequestParser createRequestParser(final UWSFileManager fileManager) throws UWSException{ + return new UWSRequestParser(fileManager); + } + /** * Adds the name of an additional parameter which must be identified without taking into account its case * and then stored with the case of the given name. diff --git a/src/uws/service/UWS.java b/src/uws/service/UWS.java index 492b17b668b2f33c1c74eadd8c2fcdec64bf679f..09d5e2456b4ab128bd2b56f1f6ff0ac7f046d658 100644 --- a/src/uws/service/UWS.java +++ b/src/uws/service/UWS.java @@ -16,19 +16,18 @@ package uws.service; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import uws.UWSException; - import uws.job.JobList; - import uws.job.serializer.UWSSerializer; - import uws.service.backup.UWSBackupManager; import uws.service.file.UWSFileManager; - import uws.service.log.UWSLog; +import uws.service.request.RequestParser; +import uws.service.request.UWSRequestParser; /** *

    @@ -59,11 +58,28 @@ import uws.service.log.UWSLog; * the UWS and the servlet. *

    * - * @author Grégory Mantelet (CDS) - * @version 05/2012 + *

    IMPORTANT: + * All implementations of this interface should implement properly the function {@link #destroy()} and should call it + * when the JVM or the HTTP server application is closing. + *

    + * + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (02/2015) */ public interface UWS extends Iterable { + /** Attribute of the HttpServletRequest to set and to get in order to access the request ID set by the UWS library. + * @since 4.1 */ + public static final String REQ_ATTRIBUTE_ID = "UWS_REQUEST_ID"; + + /** Attribute of the HttpServletRequest to set and to get in order to access the parameters extracted by the UWS library (using a RequestParser). + * @since 4.1 */ + public static final String REQ_ATTRIBUTE_PARAMETERS = "UWS_PARAMETERS"; + + /** Attribute of the HttpServletRequest to set and to get in order to access the user at the origin of the HTTP request. + * @since 4.1 */ + public static final String REQ_ATTRIBUTE_USER = "UWS_USER"; + /** * Gets the name of this UWS. * @@ -78,6 +94,28 @@ public interface UWS extends Iterable { */ public String getDescription(); + /* ***************** */ + /* RESOURCES RELEASE */ + /* ***************** */ + + /** + *

    + * End properly this UWS: jobs should be backuped (if this feature is enable), timers and threads should be stopped, + * open files and database connections should be closed, etc... + * In brief, this function should release all used resources. + *

    + * + *

    IMPORTANT: This function should be called only when the JVM or the Web Application Server is stopping.

    + * + *

    Note: + * A call to this function may prevent this instance of {@link UWS} to execute any subsequent HTTP request, or the behavior + * would be unpredictable. + *

    + * + * @since 4.1 + */ + public void destroy(); + /* ******************* */ /* JOB LIST MANAGEMENT */ /* ******************* */ @@ -85,7 +123,7 @@ public interface UWS extends Iterable { /** * Adds a jobs list to this UWS. * - * @param jl The jobs list to add. + * @param newJL The jobs list to add. * * @return true if the jobs list has been successfully added, * false if the given jobs list is null or if a jobs list with this name already exists @@ -110,17 +148,6 @@ public interface UWS extends Iterable { */ public int getNbJobList(); - /* - *

    Removes the specified jobs list.

    - *

    note: After the call of this function, the UWS reference of the given jobs list should be removed (see {@link JobList#setUWS(UWS)}).

    - * - * @param name Name of the jobs list to remove. - * - * @return The removed jobs list - * or null if no jobs list with the given name has been found. - * - public JobList removeJobList(final String name) throws UWSException;*/ - /** *

    Destroys the specified jobs list.

    *

    note: After the call of this function, the UWS reference of the given jobs list should be removed (see {@link JobList#setUWS(UWS)}).

    @@ -183,8 +210,9 @@ public interface UWS extends Iterable { /* ******************* */ /** + * Gets the object which is able to identify a user from an HTTP request. * - * @return + * @return Its user identifier. */ public UserIdentifier getUserIdentifier(); @@ -202,6 +230,22 @@ public interface UWS extends Iterable { */ public UWSFactory getFactory(); + /* ******************** */ + /* HTTP REQUEST PARSING */ + /* ******************** */ + + /** + *

    Get its HTTP request parser.

    + *

    note: This parser is the only one to be able to extract UWS and TAP parameters from any HTTP request. + * Its behavior is adapted in function of the used HTTP method and of the content-type. A default implementation is + * provided by the UWS library: {@link UWSRequestParser}.

    + * + * @return Its request parser. + * + * @since 4.1 + */ + public RequestParser getRequestParser(); + /* *************** */ /* FILE MANAGEMENT */ /* *************** */ diff --git a/src/uws/service/UWSFactory.java b/src/uws/service/UWSFactory.java index 4cf384c1b5b4078879419414dfba1ff35123797a..73507c28a3e0dd48047950199bdb98a155125cc3 100644 --- a/src/uws/service/UWSFactory.java +++ b/src/uws/service/UWSFactory.java @@ -16,7 +16,8 @@ package uws.service; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.util.List; @@ -25,20 +26,20 @@ import java.util.Map; import javax.servlet.http.HttpServletRequest; import uws.UWSException; - import uws.job.ErrorSummary; import uws.job.JobThread; import uws.job.Result; import uws.job.UWSJob; - import uws.job.parameters.UWSParameters; import uws.job.user.JobOwner; +import uws.service.file.UWSFileManager; +import uws.service.request.RequestParser; /** * Let's creating UWS jobs, their threads and extracting their parameters from {@link HttpServletRequest}. * - * @author Grégory Mantelet (CDS) - * @version 06/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (11/2014) * * @see UWS#getFactory() */ @@ -95,7 +96,7 @@ public interface UWSFactory { /** * Lets extracting all parameters from the given request. * - * @param req The request from which parameters must be extracted. + * @param request The request from which parameters must be extracted. * * @return The extracted parameters. * @@ -114,4 +115,18 @@ public interface UWSFactory { */ public UWSParameters createUWSParameters(final Map params) throws UWSException; + /** + * Create a parser of HTTP requests. 
This object is able to deal with the different formats + * in which parameters are provided in an HTTP request. + * + * @param manager File manager to use if files are uploaded in an HTTP request. + * + * @return The request parser to use. + * + * @throws UWSException If an error occurs while creating the parser. + * + * @since 4.1 + */ + public RequestParser createRequestParser(final UWSFileManager manager) throws UWSException; + } diff --git a/src/uws/service/UWSService.java b/src/uws/service/UWSService.java index b6555c83a963b685ac64a62cb7de12a54d8d389e..2cfac56a7d3734ff07796169c4c7ae3bb68e77f7 100644 --- a/src/uws/service/UWSService.java +++ b/src/uws/service/UWSService.java @@ -16,177 +16,130 @@ package uws.service; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.IOException; - import java.net.URL; - import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; import java.util.Vector; + +import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; + import uws.AcceptHeader; import uws.UWSException; -import uws.UWSExceptionFactory; -import uws.job.ExecutionPhase; +import uws.UWSToolBox; import uws.job.JobList; -import uws.job.UWSJob; - -import uws.job.manager.DefaultExecutionManager; +import uws.job.JobThread; import uws.job.serializer.JSONSerializer; import uws.job.serializer.UWSSerializer; import uws.job.serializer.XMLSerializer; import uws.job.user.JobOwner; - import uws.service.actions.AddJob; import uws.service.actions.DestroyJob; import uws.service.actions.GetJobParam; import uws.service.actions.JobSummary; import 
uws.service.actions.ListJobs; import uws.service.actions.SetJobParam; +import uws.service.actions.SetUWSParameter; import uws.service.actions.ShowHomePage; import uws.service.actions.UWSAction; import uws.service.backup.UWSBackupManager; - import uws.service.error.DefaultUWSErrorWriter; import uws.service.error.ServiceErrorWriter; import uws.service.file.UWSFileManager; import uws.service.log.DefaultUWSLog; import uws.service.log.UWSLog; +import uws.service.log.UWSLog.LogLevel; +import uws.service.request.RequestParser; /** - *

    General description

    - * - *

    An abstract facility to implement the Universal Worker Service pattern.

    + *

    This class implements directly the interface {@link UWS} and so, it represents the core of a UWS service.

    * - *

    It can manage several jobs lists (create new, get and remove).

    + *

    Usage

    * - *

    It also interprets {@link HttpServletRequest}, applies the action specified in its given URL and parameters - * (according to the IVOA Proposed Recommendation of 2010-02-10) - * and returns the corresponding response in a {@link HttpServletResponse}.

    - * - *

    The UWS URL interpreter

    - * - *

    Any subclass of {@link UWSService} has one object called the UWS URL interpreter. It is stored in the field {@link #urlInterpreter}. - * It lets interpreting the URL of any received request. Thus you can know on which jobs list, job and/or job attribute(s) - * the request applies.

    - * - *

    This interpreter must be initialized with the base URL/URI of this UWS. By using the default constructor (the one with no parameter), - * the URL interpreter will be built at the first request (see {@link UWSUrl#UWSUrl(HttpServletRequest)}) and so the base URI is - * extracted directly from the request).

    - * - *

    You want to set another base URI or to use a custom URL interpreter, you have to set yourself the interpreter - * by using the method {@link #setUrlInterpreter(UWSUrl)}.

    + *

    + * Using this class is very simple! An instance must be created by providing at a factory - {@link UWSFactory} - and a file manager - {@link UWSFileManager}. + * This creation must be done in the init() function of a {@link HttpServlet}. Then, still in init(), at least one job list must be created. + * Finally, in order to ensure that all requests are interpreted by the UWS service, they must be sent to the created {@link UWSService} in the function + * {@link #executeRequest(HttpServletRequest, HttpServletResponse)}. + *

    + *

    Here is an example of what should look like the servlet class:

    + *
    + * public class MyUWSService extends HttpServlet {
    + * 	private UWS uws;
      * 
    - * 

    Create a job

    + * public void init(ServletConfig config) throws ServletException { + * try{ + * // Create the UWS service: + * uws = new UWSService(new MyUWSFactory(), new LocalUWSFileManager(new File(config.getServletContext().getRealPath("UWSFiles")))); + * // Create at least one job list (otherwise no job can be started): + * uws.addJobList("jobList"); + * }catch(UWSException ue){ + * throw new ServletException("Can not initialize the UWS service!", ue); + * } + * } * - *

    The most important abstract function of this class is {@link UWSService#createJob(Map)}. It allows to create an instance - * of the type of job which is managed by this UWS. The only parameter is a map of a job attributes. It is the same map that - * take the functions {@link UWSJob#UWSJob(Map)} and {@link UWSJob#addOrUpdateParameters(Map)}.

    + * public void destroy(){ + * if (uws != null) + * uws.destroy(); + * } * - *

    There are two convenient implementations of this abstract method in {@link BasicUWS} and {@link ExtendedUWS}. These two implementations - * are based on the Java Reflection.

    + * public void service(final HttpServletRequest request, final HttpServletResponse response) throws ServletException, IOException{ + * try{ + * service.executeRequest(request, response); + * }catch(UWSException ue){ + * response.sendError(ue.getHttpErrorCode(), ue.getMessage()); + * } + * } + * } + *
    * *

    UWS actions

    * - *

    All the actions described in the IVOA recommendation are already managed. Each of these actions are defined in - * an instance of {@link UWSAction}:

    + *

    + * All standard UWS actions are already implemented in this class. However, it is still possible to modify their implementation and/or to + * add or remove some actions. + *

    + *

    + * A UWS action is actually implemented here by a class extending the abstract class {@link UWSAction}. Here is the full list of all + * the available and already implemented actions: + *

    *
      - *
    • {@link UWSAction#LIST_JOBS LIST_JOBS}: see the class {@link ListJobs}
    • - *
    • {@link UWSAction#ADD_JOB ADD_JOB}: see the class {@link AddJob}
    • - *
    • {@link UWSAction#DESTROY_JOB DESTROY_JOB}: see the class {@link DestroyJob}
    • - *
    • {@link UWSAction#JOB_SUMMARY JOB_SUMMARY}: see the class {@link JobSummary}
    • - *
    • {@link UWSAction#GET_JOB_PARAM GET_JOB_PARAM}: see the class {@link GetJobParam}
    • - *
    • {@link UWSAction#SET_JOB_PARAM SET_JOB_PARAM}: see the class {@link SetJobParam}
    • - *
    • {@link UWSAction#HOME_PAGE HOME_PAGE}: see the class {@link ShowHomePage}
    • + *
    • {@link AddJob}
    • + *
    • {@link DestroyJob}
    • + *
    • {@link JobSummary}
    • + *
    • {@link GetJobParam}
    • + *
    • {@link SetJobParam}
    • + *
    • {@link ListJobs}
    • *
    - * - *

    However you can add your own UWS actions ! To do that you just need to implement the abstract class {@link UWSAction} - * and to call the method {@link #addUWSAction(UWSAction)} with an instance of this implementation.

    - * - *

    IMPORTANT: You must be careful when you override the function {@link UWSAction#match(UWSUrl, String, HttpServletRequest)} - * so that your test is as precise as possible ! Indeed the order in which the actions of a UWS are evaluated is very important !
    - * If you want to be sure your custom UWS action is always evaluated before any other UWS action you can use the function - * {@link #addUWSAction(int, UWSAction)} with 0 as first parameter !

    - * - *

    Note: You can also replace an existing UWS action thanks to the method {@link #replaceUWSAction(UWSAction)} or - * {@link #setUWSAction(int, UWSAction)} !

    - * - *

    User identification

    - * - *

    Some UWS actions need to know the current user so that they can adapt their response (i.e. LIST_JOBS must return the jobs of only - * one user: the current one). Thus, before executing a UWS action (and even before choosing the good action in function of the request) - * the function {@link UserIdentifier#extractUserId(UWSUrl, HttpServletRequest)} is called. Its goal - * is to identify the current user in function of the received request.

    - * *

    - * Notes: - *

      - *
    • If this function returns NULL, the UWS actions must be executed on all jobs, whatever is their owner !
    • - *
    • {@link UserIdentifier} is an interface. So you must implement it and then set its extension to this UWS - * by using {@link #setUserIdentifier(UserIdentifier)}.
    • - *
    - *

    + * To add an action, you should use the function {@link #addUWSAction(UWSAction)}, to remove one {@link #removeUWSAction(int)} or {@link #removeUWSAction(String)}. + * Note that this last function takes a String parameter. This parameter is the name of the UWS action to remove. Indeed, each UWS action must have an internal + * name representing the action. Thus, it is possible to replace a UWS action implementation by using the function {@link #replaceUWSAction(UWSAction)} ; this + * function will replace the action having the same name as the given action. *

    * - *

    Queue management

    - * - *

    One of the goals of a UWS is to manage an execution queue for all managed jobs. This task is given to an instance - * of {@link DefaultExecutionManager}, stored in the field {@link #executionManager}. Each time a job is created, - * the UWS sets it the execution manager (see {@link AddJob}). Thus the {@link UWSJob#start()} method will ask to the manager - * whether it can execute now or whether it must be put in a {@link ExecutionPhase#QUEUED QUEUED} phase until enough resources are available for its execution.

    - * - *

    By extending the class {@link DefaultExecutionManager} and by overriding {@link DefaultExecutionManager#isReadyForExecution(UWSJob)} - * you can change the condition which puts a job in the {@link ExecutionPhase#EXECUTING EXECUTING} or in the {@link ExecutionPhase#QUEUED QUEUED} phase. By default, a job is put - * in the {@link ExecutionPhase#QUEUED QUEUED} phase if there are more running jobs than a given number.

    - * - *

    With this manager it is also possible to list all running jobs in addition of all queued jobs, thanks to the methods: - * {@link DefaultExecutionManager#getRunningJobs()}, {@link DefaultExecutionManager#getQueuedJobs()}, {@link DefaultExecutionManager#getNbRunningJobs()} - * and {@link DefaultExecutionManager#getNbQueuedJobs()}.

    - * - *

    Serializers & MIME types

    - * - *

    According to the IVOA recommendation, the XML format is the default format in which each UWS resource must be returned. However it - * is told that other formats can also be managed. To allow that, {@link UWSService} manages a list of {@link UWSSerializer} and - * lets define which is the default one to use. By default, there are two serializers: {@link XMLSerializer} (the default choice) - * and {@link JSONSerializer}.

    - * - *

    One proposed way to choose automatically the format to use is to look at the Accept header of a HTTP-Request. This header field is - * a list of MIME types (each one with a quality - a sort of priority). Thus each {@link UWSSerializer} is associated with a MIME type so - * that {@link UWSService} can choose automatically the preferred format and so, the serializer to use.

    - * - *

    WARNING: Only one {@link UWSSerializer} can be associated with a given MIME type in an {@link UWSService} instance ! - * Thus, if you add a {@link UWSSerializer} to a UWS, and this UWS has already a serializer for the same MIME type, - * it will be replaced by the added one.

    + *

    Home page

    * - *

    Note: A XML document can be linked to a XSLT style-sheet. By using the method {@link XMLSerializer#setXSLTPath(String)} - * you can define the path/URL of the XLST to link to each UWS resource.
    - * Since the {@link XMLSerializer} is the default format for a UWS resource you can also use the function - * {@link UWSService#setXsltURL(String)} !

    - * - *

    The UWS Home page

    - * - *

    As for a job or a jobs list, a UWS is also a UWS resource. That's why it can also be serialized !

    - * - *

    However in some cases it could more interesting to substitute this resource by a home page of the whole UWS by using the function: - * {@link #setHomePage(String)} or {@link #setHomePage(URL, boolean)}. + *

    + * In addition of all the actions listed above, a last action is automatically added: {@link ShowHomePage}. This is the action which will display the home page of + * the UWS service. It is called when the root resource of the web service is asked. To change it, you can either overwrite this action + * (see {@link #replaceUWSAction(UWSAction)}) or set an home page URL with the function {@link #setHomePage(String)} (the parameter is a URI pointing on either + * a local or a remote resource) or {@link #setHomePage(URL, boolean)}. *

    * - *

    Note: To go back to the UWS serialization (that is to say to abort a call to {@link #setHomePage(String)}), - * use the method {@link #setDefaultHomePage()} !

    - * - * - * @author Grégory Mantelet (CDS) - * @version 06/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (04/2015) */ public class UWSService implements UWS { - private static final long serialVersionUID = 1L; /** Name of this UWS. */ protected String name = null; @@ -236,30 +189,41 @@ public class UWSService implements UWS { /** Lets logging info/debug/warnings/errors about this UWS. */ protected UWSLog logger; + /** Lets extract all parameters from an HTTP request, whatever is its content-type. + * @since 4.1*/ + protected final RequestParser requestParser; + /** Lets writing/formatting any exception/throwable in a HttpServletResponse. */ protected ServiceErrorWriter errorWriter; + /** Last generated request ID. If the next generated request ID is equivalent to this one, + * a new one will generate in order to ensure the unicity. + * @since 4.1 */ + protected static String lastRequestID = null; + /* ************ */ /* CONSTRUCTORS */ - /* ************ *//** - *

    Builds a UWS (the base URI will be extracted at the first request directly from the request itself).

    - * - *

    - * By default, this UWS has 2 serialization formats: XML ({@link XMLSerializer}) and JSON ({@link JSONSerializer}). - * All the default actions of a UWS are also already implemented. - * However, you still have to create at least one job list ! - *

    - * - *

    note: since no logger is provided, a default one is set automatically (see {@link DefaultUWSLog}).

    - * - * @param jobFactory Object which lets creating the UWS jobs managed by this UWS and their thread/task. - * @param fileManager Object which lets managing all files managed by this UWS (i.e. log, result, backup, error, ...). - * - * @throws NullPointerException If at least one of the parameters is null. - * - * @see #UWSService(UWSFactory, UWSFileManager, UWSLog) - */ - public UWSService(final UWSFactory jobFactory, final UWSFileManager fileManager){ + /* ************ */ + /** + *

    Builds a UWS (the base URI will be extracted at the first request directly from the request itself).

    + * + *

    + * By default, this UWS has 2 serialization formats: XML ({@link XMLSerializer}) and JSON ({@link JSONSerializer}). + * All the default actions of a UWS are also already implemented. + * However, you still have to create at least one job list ! + *

    + * + *

    note: since no logger is provided, a default one is set automatically (see {@link DefaultUWSLog}).

    + * + * @param jobFactory Object which lets creating the UWS jobs managed by this UWS and their thread/task. + * @param fileManager Object which lets managing all files managed by this UWS (i.e. log, result, backup, error, ...). + * + * @throws NullPointerException If at least one of the parameters is null. + * @throws UWSException If unable to create a request parser using the factory (see {@link UWSFactory#createRequestParser(UWSFileManager)}). + * + * @see #UWSService(UWSFactory, UWSFileManager, UWSLog) + */ + public UWSService(final UWSFactory jobFactory, final UWSFileManager fileManager) throws UWSException{ this(jobFactory, fileManager, (UWSLog)null); } @@ -277,18 +241,22 @@ public class UWSService implements UWS { * @param logger Object which lets printing any message (error, info, debug, warning). * * @throws NullPointerException If at least one of the parameters is null. + * @throws UWSException If unable to create a request parser using the factory (see {@link UWSFactory#createRequestParser(UWSFileManager)}). */ - public UWSService(final UWSFactory jobFactory, final UWSFileManager fileManager, final UWSLog logger){ + public UWSService(final UWSFactory jobFactory, final UWSFileManager fileManager, final UWSLog logger) throws UWSException{ if (jobFactory == null) - throw new NullPointerException("Missing UWS factory ! Can not create a UWSService."); + throw new NullPointerException("Missing UWS factory! Can not create a UWSService."); factory = jobFactory; if (fileManager == null) - throw new NullPointerException("Missing UWS file manager ! Can not create a UWSService."); + throw new NullPointerException("Missing UWS file manager! Can not create a UWSService."); this.fileManager = fileManager; this.logger = (logger == null) ? 
new DefaultUWSLog(this) : logger; - errorWriter = new DefaultUWSErrorWriter(this); + + requestParser = jobFactory.createRequestParser(fileManager); + + errorWriter = new DefaultUWSErrorWriter(this.logger); // Initialize the list of jobs: mapJobLists = new LinkedHashMap(); @@ -305,6 +273,7 @@ public class UWSService implements UWS { uwsActions.add(new ShowHomePage(this)); uwsActions.add(new ListJobs(this)); uwsActions.add(new AddJob(this)); + uwsActions.add(new SetUWSParameter(this)); uwsActions.add(new DestroyJob(this)); uwsActions.add(new JobSummary(this)); uwsActions.add(new GetJobParam(this)); @@ -345,11 +314,22 @@ public class UWSService implements UWS { // Extract the name of the UWS: try{ + // Set the URL interpreter: urlInterpreter = new UWSUrl(baseURI); + + // ...and the name of this service: name = urlInterpreter.getUWSName(); - getLogger().uwsInitialized(this); - }catch(UWSException ex){ - throw new UWSException(UWSException.BAD_REQUEST, ex, "Invalid base UWS URI (" + baseURI + ") !"); + + // Log the successful initialization: + logger.logUWS(LogLevel.INFO, this, "INIT", "UWS successfully initialized!", null); + + }catch(NullPointerException ex){ + // Log the exception: + // (since the first constructor has already been called successfully, the logger is now NOT NULL): + logger.logUWS(LogLevel.FATAL, null, "INIT", "Invalid base UWS URI: " + baseURI + "! You should check the configuration of the service.", ex); + + // Throw a new UWSException with a more understandable message: + throw new UWSException(UWSException.BAD_REQUEST, ex, "Invalid base UWS URI (" + baseURI + ")!"); } } @@ -362,9 +342,11 @@ public class UWSService implements UWS { * @param fileManager Object which lets managing all files managed by this UWS (i.e. log, result, backup, error, ...). * @param urlInterpreter The UWS URL interpreter to use in this UWS. 
* + * @throws UWSException If unable to create a request parser using the factory (see {@link UWSFactory#createRequestParser(UWSFileManager)}). + * * @see #UWSService(UWSFactory, UWSFileManager, UWSLog, UWSUrl) */ - public UWSService(final UWSFactory jobFactory, final UWSFileManager fileManager, final UWSUrl urlInterpreter){ + public UWSService(final UWSFactory jobFactory, final UWSFileManager fileManager, final UWSUrl urlInterpreter) throws UWSException{ this(jobFactory, fileManager, null, urlInterpreter); } @@ -375,17 +357,49 @@ public class UWSService implements UWS { * @param fileManager Object which lets managing all files managed by this UWS (i.e. log, result, backup, error, ...). * @param logger Object which lets printing any message (error, info, debug, warning). * @param urlInterpreter The UWS URL interpreter to use in this UWS. + * + * @throws UWSException If unable to create a request parser using the factory (see {@link UWSFactory#createRequestParser(UWSFileManager)}). */ - public UWSService(final UWSFactory jobFactory, final UWSFileManager fileManager, final UWSLog logger, final UWSUrl urlInterpreter){ + public UWSService(final UWSFactory jobFactory, final UWSFileManager fileManager, final UWSLog logger, final UWSUrl urlInterpreter) throws UWSException{ this(jobFactory, fileManager, logger); setUrlInterpreter(urlInterpreter); if (this.urlInterpreter != null) - getLogger().uwsInitialized(this); + logger.logUWS(LogLevel.INFO, this, "INIT", "UWS successfully initialized.", null); + } + + @Override + public void destroy(){ + // Backup all jobs: + /* Jobs are backuped now so that running jobs are set back to the PENDING phase in the backup. + * Indeed, the "stopAll" operation of the ExecutionManager may fail and would set the phase to ERROR + * for the wrong reason. 
*/ + if (backupManager != null){ + // save all jobs: + backupManager.setEnabled(true); + backupManager.saveAll(); + // stop the automatic backup, if there is one: + backupManager.setEnabled(false); + } + + // Stop all jobs and stop watching for the jobs' destruction: + for(JobList jl : mapJobLists.values()){ + jl.getExecutionManager().stopAll(); + jl.getDestructionManager().stop(); + } + + // Just in case that previous clean "stop"s did not work, try again an interruption for all running threads: + /* note: timers are not part of this ThreadGroup and so, they won't be affected by this function call. */ + JobThread.tg.interrupt(); + + // Log the service is stopped: + if (logger != null) + logger.logUWS(LogLevel.INFO, this, "STOP", "UWS Service \"" + getName() + "\" stopped!", null); } /* ************** */ /* LOG MANAGEMENT */ /* ************** */ + @Override public UWSLog getLogger(){ return logger; } @@ -414,6 +428,7 @@ public class UWSService implements UWS { /* ***************** */ /* GETTERS & SETTERS */ /* ***************** */ + @Override public final String getName(){ return name; } @@ -427,6 +442,7 @@ public class UWSService implements UWS { this.name = name; } + @Override public final String getDescription(){ return description; } @@ -451,6 +467,7 @@ public class UWSService implements UWS { return (urlInterpreter == null) ? null : urlInterpreter.getBaseURI(); } + @Override public final UWSUrl getUrlInterpreter(){ return urlInterpreter; } @@ -464,6 +481,8 @@ public class UWSService implements UWS { this.urlInterpreter = urlInterpreter; if (name == null && urlInterpreter != null) name = urlInterpreter.getUWSName(); + if (this.urlInterpreter != null) + this.urlInterpreter.setUwsURI(null); } /** @@ -472,6 +491,7 @@ public class UWSService implements UWS { * * @return The used UserIdentifier (MAY BE NULL). 
*/ + @Override public final UserIdentifier getUserIdentifier(){ return userIdentifier; } @@ -485,14 +505,17 @@ public class UWSService implements UWS { userIdentifier = identifier; } + @Override public final UWSFactory getFactory(){ return factory; } + @Override public final UWSFileManager getFileManager(){ return fileManager; } + @Override public final UWSBackupManager getBackupManager(){ return backupManager; } @@ -500,7 +523,7 @@ public class UWSService implements UWS { /** *

    * Sets its backup manager. - * This manager will be called at each user action to save only its own jobs list by calling {@link UWSBackupManager#saveOwner(String)}. + * This manager will be called at each user action to save only its own jobs list by calling {@link UWSBackupManager#saveOwner(JobOwner)}. *

    * * @param backupManager Its new backup manager. @@ -509,6 +532,11 @@ public class UWSService implements UWS { this.backupManager = backupManager; } + @Override + public final RequestParser getRequestParser(){ + return requestParser; + } + /* ******************** */ /* HOME PAGE MANAGEMENT */ /* ******************** */ @@ -596,7 +624,7 @@ public class UWSService implements UWS { if (serializers.containsKey(mimeType)) defaultSerializer = mimeType; else - throw UWSExceptionFactory.missingSerializer(mimeType, "Impossible to set the default serializer."); + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Missing UWS serializer for the MIME types: " + mimeType + "! The default serializer won't be set."); } /** @@ -646,6 +674,7 @@ public class UWSService implements UWS { return serializers.values().iterator(); } + @Override public final UWSSerializer getSerializer(String mimeTypes) throws UWSException{ choosenSerializer = null; @@ -725,14 +754,17 @@ public class UWSService implements UWS { * * @see java.lang.Iterable#iterator() */ + @Override public final Iterator iterator(){ return mapJobLists.values().iterator(); } + @Override public final JobList getJobList(String name){ return mapJobLists.get(name); } + @Override public final int getNbJobList(){ return mapJobLists.size(); } @@ -746,9 +778,10 @@ public class UWSService implements UWS { * false if the given jobs list is null or if a jobs list with this name already exists * or if a UWS is already associated with another UWS. 
* - * @see JobList#setUWS(AbstractUWS) + * @see JobList#setUWS(UWS) * @see UWS#addJobList(JobList) */ + @Override public final boolean addJobList(JobList jl){ if (jl == null) return false; @@ -759,44 +792,14 @@ public class UWSService implements UWS { jl.setUWS(this); mapJobLists.put(jl.getName(), jl); }catch(IllegalStateException ise){ - logger.error("The jobs list \"" + jl.getName() + "\" can not be added into the UWS " + getName() + " !", ise); + logger.logUWS(LogLevel.ERROR, jl, "ADD_JOB_LIST", "The jobs list \"" + jl.getName() + "\" can not be added into the UWS " + getName() + ": it may already be associated with one!", ise); return false; } return true; } - /*public final JobList removeJobList(String name){ - JobList jl = mapJobLists.get(name); - if (jl != null){ - if (removeJobList(jl)) - return jl; - } - return null; - }*/ - - /* - * Removes the given jobs list from this UWS. - * - * @param jl The jobs list to remove. - * - * @return true if the jobs list has been successfully removed, false otherwise. - * - * @see JobList#removeAll() - * @see JobList#setUWS(UWSService) - * - public boolean removeJobList(JobList jl){ - if (jl == null) - return false; - - jl = mapJobLists.remove(jl.getName()); - if (jl != null){ - jl.removeAll(); - jl.setUWS(null); - } - return jl != null; - }*/ - + @Override public final boolean destroyJobList(String name){ return destroyJobList(mapJobLists.get(name)); } @@ -809,7 +812,7 @@ public class UWSService implements UWS { * @return true if the given jobs list has been destroyed, false otherwise. 
* * @see JobList#clear() - * @see JobList#setUWS(UWSService) + * @see JobList#setUWS(UWS) */ public boolean destroyJobList(JobList jl){ if (jl == null) @@ -821,23 +824,12 @@ public class UWSService implements UWS { jl.clear(); jl.setUWS(null); }catch(IllegalStateException ise){ - getLogger().warning("Impossible to erase completely the association between the jobs list \"" + jl.getName() + "\" and the UWS \"" + getName() + "\", because: " + ise.getMessage()); + logger.logUWS(LogLevel.WARNING, jl, "DESTROY_JOB_LIST", "Impossible to erase completely the association between the jobs list \"" + jl.getName() + "\" and the UWS \"" + getName() + "\"!", ise); } } return jl != null; } - /* - * Removes all managed jobs lists. - * - * @see #removeJobList(String) - * - public final void removeAllJobLists(){ - ArrayList jlNames = new ArrayList(mapJobLists.keySet()); - for(String jlName : jlNames) - removeJobList(jlName); - }*/ - /** * Destroys all managed jobs lists. * @@ -856,7 +848,7 @@ public class UWSService implements UWS { *

    Lets adding the given action to this UWS.

    * *

    WARNING: The action will be added at the end of the actions list of this UWS. That means, it will be evaluated (call of - * the method {@link UWSAction#match(UWSUrl, String, HttpServletRequest)}) lastly !

    + * the method {@link UWSAction#match(UWSUrl, JobOwner, HttpServletRequest)}) lastly !

    * * @param action The UWS action to add. * @@ -1000,6 +992,27 @@ public class UWSService implements UWS { /* ********************** */ /* UWS MANAGEMENT METHODS */ /* ********************** */ + + /** + *

    Generate a unique ID for the given request.

    + * + *

    By default, a timestamp is returned.

    + * + * @param request Request whose an ID is asked. + * + * @return The ID of the given request. + * + * @since 4.1 + */ + protected synchronized String generateRequestID(final HttpServletRequest request){ + String id; + do{ + id = System.currentTimeMillis() + ""; + }while(lastRequestID != null && lastRequestID.startsWith(id)); + lastRequestID = id; + return id; + } + /** *

    Executes the given request according to the IVOA Proposed Recommendation of 2010-02-10. * The result is returned in the given response.

    @@ -1009,7 +1022,7 @@ public class UWSService implements UWS { *
  • Load the request in the UWS URL interpreter (see {@link UWSUrl#load(HttpServletRequest)})
  • *
  • Extract the user ID (see {@link UserIdentifier#extractUserId(UWSUrl, HttpServletRequest)})
  • *
  • Iterate - in order - on all available actions and apply the first which matches. - * (see {@link UWSAction#match(UWSUrl, String, HttpServletRequest)} and {@link UWSAction#apply(UWSUrl, String, HttpServletRequest, HttpServletResponse)})
  • + * (see {@link UWSAction#match(UWSUrl, JobOwner, HttpServletRequest)} and {@link UWSAction#apply(UWSUrl, JobOwner, HttpServletRequest, HttpServletResponse)}) * * * @param request The UWS request. @@ -1023,27 +1036,52 @@ public class UWSService implements UWS { * @see UWSUrl#UWSUrl(HttpServletRequest) * @see UWSUrl#load(HttpServletRequest) * @see UserIdentifier#extractUserId(UWSUrl, HttpServletRequest) - * @see UWSAction#match(UWSUrl, String, HttpServletRequest) - * @see UWSAction#apply(UWSUrl, String, HttpServletRequest, HttpServletResponse) + * @see UWSAction#match(UWSUrl, JobOwner, HttpServletRequest) + * @see UWSAction#apply(UWSUrl, JobOwner, HttpServletRequest, HttpServletResponse) */ public boolean executeRequest(HttpServletRequest request, HttpServletResponse response) throws UWSException, IOException{ if (request == null || response == null) return false; + // Generate a unique ID for this request execution (for log purpose only): + final String reqID = generateRequestID(request); + if (request.getAttribute(UWS.REQ_ATTRIBUTE_ID) == null) + request.setAttribute(UWS.REQ_ATTRIBUTE_ID, reqID); + + // Extract all parameters: + if (request.getAttribute(UWS.REQ_ATTRIBUTE_PARAMETERS) == null){ + try{ + request.setAttribute(UWS.REQ_ATTRIBUTE_PARAMETERS, requestParser.parse(request)); + }catch(UWSException ue){ + logger.log(LogLevel.ERROR, "REQUEST_PARSER", "Can not extract the HTTP request parameters!", ue); + } + } + + // Log the reception of the request: + logger.logHttp(LogLevel.INFO, request, reqID, null, null); + boolean actionApplied = false; UWSAction action = null; JobOwner user = null; try{ - // Update the UWS URL interpreter: - if (urlInterpreter == null){ + if (this.urlInterpreter == null){ + // Initialize the URL interpreter if not already done: setUrlInterpreter(new UWSUrl(request)); - getLogger().uwsInitialized(this); + + // Log the successful initialization: + logger.logUWS(LogLevel.INFO, this, "INIT", "UWS successfully initialized.", null); } + + 
// Update the UWS URL interpreter: + UWSUrl urlInterpreter = new UWSUrl(this.urlInterpreter); urlInterpreter.load(request); // Identify the user: - user = (userIdentifier == null) ? null : userIdentifier.extractUserId(urlInterpreter, request); + user = UWSToolBox.getUser(request, userIdentifier); + + // Set the character encoding: + response.setCharacterEncoding(UWSToolBox.DEFAULT_CHAR_ENCODING); // Apply the appropriate UWS action: for(int i = 0; action == null && i < uwsActions.size(); i++){ @@ -1056,19 +1094,64 @@ public class UWSService implements UWS { // If no corresponding action has been found, throw an error: if (action == null) - throw new UWSException(UWSException.NOT_IMPLEMENTED, "Unknown UWS action ! This HTTP request can not be interpreted by this UWS service !"); + throw new UWSException(UWSException.NOT_IMPLEMENTED, "Unknown UWS action!"); response.flushBuffer(); - logger.httpRequest(request, user, (action != null) ? action.getName() : null, HttpServletResponse.SC_OK, "[OK]", null); + + // Log the successful execution of the action: + logger.logHttp(LogLevel.INFO, response, reqID, user, "UWS action \"" + ((action != null) ? action.getName() : null) + "\" successfully executed.", null); + + }catch(IOException ioe){ + /* + * Any IOException thrown while writing the HTTP response is generally caused by a client abortion (intentional or timeout) + * or by a connection closed with the client for another reason. + * Consequently, a such error should not be considered as a real error from the server or the library: the request is + * canceled, and so the response is not expected. It is anyway not possible any more to send it (header and/or body) totally + * or partially. + * Nothing can solve this error. So the "error" is just reported as a simple information and theoretically the action + * executed when this error has been thrown is already stopped. 
+ */ + logger.logHttp(LogLevel.INFO, response, reqID, user, "HTTP request aborted or connection with the client closed => the UWS action \"" + action.getName() + "\" has stopped and the body of the HTTP response can not have been partially or completely written!", null); }catch(UWSException ex){ + /* + * Any known/"expected" UWS exception is logged but also returned to the HTTP client in an error document. + * Since the error is known, it is supposed to have already been logged with a full stack trace. Thus, there + * is no need to log again its stack trace...just its message is logged. + * Besides, this error may also be just a redirection and not a true error. In such case, the error message + * is not logged. + */ + // If redirection, flag the action as executed with success: if (ex.getHttpErrorCode() == UWSException.SEE_OTHER) actionApplied = true; - sendError(ex, request, user, (action != null) ? action.getName() : null, response); - }catch(Exception ex){ - sendError(ex, request, user, (action != null) ? action.getName() : null, response); + sendError(ex, request, reqID, user, ((action != null) ? action.getName() : null), response); + + }catch(IllegalStateException ise){ + /* + * Any IllegalStateException that reaches this point, is supposed coming from a HttpServletResponse operation which + * has to reset the response buffer (e.g. resetBuffer(), sendRedirect(), sendError()). + * If this exception happens, the library tried to rewrite the HTTP response body with a message or a result, + * while this body has already been partially sent to the client. It is then no longer possible to change its content. + * Consequently, the error is logged as FATAL and a message will be appended at the end of the already submitted response + * to alert the HTTP client that an error occurs and the response should not be considered as complete and reliable. 
+ */ + // Write the error in the response and return the appropriate HTTP status code: + errorWriter.writeError(ise, response, request, reqID, user, ((action != null) ? action.getName() : null)); + // Log the error: + getLogger().logHttp(LogLevel.FATAL, response, reqID, user, "HTTP response already partially committed => the UWS action \"" + action.getName() + "\" has stopped and the body of the HTTP response can not have been partially or completely written!", (ise.getCause() != null) ? ise.getCause() : ise); + + }catch(Throwable t){ + /* + * Any other error is considered as unexpected if it reaches this point. Consequently, it has not yet been logged. + * So its stack trace will be fully logged, and an appropriate message will be returned to the HTTP client. The + * returned document should contain not too technical information which would be useless for the user. + */ + sendError(t, request, reqID, user, ((action != null) ? action.getName() : null), response); + }finally{ executedAction = action; + // Free resources about uploaded files ; only unused files will be deleted: + UWSToolBox.deleteUploads(request); } return actionApplied; @@ -1089,9 +1172,9 @@ public class UWSService implements UWS { public void redirect(String url, HttpServletRequest request, JobOwner user, String uwsAction, HttpServletResponse response) throws IOException, UWSException{ response.setStatus(HttpServletResponse.SC_SEE_OTHER); response.setContentType(request.getContentType()); + response.setCharacterEncoding(UWSToolBox.DEFAULT_CHAR_ENCODING); response.setHeader("Location", url); response.flushBuffer(); - logger.httpRequest(request, user, uwsAction, HttpServletResponse.SC_SEE_OTHER, "[Redirection toward " + url + "]", null); } /** @@ -1103,26 +1186,25 @@ public class UWSService implements UWS { * * @param error The error to send/display. * @param request The request which has caused the given error (not used by default). + * @param reqID ID of the request. 
* @param user The user which executes the given request. * @param uwsAction The UWS action corresponding to the given request. * @param response The response in which the error must be published. * * @throws IOException If there is an error when calling {@link #redirect(String, HttpServletRequest, JobOwner, String, HttpServletResponse)} or {@link HttpServletResponse#sendError(int, String)}. - * @throws UWSException If there is an error when calling {@link #redirect(String, HttpServletRequest, JobOwner, String, HttpServletResponse))}. + * @throws UWSException If there is an error when calling {@link #redirect(String, HttpServletRequest, JobOwner, String, HttpServletResponse)}. * * @see #redirect(String, HttpServletRequest, JobOwner, String, HttpServletResponse) - * @see {@link ServiceErrorWriter#writeError(Throwable, HttpServletResponse, HttpServletRequest, JobOwner, String)} + * @see #sendError(Throwable, HttpServletRequest, String, JobOwner, String, HttpServletResponse) */ - public final void sendError(UWSException error, HttpServletRequest request, JobOwner user, String uwsAction, HttpServletResponse response) throws IOException, UWSException{ - if (error.getHttpErrorCode() == UWSException.SEE_OTHER) + public final void sendError(UWSException error, HttpServletRequest request, String reqID, JobOwner user, String uwsAction, HttpServletResponse response) throws IOException, UWSException{ + if (error.getHttpErrorCode() == UWSException.SEE_OTHER){ + // Log the redirection, if any: + logger.logHttp(LogLevel.INFO, response, reqID, user, "HTTP " + UWSException.SEE_OTHER + " [Redirection toward " + error.getMessage() + "] - Action \"" + uwsAction + "\" successfully executed.", null); + // Apply the redirection: redirect(error.getMessage(), request, user, uwsAction, response); - else{ - errorWriter.writeError(error, response, request, user, uwsAction); - /*if (error.getHttpErrorCode() == UWSException.INTERNAL_SERVER_ERROR) - logger.error(error); - 
response.sendError(error.getHttpErrorCode(), error.getMessage()); - logger.httpRequest(request, user, uwsAction, error.getHttpErrorCode(), error.getMessage(), error);*/ - } + }else + sendError((Throwable)error, request, reqID, user, uwsAction, response); } /** @@ -1136,20 +1218,23 @@ public class UWSService implements UWS { * * @param error The error to send/display. * @param request The request which has caused the given error (not used by default). + * @param reqID ID of the request. * @param user The user which executes the given request. - * @param uwsAction The UWS action corresponding to the given request. + * @param uwsAction The UWS action corresponding to the given request. * @param response The response in which the error must be published. * * @throws IOException If there is an error when calling {@link HttpServletResponse#sendError(int, String)}. - * @throws UWSException * - * @see {@link ServiceErrorWriter#writeError(Throwable, HttpServletResponse, HttpServletRequest, JobOwner, String)} + * @see ServiceErrorWriter#writeError(Throwable, HttpServletResponse, HttpServletRequest, String, JobOwner, String) */ - public final void sendError(Exception error, HttpServletRequest request, JobOwner user, String uwsAction, HttpServletResponse response) throws IOException, UWSException{ - errorWriter.writeError(error, response, request, user, uwsAction); - /*logger.error(error); - response.sendError(UWSException.INTERNAL_SERVER_ERROR, error.getMessage()); - logger.httpRequest(request, user, uwsAction, UWSException.INTERNAL_SERVER_ERROR, error.getMessage(), error);*/ + public final void sendError(Throwable error, HttpServletRequest request, String reqID, JobOwner user, String uwsAction, HttpServletResponse response) throws IOException{ + // Write the error in the response and return the appropriate HTTP status code: + errorWriter.writeError(error, response, request, reqID, user, uwsAction); + // Log the error: + if (error instanceof UWSException) + 
logger.logHttp(LogLevel.ERROR, response, reqID, user, "UWS action \"" + uwsAction + "\" FAILED with the error: \"" + error.getMessage() + "\"!", null); + else + logger.logHttp(LogLevel.FATAL, response, reqID, user, "UWS action \"" + uwsAction + "\" execution FAILED with a GRAVE error!", error); } } diff --git a/src/uws/service/UWSServlet.java b/src/uws/service/UWSServlet.java index bacb48034a2d9288e97467f24ed830c0407157e4..52c4201b71ce9c99e0a5c2365917c03242130055 100644 --- a/src/uws/service/UWSServlet.java +++ b/src/uws/service/UWSServlet.java @@ -16,14 +16,14 @@ package uws.service; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.PrintWriter; - import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; @@ -31,52 +31,44 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.lang.IllegalStateException; - import javax.servlet.ServletConfig; import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; - import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; -import org.apache.catalina.connector.ClientAbortException; - import uws.AcceptHeader; import uws.UWSException; import uws.UWSExceptionFactory; import uws.UWSToolBox; - import uws.job.ErrorSummary; import uws.job.JobList; +import uws.job.JobThread; import uws.job.Result; import uws.job.UWSJob; - import uws.job.parameters.DestructionTimeController; +import uws.job.parameters.DestructionTimeController.DateField; import uws.job.parameters.ExecutionDurationController; import 
uws.job.parameters.InputParamController; import uws.job.parameters.UWSParameters; -import uws.job.parameters.DestructionTimeController.DateField; - import uws.job.serializer.JSONSerializer; import uws.job.serializer.UWSSerializer; import uws.job.serializer.XMLSerializer; - import uws.job.user.JobOwner; - import uws.service.actions.UWSAction; - import uws.service.backup.UWSBackupManager; - import uws.service.error.DefaultUWSErrorWriter; import uws.service.error.ServiceErrorWriter; import uws.service.file.LocalUWSFileManager; import uws.service.file.UWSFileManager; - import uws.service.log.DefaultUWSLog; import uws.service.log.UWSLog; +import uws.service.log.UWSLog.LogLevel; +import uws.service.request.RequestParser; +import uws.service.request.UWSRequestParser; +import uws.service.request.UploadFile; /** *

    @@ -88,7 +80,7 @@ import uws.service.log.UWSLog; *

    UWS Definition

    *

    * To create a such servlet, you have to extend this class. Once done, only two functions must be - * implemented: {@link #createJob(Map, JobOwner)} and {@link #initUWS()}. + * implemented: {@link #createJob(HttpServletRequest, JobOwner)} and {@link #initUWS()}. *

    *

    * The first one will be called by the library each time a job must be created. All the job parameters @@ -101,12 +93,37 @@ import uws.service.log.UWSLog; * * addJobList(new JobList<MyJob>("jlName")); * + *

    The below code show an example of usage of this class:

    + *
    + * public class MyUWSServlet extends UWSServlet {
    + * 
    + * 	// Initialize the UWS service by creating at least one job list.
    + * 	public void initUWS() throws UWSException {
    + * 		addJobList(new JobList("jobList"));
    + * 	}
    + * 
    + * 	// Create the job process corresponding to the job to execute ; generally, the process identification can be merely done by checking the job list name. 
    + * 	public JobThread createJobThread(UWSJob job) throws UWSException {
    + * 		if (job.getJobList().getName().equals("jobList"))
    + * 			return new MyJobThread(job);
    + * 		else
    + * 			throw new UWSException("Impossible to create a job inside the jobs list \"" + job.getJobList().getName() + "\" !");
    + * 	}
    + * }
    + * 
    *

    * The name and the description of the UWS may be specified in the web.xml file as init-param of the servlet: * name and description. The other way is to directly set the corresponding * attributes: {@link #name} and {@link #description}. *

    * + *

    Note: + * If any error occurs while the initialization or the creation of a {@link UWSServlet} instance, a {@link ServletException} + * will be thrown with a basic message dedicated to the service users. This basic and non-informative message is + * obviously not intended to the administrator which will be able to get the reason of the failure + * (with a stack trace when available) in the log files. + *

    + * *

    UWS customization

    *

    * As for the classic HTTP servlets, this servlet has one method for each method of the implemented protocol. @@ -114,12 +131,12 @@ import uws.service.log.UWSLog; * These functions are: *

    *
      - *
    • {@link #doAddJob(HttpServletRequest, HttpServletResponse, JobOwner)}
    • - *
    • {@link #doDestroyJob(HttpServletRequest, HttpServletResponse, JobOwner)}
    • - *
    • {@link #doGetJobParam(HttpServletRequest, HttpServletResponse, JobOwner)}
    • - *
    • {@link #doJobSummary(HttpServletRequest, HttpServletResponse, JobOwner)}
    • - *
    • {@link #doListJob(HttpServletRequest, HttpServletResponse, JobOwner)}
    • - *
    • {@link #doSetJobParam(HttpServletRequest, HttpServletResponse, JobOwner)}
    • + *
    • {@link #doAddJob(UWSUrl, HttpServletRequest, HttpServletResponse, JobOwner)}
    • + *
    • {@link #doDestroyJob(UWSUrl, HttpServletRequest, HttpServletResponse, JobOwner)}
    • + *
    • {@link #doGetJobParam(UWSUrl, HttpServletRequest, HttpServletResponse, JobOwner)}
    • + *
    • {@link #doJobSummary(UWSUrl, HttpServletRequest, HttpServletResponse, JobOwner)}
    • + *
    • {@link #doListJob(UWSUrl, HttpServletRequest, HttpServletResponse, JobOwner)}
    • + *
    • {@link #doSetJobParam(UWSUrl, HttpServletRequest, HttpServletResponse, JobOwner)}
    • *
    *

    * They are all already implemented following their definition in the IVOA document. However, @@ -131,8 +148,8 @@ import uws.service.log.UWSLog; * So, they can be overridden as in any HTTP servlet. *

    * - * @author Grégory Mantelet (CDS) - * @version 06/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (04/2015) */ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory { private static final long serialVersionUID = 1L; @@ -173,6 +190,10 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory /** Lets logging info/debug/warnings/errors about this UWS. */ protected UWSLog logger; + /** Lets extract all parameters from an HTTP request, whatever is its content-type. + * @since 4.1*/ + protected RequestParser requestParser; + /** Lets writing/formatting any exception/throwable in a HttpServletResponse. */ protected ServiceErrorWriter errorWriter; @@ -183,6 +204,8 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory @Override public final void init() throws ServletException{ + final String INIT_ERROR_MSG = "UWS initialization ERROR! Contact the administrator of the service to figure out the failure."; + // Set the general information about this UWS: name = getServletConfig().getInitParameter("name"); description = getServletConfig().getInitParameter("description"); @@ -190,15 +213,26 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory // Set the file manager to use: try{ fileManager = createFileManager(); - if (fileManager == null) - throw new ServletException("Missing file manager ! The function createFileManager() MUST return a valid instanceof UWSFileManager !"); + if (fileManager == null){ + logger.logUWS(LogLevel.FATAL, null, "INIT", "Missing file manager! 
The function createFileManager() MUST return a valid instanceof UWSFileManager!", null); + throw new ServletException(INIT_ERROR_MSG); + } }catch(UWSException ue){ - throw new ServletException("Error while setting the file manager.", ue); + logger.logUWS(LogLevel.FATAL, null, "INIT", "Can't create a file manager!", ue); + throw new ServletException(INIT_ERROR_MSG, ue); } // Set the logger: logger = new DefaultUWSLog(this); - errorWriter = new DefaultUWSErrorWriter(this); + errorWriter = new DefaultUWSErrorWriter(logger); + + // Set the request parser: + try{ + requestParser = createRequestParser(fileManager); + }catch(UWSException ue){ + logger.logUWS(LogLevel.FATAL, null, "INIT", "Can't create a request parser!", ue); + throw new ServletException(INIT_ERROR_MSG, ue); + } // Initialize the list of jobs: mapJobLists = new LinkedHashMap(); @@ -209,16 +243,52 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory addSerializer(new JSONSerializer()); try{ + // Initialize the service: initUWS(); - logger.uwsInitialized(this); + + // Log the successful initialization: + logger.logUWS(LogLevel.INFO, this, "INIT", "UWS successfully initialized.", null); + }catch(UWSException ue){ - logger.error("UWS initialization impossible !", ue); - throw new ServletException("Error while initializing UWS ! See the log for more details."); + logger.logUWS(LogLevel.FATAL, null, "INIT", "Can't execute the custom initialization of this UWS service (UWSServlet.initUWS())!", ue); + throw new ServletException(INIT_ERROR_MSG); } } public abstract void initUWS() throws UWSException; + @Override + public void destroy(){ + // Backup all jobs: + /* Jobs are backuped now so that running jobs are set back to the PENDING phase in the backup. + * Indeed, the "stopAll" operation of the ExecutionManager may fail and would set the phase to ERROR + * for the wrong reason. 
*/ + if (backupManager != null){ + // save all jobs: + backupManager.setEnabled(true); + backupManager.saveAll(); + // stop the automatic backup, if there is one: + backupManager.setEnabled(false); + } + + // Stop all jobs and stop watching for the jobs' destruction: + for(JobList jl : mapJobLists.values()){ + jl.getExecutionManager().stopAll(); + jl.getDestructionManager().stop(); + } + + // Just in case that previous clean "stop"s did not work, try again an interruption for all running threads: + /* note: timers are not part of this ThreadGroup and so, they won't be affected by this function call. */ + JobThread.tg.interrupt(); + + // Log the service is stopped: + if (logger != null) + logger.logUWS(LogLevel.INFO, this, "STOP", "UWS Service \"" + getName() + "\" stopped!", null); + + // Default destroy function: + super.destroy(); + } + public UWSFileManager createFileManager() throws UWSException{ UWSFileManager fm = null; String rootPath = getServletConfig().getInitParameter("rootDirectory"); @@ -236,16 +306,46 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory return fileManager; } + @Override + public RequestParser getRequestParser(){ + return requestParser; + } + @Override public final void service(ServletRequest req, ServletResponse resp) throws ServletException, IOException{ super.service(req, resp); } + protected static String lastRequestID = null; + + protected synchronized String generateRequestID(final HttpServletRequest request){ + String id; + do{ + id = System.currentTimeMillis() + ""; + }while(lastRequestID != null && lastRequestID.startsWith(id)); + lastRequestID = id; + return id; + } + @Override protected final void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException{ String uwsAction = null; JobOwner user = null; + // Generate a unique ID for this request execution (for log purpose only): + final String reqID = generateRequestID(req); + 
req.setAttribute(UWS.REQ_ATTRIBUTE_ID, reqID); + + // Extract all parameters: + try{ + req.setAttribute(UWS.REQ_ATTRIBUTE_PARAMETERS, requestParser.parse(req)); + }catch(UWSException ue){ + logger.log(LogLevel.WARNING, "REQUEST_PARSER", "Can not extract the HTTP request parameters!", ue); + } + + // Log the reception of the request: + logger.logHttp(LogLevel.INFO, req, reqID, null, null); + try{ String method = req.getMethod(); @@ -258,7 +358,10 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory requestUrl.load(req); // Identify the user: - user = (userIdentifier == null) ? null : userIdentifier.extractUserId(requestUrl, req); + user = UWSToolBox.getUser(req, userIdentifier); + + // Set the character encoding: + resp.setCharacterEncoding(UWSToolBox.DEFAULT_CHAR_ENCODING); // METHOD GET: if (method.equals("GET")){ @@ -282,11 +385,8 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory uwsAction = UWSAction.GET_JOB_PARAM; doGetJobParam(requestUrl, req, resp, user); - }else{ - logger.httpRequest(req, user, null, 0, null, null); - super.service(req, resp); - return; - } + }else + throw new UWSException(UWSException.NOT_IMPLEMENTED, "Unknown UWS action!"); }// METHOD POST: else if (method.equals("POST")){ @@ -300,34 +400,40 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory uwsAction = UWSAction.ADD_JOB; doAddJob(requestUrl, req, resp, user); - }// SET JOB PARAMETER: - else if (requestUrl.hasJobList() && requestUrl.hasJob() && (!requestUrl.hasAttribute() || requestUrl.getAttributes().length == 1) && req.getParameterMap().size() > 0){ - uwsAction = UWSAction.SET_JOB_PARAM; - doSetJobParam(requestUrl, req, resp, user); + }// SET JOB's UWS STANDARD PARAMETER + else if (requestUrl.hasJobList() && requestUrl.hasJob() && requestUrl.getAttributes().length == 1 && requestUrl.getAttributes()[0].toLowerCase().matches(UWSParameters.UWS_RW_PARAMETERS_REGEXP) && 
UWSToolBox.hasParameter(requestUrl.getAttributes()[0], req, false)){ + uwsAction = UWSAction.SET_UWS_PARAMETER; + doSetUWSParameter(requestUrl, req, resp, user); }// DESTROY JOB: - else if (requestUrl.hasJobList() && requestUrl.hasJob() && req.getParameter(UWSJob.PARAM_ACTION) != null && req.getParameter(UWSJob.PARAM_ACTION).equalsIgnoreCase(UWSJob.ACTION_DELETE)){ + else if (requestUrl.hasJobList() && requestUrl.hasJob() && UWSToolBox.hasParameter(UWSJob.PARAM_ACTION, UWSJob.ACTION_DELETE, req, false)){ uwsAction = UWSAction.DESTROY_JOB; doDestroyJob(requestUrl, req, resp, user); - }else{ - logger.httpRequest(req, user, null, 0, null, null); - super.service(req, resp); - return; - } + }// SET JOB PARAMETER: + else if (requestUrl.hasJobList() && requestUrl.hasJob() && (!requestUrl.hasAttribute() || requestUrl.getAttributes().length == 1 && requestUrl.getAttributes()[0].equalsIgnoreCase(UWSJob.PARAM_PARAMETERS)) && UWSToolBox.getNbParameters(req) > 0){ + uwsAction = UWSAction.SET_JOB_PARAM; + doSetJobParam(requestUrl, req, resp, user); + + }else + throw new UWSException(UWSException.NOT_IMPLEMENTED, "Unknown UWS action!"); }// METHOD PUT: else if (method.equals("PUT")){ // SET JOB PARAMETER: - if (requestUrl.hasJobList() && requestUrl.hasJob() && req.getMethod().equalsIgnoreCase("put") && requestUrl.getAttributes().length >= 2 && requestUrl.getAttributes()[0].equalsIgnoreCase(UWSJob.PARAM_PARAMETERS) && req.getParameter(requestUrl.getAttributes()[1]) != null){ + if (requestUrl.hasJobList() && requestUrl.hasJob() && requestUrl.getAttributes().length >= 2 && requestUrl.getAttributes()[0].equalsIgnoreCase(UWSJob.PARAM_PARAMETERS)){ uwsAction = UWSAction.SET_JOB_PARAM; + if (!UWSToolBox.hasParameter(requestUrl.getAttributes()[1], req, false)) + throw new UWSException(UWSException.BAD_REQUEST, "Wrong parameter name in the PUT request! 
Expected: " + requestUrl.getAttributes()[1]); doSetJobParam(requestUrl, req, resp, user); - }else{ - logger.httpRequest(req, user, null, 0, null, null); - super.service(req, resp); - return; - } + }// SET JOB's UWS STANDARD PARAMETER + else if (requestUrl.hasJobList() && requestUrl.hasJob() && requestUrl.getAttributes().length == 1 && requestUrl.getAttributes()[0].toLowerCase().matches(UWSParameters.UWS_RW_PARAMETERS_REGEXP) && UWSToolBox.hasParameter(requestUrl.getAttributes()[0], req, false)){ + uwsAction = UWSAction.SET_UWS_PARAMETER; + doSetUWSParameter(requestUrl, req, resp, user); + + }else + throw new UWSException(UWSException.NOT_IMPLEMENTED, "Unknown UWS action!"); }// METHOD DELETE: else if (method.equals("DELETE")){ @@ -336,31 +442,65 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory uwsAction = UWSAction.DESTROY_JOB; doDestroyJob(requestUrl, req, resp, user); - }else{ - logger.httpRequest(req, user, null, 0, null, null); - super.service(req, resp); - return; - } + }else + throw new UWSException(UWSException.NOT_IMPLEMENTED, "Unknown UWS action!"); - }// ELSE => DEFAULT BEHAVIOR: - else{ - logger.httpRequest(req, user, null, 0, null, null); - super.service(req, resp); - return; - } + }// ELSE ERROR: + else + throw new UWSException(UWSException.NOT_IMPLEMENTED, "Unknown UWS action!"); resp.flushBuffer(); - logger.httpRequest(req, user, uwsAction, HttpServletResponse.SC_OK, "[OK]", null); + + // Log the successful execution of the action: + logger.logHttp(LogLevel.INFO, resp, reqID, user, "UWS action \"" + uwsAction + "\" successfully executed.", null); + + }catch(IOException ioe){ + /* + * Any IOException thrown while writing the HTTP response is generally caused by a client abortion (intentional or timeout) + * or by a connection closed with the client for another reason. 
+ * Consequently, a such error should not be considered as a real error from the server or the library: the request is + * canceled, and so the response is not expected. It is anyway not possible any more to send it (header and/or body) totally + * or partially. + * Nothing can solve this error. So the "error" is just reported as a simple information and theoretically the action + * executed when this error has been thrown is already stopped. + */ + logger.logHttp(LogLevel.INFO, resp, reqID, user, "HTTP request aborted or connection with the client closed => the UWS action \"" + uwsAction + "\" has stopped and the body of the HTTP response can not have been partially or completely written!", null); }catch(UWSException ex){ - sendError(ex, req, user, uwsAction, resp); - }catch(ClientAbortException cae){ - logger.info("Request aborted by the user !"); - logger.httpRequest(req, user, uwsAction, HttpServletResponse.SC_OK, "[Client abort => ClientAbortException]", null); + /* + * Any known/"expected" UWS exception is logged but also returned to the HTTP client in an error document. + * Since the error is known, it is supposed to have already been logged with a full stack trace. Thus, there + * is no need to log again its stack trace...just its message is logged. + * Besides, this error may also be just a redirection and not a true error. In such case, the error message + * is not logged. + */ + sendError(ex, req, reqID, user, uwsAction, resp); + + }catch(IllegalStateException ise){ + /* + * Any IllegalStateException that reaches this point, is supposed coming from a HttpServletResponse operation which + * has to reset the response buffer (e.g. resetBuffer(), sendRedirect(), sendError()). + * If this exception happens, the library tried to rewrite the HTTP response body with a message or a result, + * while this body has already been partially sent to the client. It is then no longer possible to change its content. 
+ * Consequently, the error is logged as FATAL and a message will be appended at the end of the already submitted response + * to alert the HTTP client that an error occurs and the response should not be considered as complete and reliable. + */ + // Write the error in the response and return the appropriate HTTP status code: + errorWriter.writeError(ise, resp, req, reqID, user, uwsAction); + // Log the error: + getLogger().logHttp(LogLevel.FATAL, resp, reqID, user, "HTTP response already partially committed => the UWS action \"" + uwsAction + "\" has stopped and the body of the HTTP response can not have been partially or completely written!", (ise.getCause() != null) ? ise.getCause() : ise); + }catch(Throwable t){ - logger.error("Request unexpectedly aborted !", t); - logger.httpRequest(req, user, uwsAction, HttpServletResponse.SC_INTERNAL_SERVER_ERROR, t.getMessage(), t); - resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, t.getMessage()); + /* + * Any other error is considered as unexpected if it reaches this point. Consequently, it has not yet been logged. + * So its stack trace will be fully logged, and an appropriate message will be returned to the HTTP client. The + * returned document should contain not too technical information which would be useless for the user. 
+ */ + sendError(t, req, reqID, user, uwsAction, resp); + + }finally{ + // Free resources about uploaded files ; only unused files will be deleted: + UWSToolBox.deleteUploads(req); } } @@ -370,13 +510,23 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory protected void writeHomePage(UWSUrl requestUrl, HttpServletRequest req, HttpServletResponse resp, JobOwner user) throws UWSException, ServletException, IOException{ UWSSerializer serializer = getSerializer(req.getHeader("Accept")); resp.setContentType(serializer.getMimeType()); - String serialization = serializer.getUWS(this); + resp.setCharacterEncoding(UWSToolBox.DEFAULT_CHAR_ENCODING); + String serialization; + try{ + serialization = serializer.getUWS(this); + }catch(Exception e){ + if (!(e instanceof UWSException)){ + getLogger().logUWS(LogLevel.ERROR, requestUrl, "SERIALIZE", "Can't display the default home page, due to a serialization error!", e); + throw new UWSException(UWSException.NO_CONTENT, e, "No home page available for this UWS service!"); + }else + throw (UWSException)e; + } if (serialization != null){ PrintWriter output = resp.getWriter(); output.print(serialization); output.flush(); }else - throw UWSExceptionFactory.incorrectSerialization(serialization, "the UWS " + getName()); + throw new UWSException(UWSException.NO_CONTENT, "No home page available for this UWS service."); } protected void doListJob(UWSUrl requestUrl, HttpServletRequest req, HttpServletResponse resp, JobOwner user) throws UWSException, ServletException, IOException{ @@ -386,7 +536,16 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory // Write the jobs list: UWSSerializer serializer = getSerializer(req.getHeader("Accept")); resp.setContentType(serializer.getMimeType()); - jobsList.serialize(resp.getOutputStream(), serializer, user); + resp.setCharacterEncoding(UWSToolBox.DEFAULT_CHAR_ENCODING); + try{ + jobsList.serialize(resp.getOutputStream(), serializer, user); + 
}catch(Exception e){ + if (!(e instanceof UWSException)){ + getLogger().logUWS(LogLevel.ERROR, requestUrl, "SERIALIZE", "Can not serialize the jobs list \"" + jobsList.getName() + "\"!", e); + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, e, "Can not format properly the jobs list \"" + jobsList.getName() + "\"!"); + }else + throw (UWSException)e; + } } protected void doAddJob(UWSUrl requestUrl, HttpServletRequest req, HttpServletResponse resp, JobOwner user) throws UWSException, ServletException, IOException{ @@ -395,25 +554,48 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory // Forbids the job creation if the user has not the WRITE permission for the specified jobs list: if (user != null && !user.hasWritePermission(jobsList)) - throw UWSExceptionFactory.writePermissionDenied(user, true, jobsList.getName()); + throw new UWSException(UWSException.PERMISSION_DENIED, UWSExceptionFactory.writePermissionDenied(user, true, jobsList.getName())); // Create the job: UWSJob newJob = createJob(req, user); // Add it to the jobs list: if (jobsList.addNewJob(newJob) != null){ + // Start the job if the phase parameter was provided with the "RUN" value: + if (UWSToolBox.hasParameter(UWSJob.PARAM_PHASE, UWSJob.PHASE_RUN, req, false)) + newJob.start(); // Make a redirection to the added job: redirect(requestUrl.jobSummary(jobsList.getName(), newJob.getJobId()).getRequestURL(), req, user, UWSAction.ADD_JOB, resp); }else throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Unable to add the new job " + newJob.getJobId() + " to the jobs list " + jobsList.getName() + ". 
(ID already used = " + (jobsList.getJob(newJob.getJobId()) != null) + ")"); } + protected void doSetUWSParameter(UWSUrl requestUrl, HttpServletRequest req, HttpServletResponse resp, JobOwner user) throws UWSException, ServletException, IOException{ + // Get the job: + UWSJob job = getJob(requestUrl); + + // Forbids the action if the user has not the WRITE permission for the specified job: + if (user != null && !user.hasWritePermission(job)) + throw new UWSException(UWSException.PERMISSION_DENIED, UWSExceptionFactory.writePermissionDenied(user, true, job.getJobId())); + + String name = requestUrl.getAttributes()[0]; + job.addOrUpdateParameter(name, UWSToolBox.getParameter(name, req, false), user); + + // Make a redirection to the job: + redirect(requestUrl.jobSummary(requestUrl.getJobListName(), job.getJobId()).getRequestURL(), req, user, getName(), resp); + } + protected void doDestroyJob(UWSUrl requestUrl, HttpServletRequest req, HttpServletResponse resp, JobOwner user) throws UWSException, ServletException, IOException{ // Get the jobs list: JobList jobsList = getJobList(requestUrl.getJobListName()); // Destroy the job: - jobsList.destroyJob(requestUrl.getJobId(), user); + try{ + jobsList.destroyJob(requestUrl.getJobId(), user); + }catch(UWSException ue){ + getLogger().logUWS(LogLevel.ERROR, requestUrl, "DESTROY_JOB", "Can not destroy the job \"" + requestUrl.getJobId() + "\"!", ue); + throw ue; + } // Make a redirection to the jobs list: redirect(requestUrl.listJobs(jobsList.getName()).getRequestURL(), req, user, UWSAction.DESTROY_JOB, resp); @@ -426,7 +608,16 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory // Write the job summary: UWSSerializer serializer = getSerializer(req.getHeader("Accept")); resp.setContentType(serializer.getMimeType()); - job.serialize(resp.getOutputStream(), serializer, user); + resp.setCharacterEncoding(UWSToolBox.DEFAULT_CHAR_ENCODING); + try{ + job.serialize(resp.getOutputStream(), serializer, 
user); + }catch(Exception e){ + if (!(e instanceof UWSException)){ + getLogger().logUWS(LogLevel.ERROR, requestUrl, "SERIALIZE", "Can not serialize the job \"" + job.getJobId() + "\"!", e); + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, e, "Can not format properly the job \"" + job.getJobId() + "\"!"); + }else + throw (UWSException)e; + } } protected void doGetJobParam(UWSUrl requestUrl, HttpServletRequest req, HttpServletResponse resp, JobOwner user) throws UWSException, ServletException, IOException{ @@ -439,7 +630,7 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory if (attributes[0].equalsIgnoreCase(UWSJob.PARAM_RESULTS) && attributes.length > 1){ Result result = job.getResult(attributes[1]); if (result == null) - throw UWSExceptionFactory.incorrectJobResult(job.getJobId(), attributes[1]); + throw new UWSException(UWSException.NOT_FOUND, "No result identified with \"" + attributes[1] + "\" in the job \"" + job.getJobId() + "\"!"); else if (result.getHref() != null && !result.getHref().trim().isEmpty() && !result.getHref().equalsIgnoreCase(req.getRequestURL().toString())) redirect(result.getHref(), req, user, UWSAction.GET_JOB_PARAM, resp); else{ @@ -448,6 +639,7 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory input = getFileManager().getResultInput(result, job); UWSToolBox.write(input, result.getMimeType(), result.getSize(), resp); }catch(IOException ioe){ + getLogger().logUWS(LogLevel.ERROR, result, "GET_RESULT", "Can not read the content of the result \"" + result.getId() + "\" of the job \"" + job.getJobId() + "\"!", ioe); throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, ioe, "Can not read the content of the result " + result.getId() + " (job ID: " + job.getJobId() + ")."); }finally{ if (input != null) @@ -458,29 +650,63 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory else if (attributes[0].equalsIgnoreCase(UWSJob.PARAM_ERROR_SUMMARY) 
&& attributes.length > 1 && attributes[1].equalsIgnoreCase("details")){ ErrorSummary error = job.getErrorSummary(); if (error == null) - throw UWSExceptionFactory.noErrorSummary(job.getJobId()); + throw new UWSException(UWSException.NOT_FOUND, "No error summary for the job \"" + job.getJobId() + "\"!"); else{ InputStream input = null; try{ input = getFileManager().getErrorInput(error, job); - UWSToolBox.write(input, "text/plain", getFileManager().getErrorSize(error, job), resp); + UWSToolBox.write(input, errorWriter.getErrorDetailsMIMEType(), getFileManager().getErrorSize(error, job), resp); }catch(IOException ioe){ + getLogger().logUWS(LogLevel.ERROR, error, "GET_ERROR", "Can not read the details of the error summary of the job \"" + job.getJobId() + "\"!", ioe); throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, ioe, "Can not read the error details (job ID: " + job.getJobId() + ")."); }finally{ if (input != null) input.close(); } } + }// REFERENCE FILE: Display the content of the uploaded file or redirect to the URL (if it is a URL): + else if (attributes[0].equalsIgnoreCase(UWSJob.PARAM_PARAMETERS) && attributes.length > 1 && job.getAdditionalParameterValue(attributes[1]) != null && job.getAdditionalParameterValue(attributes[1]) instanceof UploadFile){ + UploadFile upl = (UploadFile)job.getAdditionalParameterValue(attributes[1]); + if (upl.getLocation().matches("^http(s)?://")) + redirect(upl.getLocation(), req, user, getName(), resp); + else{ + InputStream input = null; + try{ + input = getFileManager().getUploadInput(upl); + UWSToolBox.write(input, upl.mimeType, upl.length, resp); + }catch(IOException ioe){ + getLogger().logUWS(LogLevel.ERROR, upl, "GET_PARAMETER", "Can not read the content of the uploaded file \"" + upl.paramName + "\" of the job \"" + job.getJobId() + "\"!", ioe); + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, ioe, "Can not read the content of the uploaded file " + upl.paramName + " (job ID: " + job.getJobId() + ")."); 
+ }finally{ + if (input != null) + input.close(); + } + } }// DEFAULT CASE: Display the serialization of the selected UWS object: else{ // Write the value/content of the selected attribute: UWSSerializer serializer = getSerializer(req.getHeader("Accept")); String uwsField = attributes[0]; - if (uwsField == null || uwsField.trim().isEmpty() || (attributes.length <= 1 && (uwsField.equalsIgnoreCase(UWSJob.PARAM_ERROR_SUMMARY) || uwsField.equalsIgnoreCase(UWSJob.PARAM_RESULTS) || uwsField.equalsIgnoreCase(UWSJob.PARAM_PARAMETERS)))) + boolean jobSerialization = false; + // Set the content type: + if (uwsField == null || uwsField.trim().isEmpty() || (attributes.length <= 1 && (uwsField.equalsIgnoreCase(UWSJob.PARAM_ERROR_SUMMARY) || uwsField.equalsIgnoreCase(UWSJob.PARAM_RESULTS) || uwsField.equalsIgnoreCase(UWSJob.PARAM_PARAMETERS)))){ resp.setContentType(serializer.getMimeType()); - else + jobSerialization = true; + }else resp.setContentType("text/plain"); - job.serialize(resp.getOutputStream(), attributes, serializer); + // Set the character encoding: + resp.setCharacterEncoding(UWSToolBox.DEFAULT_CHAR_ENCODING); + // Serialize the selected attribute: + try{ + job.serialize(resp.getOutputStream(), attributes, serializer); + }catch(Exception e){ + if (!(e instanceof UWSException)){ + String errorMsgPart = (jobSerialization ? 
"the job \"" + job.getJobId() + "\"" : "the parameter " + uwsField + " of the job \"" + job.getJobId() + "\""); + getLogger().logUWS(LogLevel.ERROR, requestUrl, "SERIALIZE", "Can not serialize " + errorMsgPart + "!", e); + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, e, "Can not format properly " + errorMsgPart + "!"); + }else + throw (UWSException)e; + } } } @@ -502,19 +728,19 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory } public UWSJob getJob(UWSUrl requestUrl, JobOwner user) throws UWSException{ - // Get the jobs list: - JobList jobsList = getJobList(requestUrl.getJobListName()); - // Get the job ID: String jobId = requestUrl.getJobId(); - - if (jobId == null) - throw UWSExceptionFactory.missingJobID(); - - // Get the job: - UWSJob job = jobsList.getJob(jobId, user); - if (job == null) - throw UWSExceptionFactory.incorrectJobID(jobsList.getName(), jobId); + UWSJob job = null; + + if (jobId != null){ + // Get the jobs list: + JobList jobsList = getJobList(requestUrl.getJobListName()); + // Get the job: + job = jobsList.getJob(jobId, user); + if (job == null) + throw new UWSException(UWSException.NOT_FOUND, "Incorrect job ID! 
The job \"" + jobId + "\" does not exist in the jobs list \"" + jobsList.getName() + "\"."); + }else + throw new UWSException(UWSException.BAD_REQUEST, "Missing job ID!"); return job; } @@ -547,6 +773,11 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory return new UWSParameters(req, expectedAdditionalParams, inputParamControllers); } + @Override + public RequestParser createRequestParser(final UWSFileManager fileManager) throws UWSException{ + return new UWSRequestParser(fileManager); + } + /* ****************************** */ /* REDIRECTION & ERROR MANAGEMENT */ /* ****************************** */ @@ -563,9 +794,9 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory public void redirect(String url, HttpServletRequest request, JobOwner user, String uwsAction, HttpServletResponse response) throws ServletException, IOException{ response.setStatus(HttpServletResponse.SC_SEE_OTHER); response.setContentType(request.getContentType()); + response.setCharacterEncoding(UWSToolBox.DEFAULT_CHAR_ENCODING); response.setHeader("Location", url); response.flushBuffer(); - logger.httpRequest(request, user, uwsAction, HttpServletResponse.SC_SEE_OTHER, "[Redirection toward " + url + "]", null); } /** @@ -577,20 +808,58 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory * * @param error The error to send/display. * @param request The request which has caused the given error (not used by default). + * @param reqID ID of the request. + * @param user The user which executes the given request. + * @param uwsAction The UWS action corresponding to the given request. * @param response The response in which the error must be published. * * @throws IOException If there is an error when calling {@link #redirect(String, HttpServletRequest, JobOwner, String, HttpServletResponse)} or {@link HttpServletResponse#sendError(int, String)}. 
* @throws UWSException If there is an error when calling {@link #redirect(String, HttpServletRequest, JobOwner, String, HttpServletResponse)}. * * @see #redirect(String, HttpServletRequest, JobOwner, String, HttpServletResponse) - * @see #writeError(Throwable, HttpServletResponse, HttpServletRequest, JobOwner, String) + * @see #sendError(Throwable, HttpServletRequest, String, JobOwner, String, HttpServletResponse) */ - public void sendError(UWSException error, HttpServletRequest request, JobOwner user, String uwsAction, HttpServletResponse response) throws ServletException, IOException{ - if (error.getHttpErrorCode() == UWSException.SEE_OTHER) - redirect(error.getMessage(), request, user, uwsAction, response); - else{ - errorWriter.writeError(error, response, request, user, uwsAction); - } + public final void sendError(UWSException error, HttpServletRequest request, String reqID, JobOwner user, String uwsAction, HttpServletResponse response) throws ServletException{ + if (error.getHttpErrorCode() == UWSException.SEE_OTHER){ + // Log the redirection, if any: + logger.logHttp(LogLevel.INFO, response, reqID, user, "HTTP " + UWSException.SEE_OTHER + " [Redirection toward " + error.getMessage() + "] - Action \"" + uwsAction + "\" successfully executed.", null); + // Apply the redirection: + try{ + redirect(error.getMessage(), request, user, uwsAction, response); + }catch(IOException ioe){ + logger.logHttp(LogLevel.FATAL, request, reqID, "Can not redirect the response toward " + error.getMessage(), error); + throw new ServletException("Can not redirect the response! You should notify the administrator of the service (FATAL-" + reqID + "). However, while waiting a correction of this problem, you can manually go toward " + error.getMessage() + "."); + } + }else + sendError((Exception)error, request, reqID, user, uwsAction, response); + } + + /** + *

    + * Fills the response with the given error. + * The stack trace of the error is printed on the standard output and then the function + * {@link HttpServletResponse#sendError(int, String)} is called with the HTTP status code is {@link UWSException#INTERNAL_SERVER_ERROR} + * and the message of the given exception. + *

    + * + * + * @param error The error to send/display. + * @param request The request which has caused the given error (not used by default). + * @param reqID ID of the request. + * @param user The user which executes the given request. + * @param uwsAction The UWS action corresponding to the given request. + * @param response The response in which the error must be published. + * + * @throws IOException If there is an error when calling {@link HttpServletResponse#sendError(int, String)}. + * @throws UWSException + * + * @see ServiceErrorWriter#writeError(Throwable, HttpServletResponse, HttpServletRequest, String, JobOwner, String) + */ + public final void sendError(Throwable error, HttpServletRequest request, String reqID, JobOwner user, String uwsAction, HttpServletResponse response) throws ServletException{ + // Write the error in the response and return the appropriate HTTP status code: + errorWriter.writeError(error, response, request, reqID, user, uwsAction); + // Log the error: + logger.logHttp(LogLevel.ERROR, response, reqID, user, "Can not complete the UWS action \"" + uwsAction + "\", because: " + error.getMessage(), error); } /* ************** */ @@ -798,6 +1067,7 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory /** * @return The name. */ + @Override public final String getName(){ return name; } @@ -805,6 +1075,7 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory /** * @return The description. 
*/ + @Override public final String getDescription(){ return description; } @@ -812,25 +1083,29 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory /* ******************** */ /* JOBS LIST MANAGEMENT */ /* ******************** */ + @Override public final Iterator iterator(){ return mapJobLists.values().iterator(); } + @Override public JobList getJobList(String name) throws UWSException{ if (name != null) name = name.trim(); if (name == null || name.length() == 0) - throw UWSExceptionFactory.missingJobListName(); + throw new UWSException(UWSException.BAD_REQUEST, "Missing job list name!"); else if (!mapJobLists.containsKey(name)) - throw UWSExceptionFactory.incorrectJobListName(name); + throw new UWSException(UWSException.NOT_FOUND, "Incorrect job list name ! The jobs list \"" + name + "\" does not exist."); else return mapJobLists.get(name); } + @Override public final int getNbJobList(){ return mapJobLists.size(); } + @Override public final boolean addJobList(JobList jl){ if (jl == null) return false; @@ -841,44 +1116,14 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory jl.setUWS(this); mapJobLists.put(jl.getName(), jl); }catch(IllegalStateException ise){ - logger.error("The jobs list \"" + jl.getName() + "\" can not be added into the UWS " + getName() + " !", ise); + logger.logUWS(LogLevel.ERROR, jl, "ADD_JOB_LIST", "The jobs list \"" + jl.getName() + "\" can not be added into the UWS " + getName() + ": it may already be associated with one!", ise); return false; } return true; } - /*public final JobList removeJobList(String name){ - JobList jl = mapJobLists.get(name); - if (jl != null){ - if (removeJobList(jl)) - return jl; - } - return null; - }*/ - - /* - * Removes the given jobs list from this UWS. - * - * @param jl The jobs list to remove. - * - * @return true if the jobs list has been successfully removed, false otherwise. 
- * - * @see JobList#removeAll() - * @see JobList#setUWS(AbstractUWS) - * - public boolean removeJobList(JobList jl){ - if (jl == null) - return false; - - jl = mapJobLists.remove(jl.getName()); - if (jl != null){ - jl.removeAll(); - jl.setUWS(null); - } - return jl != null; - }*/ - + @Override public final boolean destroyJobList(String name){ return destroyJobList(mapJobLists.get(name)); } @@ -891,7 +1136,7 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory * @return true if the given jobs list has been destroyed, false otherwise. * * @see JobList#clear() - * @see JobList#setUWS(AbstractUWS) + * @see JobList#setUWS(UWS) */ public boolean destroyJobList(JobList jl){ if (jl == null) @@ -903,7 +1148,7 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory jl.clear(); jl.setUWS(null); }catch(IllegalStateException ise){ - getLogger().warning("Impossible to erase completely the association between the jobs list \"" + jl.getName() + "\" and the UWS \"" + getName() + "\", because: " + ise.getMessage()); + logger.logUWS(LogLevel.WARNING, jl, "DESTROY_JOB_LIST", "Impossible to erase completely the association between the jobs list \"" + jl.getName() + "\" and the UWS \"" + getName() + "\"!", ise); } } return jl != null; @@ -946,6 +1191,7 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory * @see AcceptHeader#AcceptHeader(String) * @see AcceptHeader#getOrderedMimeTypes() */ + @Override public final UWSSerializer getSerializer(String mimeTypes) throws UWSException{ UWSSerializer choosenSerializer = null; @@ -963,7 +1209,7 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory if (choosenSerializer == null){ choosenSerializer = serializers.get(defaultSerializer); if (choosenSerializer == null) - throw UWSExceptionFactory.missingSerializer(mimeTypes + " (given MIME types) and " + defaultSerializer + " (default serializer MIME type)"); + throw new 
UWSException(UWSException.INTERNAL_SERVER_ERROR, "Missing UWS serializer for the MIME types: " + mimeTypes + " (given MIME types) and " + defaultSerializer + " (default serializer MIME type)" + "!"); } return choosenSerializer; @@ -1017,6 +1263,7 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory * * @return The used UserIdentifier (MAY BE NULL). */ + @Override public final UserIdentifier getUserIdentifier(){ return userIdentifier; } @@ -1038,6 +1285,7 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory * * @return Its UWS URL interpreter. */ + @Override public final UWSUrl getUrlInterpreter(){ return urlInterpreter; } @@ -1045,7 +1293,7 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory /** * Sets the UWS URL interpreter to use in this UWS. * - * @param urlInterpreter Its new UWS URL interpreter (may be null. In this case, it will be created from the next request ; see {@link #executeRequest(HttpServletRequest, HttpServletResponse)}). + * @param urlInterpreter Its new UWS URL interpreter (may be null. In this case, it will be created from the next request ; see {@link #service(HttpServletRequest, HttpServletResponse)}). */ public final void setUrlInterpreter(UWSUrl urlInterpreter){ this.urlInterpreter = urlInterpreter; @@ -1061,6 +1309,7 @@ public abstract class UWSServlet extends HttpServlet implements UWS, UWSFactory * * @return Its backup manager. */ + @Override public final UWSBackupManager getBackupManager(){ return backupManager; } diff --git a/src/uws/service/UWSUrl.java b/src/uws/service/UWSUrl.java index 95073f706940405bd76786c856494b980aa46bad..5b0cea96b71071192a4d6d1740781b4eba3d944b 100644 --- a/src/uws/service/UWSUrl.java +++ b/src/uws/service/UWSUrl.java @@ -16,28 +16,25 @@ package uws.service; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . 
* - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.Serializable; - import java.net.MalformedURLException; import java.net.URL; - import java.util.Map; import javax.servlet.http.HttpServletRequest; -import uws.UWSException; import uws.UWSToolBox; - import uws.job.UWSJob; /** * This class helps managing with UWS URLs and URIs. * - * @author Grégory Mantelet (CDS) - * @version 05/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (09/2014) */ public class UWSUrl implements Serializable { private static final long serialVersionUID = 1L; @@ -91,16 +88,16 @@ public class UWSUrl implements Serializable { * * @param baseURI The baseURI to consider in all URL or request parsing. * - * @throws UWSException If the given baseURI is null or is an empty string. + * @throws NullPointerException If the given baseURI is null or is an empty string. */ - public UWSUrl(String baseURI) throws UWSException{ + public UWSUrl(String baseURI) throws NullPointerException{ if (baseURI == null) - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "The given base UWS URI is NULL !"); + throw new NullPointerException("The given base UWS URI is NULL!"); this.baseURI = normalizeURI(baseURI); if (baseURI.length() == 0) - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "The given base UWS URI is empty !"); + throw new NullPointerException("The given base UWS URI is empty!"); } /** @@ -108,19 +105,20 @@ public class UWSUrl implements Serializable { * * @param request The request to parse to get the baseURI. * - * @throws UWSException If the given request is null or if the extracted baseURI is null or is an empty string. + * @throws NullPointerException If the given request is null or if the extracted baseURI is null or is an empty string. 
* * @see #extractBaseURI(HttpServletRequest) */ - public UWSUrl(HttpServletRequest request) throws UWSException{ + public UWSUrl(HttpServletRequest request) throws NullPointerException{ + // Extract the base URI: String uri = extractBaseURI(request); if (uri == null) - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "The extracted base UWS URI is NULL !"); - - baseURI = normalizeURI(uri); + throw new NullPointerException("The extracted base UWS URI is NULL!"); + else + baseURI = normalizeURI(uri); - if (baseURI.length() == 0) - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "The extracted base UWS URI is empty !"); + // Load the rest of the request: + load(request); } /** @@ -509,9 +507,9 @@ public class UWSUrl implements Serializable { public final void setUwsURI(String uwsURI){ if (uwsURI == null || uwsURI.trim().length() == 0) this.uwsURI = null; - else{ + else this.uwsURI = uwsURI.trim(); - } + loadUwsURI(); updateRequestURL(); } diff --git a/src/uws/service/UserIdentifier.java b/src/uws/service/UserIdentifier.java index 5ec66dc624bc800ffb72f5b41f12e097005fff74..af08b4740e52fc6d70371f7d0c444654eab6ed7d 100644 --- a/src/uws/service/UserIdentifier.java +++ b/src/uws/service/UserIdentifier.java @@ -22,15 +22,13 @@ package uws.service; import java.io.Serializable; import java.util.Map; -import uws.job.user.JobOwner; - -import uws.service.UWSUrl; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import uws.UWSException; +import uws.job.user.JobOwner; import uws.service.actions.UWSAction; import uws.service.backup.DefaultUWSBackupManager; -import uws.UWSException; - -import javax.servlet.http.HttpServletRequest; /** *

    Lets defining how identifying a user thanks to a HTTP request.

    @@ -67,7 +65,7 @@ public interface UserIdentifier extends Serializable { * * @param id ID of the user. * @param pseudo Pseudo of the user (may be NULL). - * @param otherdata Other data about the user (may be NULL or empty). + * @param otherData Other data about the user (may be NULL or empty). * * @return The corresponding user. * diff --git a/src/uws/service/actions/AddJob.java b/src/uws/service/actions/AddJob.java index a7265016ad2f5f296e7db559587cdecbca4ec5e3..3d00f3d1a6788fc4da07acc8bdb896fed9f9514c 100644 --- a/src/uws/service/actions/AddJob.java +++ b/src/uws/service/actions/AddJob.java @@ -16,7 +16,8 @@ package uws.service.actions; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.IOException; @@ -26,14 +27,12 @@ import javax.servlet.http.HttpServletResponse; import uws.UWSException; import uws.UWSExceptionFactory; - import uws.job.JobList; import uws.job.UWSJob; - import uws.job.user.JobOwner; - import uws.service.UWSService; import uws.service.UWSUrl; +import uws.service.log.UWSLog.LogLevel; /** *

    The "Add Job" action of a UWS.

    @@ -43,8 +42,8 @@ import uws.service.UWSUrl; *

    This action creates a new job and adds it to the specified jobs list. * The response of this action is a redirection to the new job resource (that is to say: a redirection to the job summary of the new job).

    * - * @author Grégory Mantelet (CDS) - * @version 05/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (04/2015) */ public class AddJob extends UWSAction { private static final long serialVersionUID = 1L; @@ -74,7 +73,7 @@ public class AddJob extends UWSAction { *
  • the UWS URL does not make a reference to a job (so: no job ID),
  • *
  • the HTTP method is HTTP-POST.
  • * - * @see uws.service.actions.UWSAction#match(uws.service.UWSUrl, java.lang.String, javax.servlet.http.HttpServletRequest) + * @see uws.service.actions.UWSAction#match(UWSUrl, JobOwner, HttpServletRequest) */ @Override public boolean match(UWSUrl urlInterpreter, JobOwner user, HttpServletRequest request) throws UWSException{ @@ -87,11 +86,10 @@ public class AddJob extends UWSAction { * * @see #getJobsList(UWSUrl) * @see uws.service.UWSFactory#createJob(HttpServletRequest, JobOwner) - * @see UWSService#setExecutionManager(uws.job.manager.ExecutionManager) * @see JobList#addNewJob(UWSJob) * @see UWSService#redirect(String, HttpServletRequest, JobOwner, String, HttpServletResponse) * - * @see uws.service.actions.UWSAction#apply(uws.service.UWSUrl, java.lang.String, javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse) + * @see uws.service.actions.UWSAction#apply(UWSUrl, JobOwner, HttpServletRequest, HttpServletResponse) */ @Override public boolean apply(UWSUrl urlInterpreter, JobOwner user, HttpServletRequest request, HttpServletResponse response) throws UWSException, IOException{ @@ -100,10 +98,16 @@ public class AddJob extends UWSAction { // Forbids the job creation if the user has not the WRITE permission for the specified jobs list: if (user != null && !user.hasWritePermission(jobsList)) - throw UWSExceptionFactory.writePermissionDenied(user, true, jobsList.getName()); + throw new UWSException(UWSException.PERMISSION_DENIED, UWSExceptionFactory.writePermissionDenied(user, true, jobsList.getName())); // Create the job: - UWSJob newJob = uws.getFactory().createJob(request, user); + UWSJob newJob; + try{ + newJob = uws.getFactory().createJob(request, user); + }catch(UWSException ue){ + getLogger().logUWS(LogLevel.ERROR, urlInterpreter, "ADD_JOB", "Can not create a new job!", ue); + throw ue; + } // Add it to the jobs list: if (jobsList.addNewJob(newJob) != null){ @@ -113,7 +117,7 @@ public class AddJob extends UWSAction { return true; 
}else - throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Unable to add the new job to the jobs list. (ID of the new job = \"" + newJob.getJobId() + "\" ; ID already used = " + (jobsList.getJob(newJob.getJobId()) != null) + ")"); + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Unable to add the new job to the jobs list for an unknown reason. (ID of the new job = \"" + newJob.getJobId() + "\" ; ID already used = " + (jobsList.getJob(newJob.getJobId()) != null) + ")"); } } diff --git a/src/uws/service/actions/DestroyJob.java b/src/uws/service/actions/DestroyJob.java index d6e5d33ef9bafda2d79580816a8b8d33f7932dd8..f2629e14424051ddec7d998999204e0824ebb61f 100644 --- a/src/uws/service/actions/DestroyJob.java +++ b/src/uws/service/actions/DestroyJob.java @@ -16,7 +16,8 @@ package uws.service.actions; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.IOException; @@ -25,14 +26,13 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import uws.UWSException; - +import uws.UWSToolBox; import uws.job.JobList; import uws.job.UWSJob; - import uws.job.user.JobOwner; - import uws.service.UWSService; import uws.service.UWSUrl; +import uws.service.log.UWSLog.LogLevel; /** *

    The "Destroy Job" action of a UWS.

    @@ -42,8 +42,8 @@ import uws.service.UWSUrl; *

    This action destroys the job specified in the UWS URL. * The response of this action is a redirection to the jobs list.

    * - * @author Grégory Mantelet (CDS) - * @version 05/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (04/2015) */ public class DestroyJob extends UWSAction { private static final long serialVersionUID = 1L; @@ -75,11 +75,11 @@ public class DestroyJob extends UWSAction { *
  • ...or the HTTP method is HTTP-POST and there is the parameter {@link UWSJob#PARAM_ACTION PARAM_ACTION} (=ACTION) with the value {@link UWSJob#ACTION_DELETE ACTION_DELETE} (=DELETE).
  • * * - * @see uws.service.actions.UWSAction#match(uws.service.UWSUrl, java.lang.String, javax.servlet.http.HttpServletRequest) + * @see uws.service.actions.UWSAction#match(UWSUrl, JobOwner, HttpServletRequest) */ @Override public boolean match(UWSUrl urlInterpreter, JobOwner user, HttpServletRequest request) throws UWSException{ - return (urlInterpreter.hasJobList() && urlInterpreter.hasJob() && (request.getMethod().equalsIgnoreCase("delete") || (request.getMethod().equalsIgnoreCase("post") && request.getParameter(UWSJob.PARAM_ACTION) != null && request.getParameter(UWSJob.PARAM_ACTION).equalsIgnoreCase(UWSJob.ACTION_DELETE)))); + return urlInterpreter.hasJobList() && urlInterpreter.hasJob() && (request.getMethod().equalsIgnoreCase("delete") || (request.getMethod().equalsIgnoreCase("post") && UWSToolBox.hasParameter(UWSJob.PARAM_ACTION, UWSJob.ACTION_DELETE, request, false))); } /** @@ -92,7 +92,7 @@ public class DestroyJob extends UWSAction { * @see JobList#destroyJob(String,JobOwner) * @see UWSService#redirect(String, HttpServletRequest, JobOwner, String, HttpServletResponse) * - * @see uws.service.actions.UWSAction#apply(uws.service.UWSUrl, java.lang.String, javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse) + * @see uws.service.actions.UWSAction#apply(UWSUrl, JobOwner, HttpServletRequest, HttpServletResponse) */ @Override public boolean apply(UWSUrl urlInterpreter, JobOwner user, HttpServletRequest request, HttpServletResponse response) throws UWSException, IOException{ @@ -100,7 +100,13 @@ public class DestroyJob extends UWSAction { JobList jobsList = getJobsList(urlInterpreter); // Destroy the job: - boolean destroyed = jobsList.destroyJob(urlInterpreter.getJobId(), user); + boolean destroyed; + try{ + destroyed = jobsList.destroyJob(urlInterpreter.getJobId(), user); + }catch(UWSException ue){ + getLogger().logUWS(LogLevel.ERROR, urlInterpreter, "DESTROY_JOB", "Can not destroy the job \"" + urlInterpreter.getJobId() + "\"!", 
ue); + throw ue; + } // Make a redirection to the jobs list: uws.redirect(urlInterpreter.listJobs(jobsList.getName()).getRequestURL(), request, user, getName(), response); diff --git a/src/uws/service/actions/GetJobParam.java b/src/uws/service/actions/GetJobParam.java index 048ceffaaa583fb12050a1bb614d7ce9e982cb48..be56b1ad5074d577a4d56c65d74867d497537257 100644 --- a/src/uws/service/actions/GetJobParam.java +++ b/src/uws/service/actions/GetJobParam.java @@ -16,31 +16,28 @@ package uws.service.actions; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.IOException; import java.io.InputStream; import javax.servlet.ServletOutputStream; - import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import uws.UWSException; -import uws.UWSExceptionFactory; import uws.UWSToolBox; - import uws.job.ErrorSummary; import uws.job.Result; import uws.job.UWSJob; - import uws.job.serializer.UWSSerializer; - import uws.job.user.JobOwner; - import uws.service.UWSService; import uws.service.UWSUrl; +import uws.service.log.UWSLog.LogLevel; +import uws.service.request.UploadFile; /** *

    The "Get Job Parameter" action of a UWS.

    @@ -52,8 +49,8 @@ import uws.service.UWSUrl; * whereas it is a complex type (i.e. results, parameters, ...) the value is the serialization of the job attribute itself. * The serializer is choosen in function of the HTTP Accept header.

    * - * @author Grégory Mantelet (CDS) - * @version 06/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (04/2015) */ public class GetJobParam extends UWSAction { private static final long serialVersionUID = 1L; @@ -85,7 +82,7 @@ public class GetJobParam extends UWSAction { *
  • the HTTP method is HTTP-GET.
  • * * - * @see uws.service.actions.UWSAction#match(uws.service.UWSUrl, java.lang.String, javax.servlet.http.HttpServletRequest) + * @see uws.service.actions.UWSAction#match(UWSUrl, JobOwner, HttpServletRequest) */ @Override public boolean match(UWSUrl urlInterpreter, JobOwner user, HttpServletRequest request) throws UWSException{ @@ -99,11 +96,11 @@ public class GetJobParam extends UWSAction { *

    Note: if the specified attribute is simple (i.e. jobID, runID, startTime, ...) it will not serialized ! The response will * merely be the job attribute value (so, the content type will be: text/plain).

    * - * @see #getJob(UWSUrl, String) + * @see #getJob(UWSUrl) * @see UWSService#getSerializer(String) * @see UWSJob#serialize(ServletOutputStream, UWSSerializer) * - * @see uws.service.actions.UWSAction#apply(uws.service.UWSUrl, java.lang.String, javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse) + * @see uws.service.actions.UWSAction#apply(UWSUrl, JobOwner, HttpServletRequest, HttpServletResponse) */ @Override public boolean apply(UWSUrl urlInterpreter, JobOwner user, HttpServletRequest request, HttpServletResponse response) throws UWSException, IOException{ @@ -116,7 +113,7 @@ public class GetJobParam extends UWSAction { if (attributes[0].equalsIgnoreCase(UWSJob.PARAM_RESULTS) && attributes.length > 1){ Result result = job.getResult(attributes[1]); if (result == null) - throw UWSExceptionFactory.incorrectJobResult(job.getJobId(), attributes[1]); + throw new UWSException(UWSException.NOT_FOUND, "No result identified with \"" + attributes[1] + "\" in the job \"" + job.getJobId() + "\"!"); else if (result.isRedirectionRequired()) uws.redirect(result.getHref(), request, user, getName(), response); else{ @@ -125,6 +122,7 @@ public class GetJobParam extends UWSAction { input = uws.getFileManager().getResultInput(result, job); UWSToolBox.write(input, result.getMimeType(), result.getSize(), response); }catch(IOException ioe){ + getLogger().logUWS(LogLevel.ERROR, result, "GET_RESULT", "Can not read the content of the result \"" + result.getId() + "\" of the job \"" + job.getJobId() + "\"!", ioe); throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, ioe, "Can not read the content of the result " + result.getId() + " (job ID: " + job.getJobId() + ")."); }finally{ if (input != null) @@ -135,30 +133,64 @@ public class GetJobParam extends UWSAction { else if (attributes[0].equalsIgnoreCase(UWSJob.PARAM_ERROR_SUMMARY) && attributes.length > 1 && attributes[1].equalsIgnoreCase("details")){ ErrorSummary error = job.getErrorSummary(); if (error 
== null) - throw UWSExceptionFactory.noErrorSummary(job.getJobId()); + throw new UWSException(UWSException.NOT_FOUND, "No error summary for the job \"" + job.getJobId() + "\"!"); else{ InputStream input = null; try{ input = uws.getFileManager().getErrorInput(error, job); - UWSToolBox.write(input, "text/plain", uws.getFileManager().getErrorSize(error, job), response); + UWSToolBox.write(input, getUWS().getErrorWriter().getErrorDetailsMIMEType(), uws.getFileManager().getErrorSize(error, job), response); }catch(IOException ioe){ + getLogger().logUWS(LogLevel.ERROR, error, "GET_ERROR", "Can not read the details of the error summary of the job \"" + job.getJobId() + "\"!", ioe); throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, ioe, "Can not read the error details (job ID: " + job.getJobId() + ")."); }finally{ if (input != null) input.close(); } } - } - // DEFAULT CASE: Display the serialization of the selected UWS object: + }// REFERENCE FILE: Display the content of the uploaded file or redirect to the URL (if it is a URL): + else if (attributes[0].equalsIgnoreCase(UWSJob.PARAM_PARAMETERS) && attributes.length > 1 && job.getAdditionalParameterValue(attributes[1]) != null && job.getAdditionalParameterValue(attributes[1]) instanceof UploadFile){ + UploadFile upl = (UploadFile)job.getAdditionalParameterValue(attributes[1]); + if (upl.getLocation().matches("^http(s)?://")) + uws.redirect(upl.getLocation(), request, user, getName(), response); + else{ + InputStream input = null; + try{ + input = uws.getFileManager().getUploadInput(upl); + UWSToolBox.write(input, upl.mimeType, upl.length, response); + }catch(IOException ioe){ + getLogger().logUWS(LogLevel.ERROR, upl, "GET_PARAMETER", "Can not read the content of the uploaded file \"" + upl.paramName + "\" of the job \"" + job.getJobId() + "\"!", ioe); + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, ioe, "Can not read the content of the uploaded file " + upl.paramName + " (job ID: " + job.getJobId() + 
")."); + }finally{ + if (input != null) + input.close(); + } + } + }// DEFAULT CASE: Display the serialization of the selected UWS object: else{ // Write the value/content of the selected attribute: UWSSerializer serializer = uws.getSerializer(request.getHeader("Accept")); String uwsField = attributes[0]; - if (uwsField == null || uwsField.trim().isEmpty() || (attributes.length <= 1 && (uwsField.equalsIgnoreCase(UWSJob.PARAM_ERROR_SUMMARY) || uwsField.equalsIgnoreCase(UWSJob.PARAM_RESULTS) || uwsField.equalsIgnoreCase(UWSJob.PARAM_PARAMETERS)))) + boolean jobSerialization = false; + // Set the content type: + if (uwsField == null || uwsField.trim().isEmpty() || (attributes.length <= 1 && (uwsField.equalsIgnoreCase(UWSJob.PARAM_ERROR_SUMMARY) || uwsField.equalsIgnoreCase(UWSJob.PARAM_RESULTS) || uwsField.equalsIgnoreCase(UWSJob.PARAM_PARAMETERS)))){ response.setContentType(serializer.getMimeType()); - else + jobSerialization = true; + }else response.setContentType("text/plain"); - job.serialize(response.getOutputStream(), attributes, serializer); + + // Set the character encoding: + response.setCharacterEncoding(UWSToolBox.DEFAULT_CHAR_ENCODING); + // Serialize the selected attribute: + try{ + job.serialize(response.getOutputStream(), attributes, serializer); + }catch(Exception e){ + if (!(e instanceof UWSException)){ + String errorMsgPart = (jobSerialization ? 
"the job \"" + job.getJobId() + "\"" : "the parameter " + uwsField + " of the job \"" + job.getJobId() + "\""); + getLogger().logUWS(LogLevel.ERROR, urlInterpreter, "SERIALIZE", "Can not serialize " + errorMsgPart + "!", e); + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, e, "Can not format properly " + errorMsgPart + "!"); + }else + throw (UWSException)e; + } } return true; diff --git a/src/uws/service/actions/JobSummary.java b/src/uws/service/actions/JobSummary.java index adf9ed059e341dcaa2cbf355c3a45dc08c195df3..e35e1d80cab2d67f234f2d6044c2d0c467e47fc0 100644 --- a/src/uws/service/actions/JobSummary.java +++ b/src/uws/service/actions/JobSummary.java @@ -16,26 +16,24 @@ package uws.service.actions; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.IOException; import javax.servlet.ServletOutputStream; - import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import uws.UWSException; - +import uws.UWSToolBox; import uws.job.UWSJob; - import uws.job.serializer.UWSSerializer; - import uws.job.user.JobOwner; - import uws.service.UWSService; import uws.service.UWSUrl; +import uws.service.log.UWSLog.LogLevel; /** *

    The "Get Job" action of a UWS.

    @@ -45,8 +43,8 @@ import uws.service.UWSUrl; *

    This action returns the summary of the job specified in the given UWS URL. * This summary is serialized by the {@link UWSSerializer} choosed in function of the HTTP Accept header.

    * - * @author Grégory Mantelet (CDS) - * @version 05/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (04/2015) */ public class JobSummary extends UWSAction { private static final long serialVersionUID = 1L; @@ -78,7 +76,7 @@ public class JobSummary extends UWSAction { *
  • the HTTP method is HTTP-GET.
  • * * - * @see uws.service.actions.UWSAction#match(uws.service.UWSUrl, java.lang.String, javax.servlet.http.HttpServletRequest) + * @see uws.service.actions.UWSAction#match(UWSUrl, JobOwner, HttpServletRequest) */ @Override public boolean match(UWSUrl urlInterpreter, JobOwner user, HttpServletRequest request) throws UWSException{ @@ -89,11 +87,11 @@ public class JobSummary extends UWSAction { * Gets the specified job (and throw an error if not found), * chooses the serializer and write the serialization of the job in the given response. * - * @see #getJob(UWSUrl, String) + * @see #getJob(UWSUrl) * @see UWSService#getSerializer(String) * @see UWSJob#serialize(ServletOutputStream, UWSSerializer) * - * @see uws.service.actions.UWSAction#apply(uws.service.UWSUrl, java.lang.String, javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse) + * @see uws.service.actions.UWSAction#apply(UWSUrl, JobOwner, HttpServletRequest, HttpServletResponse) */ @Override public boolean apply(UWSUrl urlInterpreter, JobOwner user, HttpServletRequest request, HttpServletResponse response) throws UWSException, IOException{ @@ -103,7 +101,16 @@ public class JobSummary extends UWSAction { // Write the job summary: UWSSerializer serializer = uws.getSerializer(request.getHeader("Accept")); response.setContentType(serializer.getMimeType()); - job.serialize(response.getOutputStream(), serializer, user); + response.setCharacterEncoding(UWSToolBox.DEFAULT_CHAR_ENCODING); + try{ + job.serialize(response.getOutputStream(), serializer, user); + }catch(Exception e){ + if (!(e instanceof UWSException)){ + getLogger().logUWS(LogLevel.ERROR, urlInterpreter, "SERIALIZE", "Can not serialize the job \"" + job.getJobId() + "\"!", e); + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, e, "Can not format properly the job \"" + job.getJobId() + "\"!"); + }else + throw (UWSException)e; + } return true; } diff --git a/src/uws/service/actions/ListJobs.java 
b/src/uws/service/actions/ListJobs.java index dfeeb4fee6413d55d03114b069f88a4f4a33216f..2ce5c852cd32b2d6cf99ef66ee23d5f6ec25bbdf 100644 --- a/src/uws/service/actions/ListJobs.java +++ b/src/uws/service/actions/ListJobs.java @@ -16,26 +16,24 @@ package uws.service.actions; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.IOException; import javax.servlet.ServletOutputStream; - import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import uws.UWSException; - +import uws.UWSToolBox; import uws.job.JobList; - import uws.job.serializer.UWSSerializer; - import uws.job.user.JobOwner; - import uws.service.UWSService; import uws.service.UWSUrl; +import uws.service.log.UWSLog.LogLevel; /** *

    The "List Jobs" action of a UWS.

    @@ -45,8 +43,8 @@ import uws.service.UWSUrl; *

    This action returns the list of jobs contained in the jobs list specified by the URL of the request. * This list is serialized by the {@link UWSSerializer} choosed in function of the HTTP Accept header.

    * - * @author Grégory Mantelet (CDS) - * @version 05/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (04/2015) */ public class ListJobs extends UWSAction { private static final long serialVersionUID = 1L; @@ -77,7 +75,7 @@ public class ListJobs extends UWSAction { *
  • the HTTP method is HTTP-GET.
  • * * - * @see uws.service.actions.UWSAction#match(uws.service.UWSUrl, java.lang.String, javax.servlet.http.HttpServletRequest) + * @see uws.service.actions.UWSAction#match(UWSUrl, JobOwner, HttpServletRequest) */ @Override public boolean match(UWSUrl urlInterpreter, JobOwner user, HttpServletRequest request) throws UWSException{ @@ -90,9 +88,9 @@ public class ListJobs extends UWSAction { * * @see #getJobsList(UWSUrl) * @see UWSService#getSerializer(String) - * @see JobList#serialize(ServletOutputStream, UWSSerializer, String) + * @see JobList#serialize(ServletOutputStream, UWSSerializer, JobOwner) * - * @see uws.service.actions.UWSAction#apply(uws.service.UWSUrl, java.lang.String, javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse) + * @see uws.service.actions.UWSAction#apply(UWSUrl, JobOwner, HttpServletRequest, HttpServletResponse) */ @Override public boolean apply(UWSUrl urlInterpreter, JobOwner user, HttpServletRequest request, HttpServletResponse response) throws UWSException, IOException{ @@ -102,7 +100,16 @@ public class ListJobs extends UWSAction { // Write the jobs list: UWSSerializer serializer = uws.getSerializer(request.getHeader("Accept")); response.setContentType(serializer.getMimeType()); - jobsList.serialize(response.getOutputStream(), serializer, user); + response.setCharacterEncoding(UWSToolBox.DEFAULT_CHAR_ENCODING); + try{ + jobsList.serialize(response.getOutputStream(), serializer, user); + }catch(Exception e){ + if (!(e instanceof UWSException)){ + getLogger().logUWS(LogLevel.ERROR, urlInterpreter, "SERIALIZE", "Can not serialize the jobs list \"" + jobsList.getName() + "\"!", e); + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, e, "Can not format properly the jobs list \"" + jobsList.getName() + "\"!"); + }else + throw (UWSException)e; + } return true; } diff --git a/src/uws/service/actions/SetJobParam.java b/src/uws/service/actions/SetJobParam.java index 
02026bbff3fa922f0552a16b597afa26a2ea6c45..51d848b27aa9a314f37dfcd7233dc68f3d127ecf 100644 --- a/src/uws/service/actions/SetJobParam.java +++ b/src/uws/service/actions/SetJobParam.java @@ -16,7 +16,8 @@ package uws.service.actions; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.IOException; @@ -25,14 +26,14 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import uws.UWSException; - +import uws.UWSToolBox; import uws.job.UWSJob; - import uws.job.parameters.UWSParameters; import uws.job.user.JobOwner; - +import uws.service.UWSFactory; import uws.service.UWSService; import uws.service.UWSUrl; +import uws.service.log.UWSLog.LogLevel; /** *

    The "Set Job Parameter" action of a UWS.

    @@ -42,8 +43,8 @@ import uws.service.UWSUrl; *

    This action sets the value of the specified job attribute. * The response of this action is a redirection to the job summary.

    * - * @author Grégory Mantelet (CDS) - * @version 05/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (04/2015) */ public class SetJobParam extends UWSAction { private static final long serialVersionUID = 1L; @@ -70,16 +71,16 @@ public class SetJobParam extends UWSAction { * Checks whether: *
      *
    • a job list name is specified in the given UWS URL (note: by default, the existence of the jobs list is not checked),
    • - *
    • a job ID is given in the UWS URL (note: by default, the existence of the job is not checked),
    • + *
    • a job ID is given in the UWS URL (note: by default, the existence of the job is not yet checked),
    • *
    • if the HTTP method is HTTP-POST: there is exactly one attribute and at least one parameter
    • *
    • if the HTTP method is HTTP-PUT: there are at least two attributes ({@link UWSJob#PARAM_PARAMETERS}/{parameter_name}) and there are at least two parameters
    • *
    * - * @see uws.service.actions.UWSAction#match(uws.service.UWSUrl, java.lang.String, javax.servlet.http.HttpServletRequest) + * @see uws.service.actions.UWSAction#match(UWSUrl, JobOwner, HttpServletRequest) */ @Override public boolean match(UWSUrl urlInterpreter, JobOwner user, HttpServletRequest request) throws UWSException{ - return (urlInterpreter.hasJobList() && urlInterpreter.hasJob() && ((request.getMethod().equalsIgnoreCase("post") && (!urlInterpreter.hasAttribute() || urlInterpreter.getAttributes().length == 1) && request.getParameterMap().size() > 0) || (request.getMethod().equalsIgnoreCase("put") && urlInterpreter.getAttributes().length >= 2 && urlInterpreter.getAttributes()[0].equalsIgnoreCase(UWSJob.PARAM_PARAMETERS) && request.getParameter(urlInterpreter.getAttributes()[1]) != null))); + return (urlInterpreter.hasJobList() && urlInterpreter.hasJob() && ((request.getMethod().equalsIgnoreCase("post") && (!urlInterpreter.hasAttribute() || urlInterpreter.getAttributes().length == 1)) || (request.getMethod().equalsIgnoreCase("put") && urlInterpreter.getAttributes().length >= 2 && urlInterpreter.getAttributes()[0].equalsIgnoreCase(UWSJob.PARAM_PARAMETERS) && UWSToolBox.hasParameter(urlInterpreter.getAttributes()[1], request, false)))); } /** @@ -87,19 +88,24 @@ public class SetJobParam extends UWSAction { * changes the value of the specified job attribute * and makes a redirection to the job summary.

    * - * @see #getJob(UWSUrl, String) - * @see UWSService#createUWSParameters(HttpServletRequest) - * @see UWSJob#addOrUpdateParameters(java.util.Map) + * @see #getJob(UWSUrl) + * @see UWSFactory#createUWSParameters(HttpServletRequest) + * @see UWSJob#addOrUpdateParameters(UWSParameters, JobOwner) * @see UWSService#redirect(String, HttpServletRequest, JobOwner, String, HttpServletResponse) - * - * @see uws.service.actions.UWSAction#apply(uws.service.UWSUrl, java.lang.String, javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse) + * @see uws.service.actions.UWSAction#apply(UWSUrl, JobOwner, HttpServletRequest, HttpServletResponse) */ @Override public boolean apply(UWSUrl urlInterpreter, JobOwner user, HttpServletRequest request, HttpServletResponse response) throws UWSException, IOException{ // Get the job: UWSJob job = getJob(urlInterpreter); - UWSParameters params = uws.getFactory().createUWSParameters(request); + UWSParameters params; + try{ + params = uws.getFactory().createUWSParameters(request); + }catch(UWSException ue){ + getLogger().logUWS(LogLevel.ERROR, request, "SET_PARAM", "Can not parse the sent UWS parameters!", ue); + throw ue; + } // Update the job parameters: boolean updated = job.addOrUpdateParameters(params, user); diff --git a/src/uws/service/actions/SetUWSParameter.java b/src/uws/service/actions/SetUWSParameter.java new file mode 100644 index 0000000000000000000000000000000000000000..5364b3e00d011f92b33bd35e1bd1d5bd64c4a697 --- /dev/null +++ b/src/uws/service/actions/SetUWSParameter.java @@ -0,0 +1,110 @@ +package uws.service.actions; + +/* + * This file is part of UWSLibrary. + * + * UWSLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
+ * + * UWSLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with UWSLibrary. If not, see . + * + * Copyright 2014-2015 - Astronomisches Rechen Institut (ARI) + */ + +import java.io.IOException; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import uws.UWSException; +import uws.UWSExceptionFactory; +import uws.UWSToolBox; +import uws.job.UWSJob; +import uws.job.parameters.UWSParameters; +import uws.job.user.JobOwner; +import uws.service.UWSService; +import uws.service.UWSUrl; + +/** + *

    The UWS action which lets set the phase (RUN or ABORT), the execution duration and the destruction time of a job + * with a POST or PUT request on {job-id}/{uws-param}.

    + * + *

    Note: The corresponding name is {@link UWSAction#SET_UWS_PARAMETER}.

    + * + * @author Grégory Mantelet (ARI) + * @version 4.1 (04/2015) + * @since 4.1 + */ +public class SetUWSParameter extends UWSAction { + private static final long serialVersionUID = 1L; + + public SetUWSParameter(final UWSService u){ + super(u); + } + + /** + * @see UWSAction#SET_UWS_PARAMETER + * @see uws.service.actions.UWSAction#getName() + */ + @Override + public String getName(){ + return SET_UWS_PARAMETER; + } + + @Override + public String getDescription(){ + return "Let change one of the standard UWS parameters of a job (e.g. phase, executionduration, destruction) (URL: {baseUWS_URL}/{jobListName}/{jobId}/{uws-param}, where {uws-param} = \"phase\" or \"executionduration\" or \"destruction\", Method: HTTP-POST or HTTP-PUT, Parameter: \"{uws-param}={param-value}\" in POST and \"{param-value\" in PUT (content-type:text/plain))"; + } + + /** + * Checks whether: + *
      + *
    • a job list name is specified in the given UWS URL (note: by default, the existence of the jobs list is not checked),
    • + *
    • a job ID is given in the UWS URL (note: by default, the existence of the job is not yet checked),
    • + *
    • the job attribute "phase", "runID", "executionduration" or "destruction" is used in the UWS URL, + *
    • the HTTP method is HTTP-POST or HTTP-PUT.
    • + *
    + * @see uws.service.actions.UWSAction#match(UWSUrl, JobOwner, HttpServletRequest) + */ + @Override + public boolean match(UWSUrl urlInterpreter, JobOwner user, HttpServletRequest request) throws UWSException{ + return (urlInterpreter.hasJobList() && urlInterpreter.hasJob() && urlInterpreter.getAttributes().length == 1 && urlInterpreter.getAttributes()[0].toLowerCase().matches(UWSParameters.UWS_RW_PARAMETERS_REGEXP) && (request.getMethod().equalsIgnoreCase("post") || request.getMethod().equalsIgnoreCase("put")) && UWSToolBox.hasParameter(urlInterpreter.getAttributes()[0], request, false)); + } + + /** + * Get the specified job (throw an error if not found), + * and update the specified UWS standard parameter. + * + * @see #getJob(UWSUrl) + * @see UWSJob#addOrUpdateParameter(String, Object) + * @see UWSService#redirect(String, HttpServletRequest, JobOwner, String, HttpServletResponse) + * + * @see uws.service.actions.UWSAction#apply(UWSUrl, JobOwner, HttpServletRequest, HttpServletResponse) + */ + @Override + public boolean apply(UWSUrl urlInterpreter, JobOwner user, HttpServletRequest request, HttpServletResponse response) throws UWSException, IOException{ + // Get the job: + UWSJob job = getJob(urlInterpreter); + + // Forbids the action if the user has not the WRITE permission for the specified job: + if (user != null && !user.hasWritePermission(job)) + throw new UWSException(UWSException.PERMISSION_DENIED, UWSExceptionFactory.writePermissionDenied(user, true, job.getJobId())); + + String name = urlInterpreter.getAttributes()[0]; + job.addOrUpdateParameter(name, UWSToolBox.getParameter(name, request, false), user); + + // Make a redirection to the job: + uws.redirect(urlInterpreter.jobSummary(urlInterpreter.getJobListName(), job.getJobId()).getRequestURL(), request, user, getName(), response); + + return true; + } + +} diff --git a/src/uws/service/actions/ShowHomePage.java b/src/uws/service/actions/ShowHomePage.java index 
c43749750a5c16ab68f84a14037dcaceeed0c748..d40c892c556a2e577c183e8f2c3d51a64960a4d9 100644 --- a/src/uws/service/actions/ShowHomePage.java +++ b/src/uws/service/actions/ShowHomePage.java @@ -16,28 +16,26 @@ package uws.service.actions; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.io.PrintWriter; - import java.net.URL; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import uws.UWSException; -import uws.UWSExceptionFactory; - +import uws.UWSToolBox; import uws.job.serializer.UWSSerializer; - import uws.job.user.JobOwner; - import uws.service.UWSService; import uws.service.UWSUrl; +import uws.service.log.UWSLog.LogLevel; /** *

    The "Show UWS Home Page" action of a UWS.

    @@ -46,8 +44,8 @@ import uws.service.UWSUrl; * *

    This action displays the UWS home page.

    * - * @author Grégory Mantelet (CDS) - * @version 05/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (04/2015) */ public class ShowHomePage extends UWSAction { private static final long serialVersionUID = 1L; @@ -85,7 +83,7 @@ public class ShowHomePage extends UWSAction { *
      *
    • Default home page ({@link UWSService#isDefaultHomePage()} returns true): * write the appropriate (considering the Accept header of the HTTP-Request) serialization of this UWS.
    • - *
    • Home redirection ({@link UWSService#isHomePageRedirection()} = true): call {@link UWSService#redirect(String, HttpServletRequest, HttpServletResponse)} with the {@link UWSService#getHomePage()} URL.
    • + *
    • Home redirection ({@link UWSService#isHomePageRedirection()} = true): call {@link UWSService#redirect(String, HttpServletRequest, JobOwner, String, HttpServletResponse)} with the {@link UWSService#getHomePage()} URL.
    • *
    • Otherwise (({@link UWSService#isHomePageRedirection()} = false)): read the content of the resource at the {@link UWSService#getHomePage()} URL and copy it in the given {@link HttpServletResponse}.
    • *
    * @@ -94,21 +92,35 @@ public class ShowHomePage extends UWSAction { * @throws IOException If there is an error while reading at a custom home page URL * or while writing in the given HttpServletResponse. * - * @see uws.service.actions.UWSAction#apply(uws.service.UWSUrl, java.lang.String, javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse) + * @see uws.service.actions.UWSAction#apply(UWSUrl, JobOwner, HttpServletRequest, HttpServletResponse) * @see UWSService#redirect(String, HttpServletRequest, JobOwner, String, HttpServletResponse) */ @Override public boolean apply(UWSUrl urlInterpreter, JobOwner user, HttpServletRequest request, HttpServletResponse response) throws UWSException, IOException{ + if (uws.isDefaultHomePage()){ UWSSerializer serializer = uws.getSerializer(request.getHeader("Accept")); response.setContentType(serializer.getMimeType()); - String serialization = serializer.getUWS(uws); + response.setCharacterEncoding(UWSToolBox.DEFAULT_CHAR_ENCODING); + // Get a short and simple serialization of this UWS: + String serialization; + try{ + serialization = serializer.getUWS(uws); + }catch(Exception e){ + if (!(e instanceof UWSException)){ + getLogger().logUWS(LogLevel.ERROR, urlInterpreter, "SERIALIZE", "Can't display the default home page, due to a serialization error!", e); + throw new UWSException(UWSException.NO_CONTENT, e, "No home page available for this UWS service!"); + }else + throw (UWSException)e; + } + // Write the simple UWS serialization in the given response: if (serialization != null){ PrintWriter output = response.getWriter(); output.print(serialization); output.flush(); }else - throw UWSExceptionFactory.incorrectSerialization(serialization, "the UWS " + uws.getName()); + throw new UWSException(UWSException.NO_CONTENT, "No home page available for this UWS service."); + }else{ if (uws.isHomePageRedirection()) uws.redirect(uws.getHomePage(), request, user, getName(), response); @@ -117,6 +129,7 @@ public class 
ShowHomePage extends UWSAction { BufferedReader reader = new BufferedReader(new InputStreamReader(homePageUrl.openStream())); response.setContentType("text/html"); + response.setCharacterEncoding(UWSToolBox.DEFAULT_CHAR_ENCODING); PrintWriter writer = response.getWriter(); try{ String line = null; diff --git a/src/uws/service/actions/UWSAction.java b/src/uws/service/actions/UWSAction.java index 381336386e74807182c44d071d2dcd913d7bfb20..5cd034423dee9859ced78127e672e20a90f1bb80 100644 --- a/src/uws/service/actions/UWSAction.java +++ b/src/uws/service/actions/UWSAction.java @@ -16,7 +16,8 @@ package uws.service.actions; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.IOException; @@ -26,15 +27,12 @@ import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import uws.UWSException; -import uws.UWSExceptionFactory; - import uws.job.JobList; import uws.job.UWSJob; - import uws.job.user.JobOwner; - import uws.service.UWSService; import uws.service.UWSUrl; +import uws.service.log.UWSLog; /** *

    Action of a UWS (i.e. "List Jobs", "Get Job", etc...). An instance of a UWSAction can be added to a given UWS thanks to the method @@ -43,8 +41,8 @@ import uws.service.UWSUrl; *

    WARNING: The action of a UWS have, each one, a different name. So be careful about the name of your UWS action ! * By default the name of a UWS action is the full java name of the class !

    * - * @author Grégory Mantelet (CDS) - * @version 05/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (11/2014) * * @see UWSService */ @@ -55,6 +53,9 @@ public abstract class UWSAction implements Serializable { public final static String LIST_JOBS = "List Jobs"; /** Name of the UWS action {@link AddJob}. */ public final static String ADD_JOB = "Add Job"; + /** Name of the UWS action {@link SetUWSParameter}. + * @since 4.1 */ + public final static String SET_UWS_PARAMETER = "Set UWS Parameter"; /** Name of the UWS action {@link DestroyJob}. */ public final static String DESTROY_JOB = "Destroy Job"; /** Name of the UWS action {@link JobSummary}. */ @@ -93,6 +94,17 @@ public abstract class UWSAction implements Serializable { return uws; } + /** + * Get the logger associated with this UWS service. + * + * @return UWS logger. + * + * @since 4.1 + */ + public final UWSLog getLogger(){ + return uws.getLogger(); + } + /** *

    Gets the name of this UWS action. MUST BE UNIQUE !

    * @@ -139,9 +151,9 @@ public abstract class UWSAction implements Serializable { if (jlName != null){ jobsList = uws.getJobList(jlName); if (jobsList == null) - throw UWSExceptionFactory.incorrectJobListName(jlName); + throw new UWSException(UWSException.NOT_FOUND, "Incorrect job list name! The jobs list " + jlName + " does not exist."); }else - throw UWSExceptionFactory.missingJobListName(); + throw new UWSException(UWSException.BAD_REQUEST, "Missing job list name!"); return jobsList; } @@ -150,7 +162,7 @@ public abstract class UWSAction implements Serializable { *

    Extracts the job ID from the given UWS URL * and gets the corresponding job from the UWS.

    * - *

    Note: This function calls {@link #getJob(UWSUrl, String, boolean)} with userId=null and checkUser=false !

    + *

    Note: This function calls {@link #getJob(UWSUrl, JobOwner)} with userId=null and checkUser=false !

    * * @param urlInterpreter The UWS URL which contains the ID of the job to get. * @@ -160,7 +172,7 @@ public abstract class UWSAction implements Serializable { * or if there are no corresponding jobs list and/or job in the UWS * or if the specified user has not enough rights to get the specified job. * - * @see #getJob(UWSUrl, String, boolean) + * @see #getJob(UWSUrl, JobOwner) */ protected final UWSJob getJob(UWSUrl urlInterpreter) throws UWSException{ return getJob(urlInterpreter, (JobOwner)null); @@ -181,7 +193,7 @@ public abstract class UWSAction implements Serializable { * * @see UWSUrl#getJobId() * @see #getJobsList(UWSUrl) - * @see JobList#getJob(String,String) + * @see JobList#getJob(String, JobOwner) * * @since 3.1 */ @@ -193,9 +205,9 @@ public abstract class UWSAction implements Serializable { JobList jobsList = getJobsList(urlInterpreter); job = jobsList.getJob(jobId, user); if (job == null) - throw UWSExceptionFactory.incorrectJobID(jobsList.getName(), jobId); + throw new UWSException(UWSException.NOT_FOUND, "Incorrect job ID! The job \"" + jobId + "\" does not exist in the jobs list \"" + jobsList.getName() + "\"."); }else - throw UWSExceptionFactory.missingJobID(); + throw new UWSException(UWSException.BAD_REQUEST, "Missing job ID!"); return job; } @@ -203,7 +215,7 @@ public abstract class UWSAction implements Serializable { /** *

    Extracts the job ID from the given UWS URL and gets the corresponding job from the given jobs list.

    * - *

    Note: This function calls {@link #getJob(UWSUrl, JobList, String, boolean)} with userId=null and checkUser=false !

    + *

    Note: This function calls {@link #getJob(UWSUrl, JobList, JobOwner)} with userId=null and checkUser=false !

    * * @param urlInterpreter The UWS URL which contains the ID of the job to get. * @param jobsList The jobs list which is supposed to contain the job to get. @@ -213,7 +225,7 @@ public abstract class UWSAction implements Serializable { * @throws UWSException If no job ID can be found in the given UWS URL * or if there are no corresponding job in the UWS. * - * @see #getJob(UWSUrl, JobList, String, boolean) + * @see #getJob(UWSUrl, JobList, JobOwner) */ protected final UWSJob getJob(UWSUrl urlInterpreter, JobList jobsList) throws UWSException{ return getJob(urlInterpreter, jobsList, null); @@ -234,8 +246,7 @@ public abstract class UWSAction implements Serializable { * or if the specified user has not enough rights. * * @see UWSUrl#getJobId() - * @see JobList#getJob(String) - * @see JobList#getJob(String,String) + * @see JobList#getJob(String, JobOwner) * * @since 3.1 */ @@ -245,12 +256,12 @@ public abstract class UWSAction implements Serializable { if (jobId != null){ if (jobsList == null) - throw UWSExceptionFactory.missingJobListName(); + throw new UWSException(UWSException.BAD_REQUEST, "Missing job list name!"); job = jobsList.getJob(jobId, user); if (job == null) - throw UWSExceptionFactory.incorrectJobID(jobsList.getName(), jobId); + throw new UWSException(UWSException.NOT_FOUND, "Incorrect job ID! 
The job \"" + jobId + "\" does not exist in the jobs list \"" + jobsList.getName() + "\"."); }else - throw UWSExceptionFactory.missingJobID(); + throw new UWSException(UWSException.BAD_REQUEST, "Missing job ID!"); return job; } diff --git a/src/uws/service/backup/DefaultUWSBackupManager.java b/src/uws/service/backup/DefaultUWSBackupManager.java index 2a9db4b418bc27e94ba27f1d975c870bfdafadd2..22393f81868e0b9e3e9b9901db83f4580be38e76 100644 --- a/src/uws/service/backup/DefaultUWSBackupManager.java +++ b/src/uws/service/backup/DefaultUWSBackupManager.java @@ -16,14 +16,14 @@ package uws.service.backup; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.PrintWriter; - import java.text.ParseException; import java.util.ArrayList; import java.util.Date; @@ -41,25 +41,21 @@ import org.json.JSONTokener; import org.json.JSONWriter; import org.json.Json4Uws; +import uws.ISO8601Format; import uws.UWSException; -import uws.UWSExceptionFactory; import uws.UWSToolBox; - import uws.job.ErrorSummary; import uws.job.ErrorType; import uws.job.JobList; import uws.job.Result; import uws.job.UWSJob; - import uws.job.parameters.UWSParameters; -import uws.job.serializer.JSONSerializer; - import uws.job.user.JobOwner; - import uws.service.UWS; import uws.service.file.UWSFileManager; - import uws.service.log.UWSLog; +import uws.service.log.UWSLog.LogLevel; +import uws.service.request.UploadFile; /** *

    Default implementation of the interface {@link UWSBackupManager}.

    @@ -80,8 +76,8 @@ import uws.service.log.UWSLog; * *

    Another positive value will be considered as the frequency (in milliseconds) of the automatic backup (= {@link #saveAll()}).

    * - * @author Grégory Mantelet (CDS) - * @version 06/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (12/2014) */ public class DefaultUWSBackupManager implements UWSBackupManager { @@ -117,7 +113,7 @@ public class DefaultUWSBackupManager implements UWSBackupManager { * * @param uws The UWS to save/restore. * - * @see #DefaultBackupManager(UWS, long) + * @see #DefaultUWSBackupManager(UWS, long) */ public DefaultUWSBackupManager(final UWS uws){ this(uws, DEFAULT_FREQUENCY); @@ -160,7 +156,7 @@ public class DefaultUWSBackupManager implements UWSBackupManager { * * @throws UWSException If the user identification is disabled (that's to say, if the given UWS has no UserIdentifier) while the parameter byUser is true. * - * @see #DefaultBackupManager(UWS, boolean, long) + * @see #DefaultUWSBackupManager(UWS, boolean, long) */ public DefaultUWSBackupManager(final UWS uws, final boolean byUser) throws UWSException{ this(uws, byUser, byUser ? AT_USER_ACTION : DEFAULT_FREQUENCY); @@ -181,7 +177,7 @@ public class DefaultUWSBackupManager implements UWSBackupManager { this.backupFreq = frequency; if (byUser && uws.getUserIdentifier() == null) - throw new UWSException("Impossible to save/restore a UWS by user, if the user identification is disabled (no UserIdentifier is set to the UWS) !"); + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, "Impossible to save/restore a UWS by user if the user identification is disabled (no UserIdentifier is set to the UWS)!"); if (backupFreq == AT_USER_ACTION && !byUser) backupFreq = MANUAL; @@ -206,6 +202,7 @@ public class DefaultUWSBackupManager implements UWSBackupManager { return enabled; } + @Override public final void setEnabled(boolean enabled){ this.enabled = enabled; if (backupFreq > 0){ @@ -310,6 +307,7 @@ public class DefaultUWSBackupManager implements UWSBackupManager { /* SAVE METHODS */ /* ************ */ + @Override public int[] saveAll(){ if (!enabled) return null; @@ -362,7 +360,7 @@ public class 
DefaultUWSBackupManager implements UWSBackupManager { out.value(getJSONUser(user)); nbSavedOwners++; }catch(JSONException je){ - getLogger().error("Unexpected JSON error while saving the user '" + user.getID() + "' !", je); + getLogger().logUWS(LogLevel.ERROR, user, "BACKUP", "Unexpected JSON error while saving the user '" + user.getID() + "'!", je); } } out.endArray(); @@ -378,9 +376,9 @@ public class DefaultUWSBackupManager implements UWSBackupManager { nbSavedJobs++; writer.flush(); }catch(UWSException ue){ - getLogger().error("Unexpected UWS error while saving the job '" + job.getJobId() + "' !", ue); + getLogger().logUWS(LogLevel.ERROR, job, "BACKUP", "Unexpected UWS error while saving the job '" + job.getJobId() + "'!", ue); }catch(JSONException je){ - getLogger().error("Unexpected JSON error while saving the job '" + job.getJobId() + "' !", je); + getLogger().logUWS(LogLevel.ERROR, job, "BACKUP", "Unexpected JSON error while saving the job '" + job.getJobId() + "'!", je); } } } @@ -390,9 +388,9 @@ public class DefaultUWSBackupManager implements UWSBackupManager { out.endObject(); }catch(JSONException je){ - getLogger().error("Unexpected JSON error while saving the whole UWS !", je); + getLogger().logUWS(LogLevel.ERROR, null, "BACKUP", "Unexpected JSON error while saving the whole UWS !", je); }catch(IOException ie){ - getLogger().error("Unexpected IO error while saving the whole UWS !", ie); + getLogger().logUWS(LogLevel.ERROR, null, "BACKUP", "Unexpected IO error while saving the whole UWS !", ie); }finally{ // Close the writer: if (writer != null) @@ -402,13 +400,14 @@ public class DefaultUWSBackupManager implements UWSBackupManager { // Build the report and log it: int[] report = new int[]{nbSavedJobs,nbJobs,nbSavedOwners,nbOwners}; - getLogger().uwsSaved(uws, report); + getLogger().logUWS(LogLevel.INFO, report, "BACKUPED", "UWS Service \"" + uws.getName() + "\" backuped!", null); lastBackup = new Date(); return report; } + @Override public int[] 
saveOwner(JobOwner user){ if (!enabled) return null; @@ -436,7 +435,7 @@ public class DefaultUWSBackupManager implements UWSBackupManager { out.object(); // Write the backup date: - out.key("date").value(UWSJob.dateFormat.format(new Date())); + out.key("date").value(ISO8601Format.format(new Date())); // Write the description of the user: out.key("user").value(getJSONUser(user)); @@ -453,9 +452,9 @@ public class DefaultUWSBackupManager implements UWSBackupManager { saveReport[0]++; writer.flush(); }catch(JSONException je){ - getLogger().error("Unexpected JSON error while saving the " + saveReport[1] + "-th job of the job list '" + jl.getName() + "' owned by the user '" + user.getID() + "' !", je); + getLogger().logUWS(LogLevel.ERROR, null, "BACKUP", "Unexpected JSON error while saving the " + saveReport[1] + "-th job of the job list '" + jl.getName() + "' owned by the user '" + user.getID() + "'!", je); }catch(UWSException ue){ - getLogger().error("Unexpected UWS error while saving the " + saveReport[1] + "-th job of the job list '" + jl.getName() + "' owned by the user '" + user.getID() + "' !", ue); + getLogger().logUWS(LogLevel.ERROR, null, "BACKUP", "Unexpected UWS error while saving the " + saveReport[1] + "-th job of the job list '" + jl.getName() + "' owned by the user '" + user.getID() + "'!", ue); } } } @@ -465,16 +464,16 @@ public class DefaultUWSBackupManager implements UWSBackupManager { out.endObject(); // Log the "save" report: - getLogger().ownerJobsSaved(user, saveReport); + getLogger().logUWS(LogLevel.INFO, saveReport, "BACKUPED", "UWS backuped!", null); lastBackup = new Date(); return saveReport; }catch(IOException ie){ - getLogger().error("Unexpected IO error while saving the jobs of user '" + user.getID() + "' !", ie); + getLogger().logUWS(LogLevel.ERROR, null, "BACKUP", "Unexpected IO error while saving the jobs of user '" + user.getID() + "'!", ie); }catch(JSONException je){ - getLogger().error("Unexpected JSON error while saving the jobs of 
user '" + user.getID() + "' !", je); + getLogger().logUWS(LogLevel.ERROR, null, "BACKUP", "Unexpected JSON error while saving the jobs of user '" + user.getID() + "'!", je); }finally{ // Close the writer: if (writer != null) @@ -525,7 +524,7 @@ public class DefaultUWSBackupManager implements UWSBackupManager { * *

    * note: - * the structure of the returned JSON object is decided by {@link JSONSerializer#getJson(UWSJob)}. + * the structure of the returned JSON object is decided by {@link Json4Uws#getJson(UWSJob)}. * Only one attribute is added: "jobListName". *

    * @@ -539,15 +538,70 @@ public class DefaultUWSBackupManager implements UWSBackupManager { */ protected JSONObject getJSONJob(final UWSJob job, final String jlName) throws UWSException, JSONException{ JSONObject jsonJob = Json4Uws.getJson(job); + + // Re-Build the parameters map, by separating the uploads and the "normal" parameters: + JSONArray uploads = new JSONArray(); + JSONObject params = new JSONObject(); + Object val; + for(String name : job.getAdditionalParameters()){ + // get the raw value: + val = job.getAdditionalParameterValue(name); + // if an array, build a JSON array of strings: + if (val != null && val.getClass().isArray()){ + JSONArray array = new JSONArray(); + for(Object o : (Object[])val){ + if (o != null) + array.put(o.toString()); + } + params.put(name, array); + }else if (val != null && val instanceof UploadFile) + uploads.put(getUploadJson((UploadFile)val)); + // otherwise, just put the value: + else if (val != null) + params.put(name, val); + } + + // Add the parameters and the uploads inside the JSON representation of the job: + jsonJob.put(UWSJob.PARAM_PARAMETERS, params); + jsonJob.put("uwsUploads", uploads); + + // Add the job owner: jsonJob.put(UWSJob.PARAM_OWNER, (job != null && job.getOwner() != null) ? job.getOwner().getID() : null); + + // Add the name of the job list owning the given job: jsonJob.put("jobListName", jlName); + return jsonJob; } + /** + * Get the JSON representation of the given {@link UploadFile}. + * + * @param upl The uploaded file to serialize in JSON. + * + * @return Its JSON representation. + * + * @throws JSONException If there is an error while building the JSON object. 
+ * + * @since 4.1 + */ + protected JSONObject getUploadJson(final UploadFile upl) throws JSONException{ + if (upl == null) + return null; + JSONObject o = new JSONObject(); + o.put("paramName", upl.paramName); + o.put("fileName", upl.fileName); + o.put("location", upl.getLocation()); + o.put("mime", upl.mimeType); + o.put("lenght", upl.length); + return o; + } + /* ******************* */ /* RESTORATION METHODS */ /* ******************* */ + @Override public int[] restoreAll(){ // Removes all current jobs from the UWS before restoring it from files: for(JobList jl : uws) @@ -564,7 +618,7 @@ public class DefaultUWSBackupManager implements UWSBackupManager { // Get the list of the input streams (on all the backup files to read): if (byUser){ if (!userIdentificationEnabled){ - getLogger().error("[restoration] Impossible to restore a UWS by user if the user identification is disabled (that's to say, the UWS has no UserIdentifier) !"); + getLogger().logUWS(LogLevel.ERROR, null, "RESTORATION", "Impossible to restore a UWS by user if the user identification is disabled (that's to say, the UWS has no UserIdentifier)!", null); return null; }else itInput = fileManager.getAllUserBackupInputs(); @@ -572,7 +626,7 @@ public class DefaultUWSBackupManager implements UWSBackupManager { try{ itInput = new SingleInputIterator(fileManager.getBackupInput()); }catch(IOException ioe){ - getLogger().error("[restoration] Restoration of the UWS " + uws.getName() + " failed because an unexpected IO error has occured.", ioe); + getLogger().logUWS(LogLevel.ERROR, null, "RESTORATION", "Restoration of the UWS " + uws.getName() + " failed because an unexpected IO error has occured.", ioe); return null; } } @@ -588,7 +642,7 @@ public class DefaultUWSBackupManager implements UWSBackupManager { HashMap users = new HashMap(); String key; - JSONObject object; + JSONObject object = null; try{ // Reads progressively the general structure (which is theoretically a JSON object): @@ -623,7 +677,7 @@ public 
class DefaultUWSBackupManager implements UWSBackupManager { } } }catch(UWSException ue){ - getLogger().error("[restoration] A job owner can not be restored !", ue); + getLogger().logUWS(LogLevel.ERROR, object, "RESTORATION", "A job owner can not be restored!", ue); //break; // Because, the key "user" is found ONLY in the backup file of a user. If the user can not be restored, its jobs won't be ! } @@ -649,7 +703,7 @@ public class DefaultUWSBackupManager implements UWSBackupManager { } } }catch(UWSException ue){ - getLogger().error("[restoration] The " + nbUsers + "-th user can not be restored !", ue); + getLogger().logUWS(LogLevel.ERROR, object, "RESTORATION", "The " + nbUsers + "-th user can not be restored!", ue); } } @@ -670,26 +724,26 @@ public class DefaultUWSBackupManager implements UWSBackupManager { if (restoreJob(object, users)) nbRestoredJobs++; }catch(UWSException ue){ - getLogger().error("[restoration] The " + nbJobs + "-th job can not be restored !", ue); + getLogger().logUWS(LogLevel.ERROR, object, "RESTORATION", "The " + nbJobs + "-th job can not be restored!", ue); } } }// any other key is ignore but with a warning message: else - getLogger().warning("[restoration] Key '" + key + "' ignored because unknown ! The UWS may be not completely restored !"); + getLogger().logUWS(LogLevel.WARNING, null, "RESTORATION", "Key '" + key + "' ignored because unknown! 
The UWS may be not completely restored.", null); } }catch(JSONException je){ - getLogger().error("[restoration] Incorrect JSON format for a UWS backup file !", je); + getLogger().logUWS(LogLevel.ERROR, null, "RESTORATION", "Incorrect JSON format for a UWS backup file!", je); return null; }catch(Exception e){ - getLogger().error("[restoration] Unexpected error while restoring the UWS !", e); + getLogger().logUWS(LogLevel.ERROR, null, "RESTORATION", "Unexpected error while restoring the UWS!", e); return null; }finally{ // Close the reader: try{ inputStream.close(); }catch(IOException ioe){ - getLogger().error("[restoration] Can not close the input stream opened on a user backup file !", ioe); + getLogger().logUWS(LogLevel.ERROR, null, "RESTORATION", "Can not close the input stream opened on a user backup file!", ioe); } // Set the last restoration date: lastRestoration = new Date(); @@ -697,11 +751,11 @@ public class DefaultUWSBackupManager implements UWSBackupManager { } if (!userIdentificationEnabled && nbUsers > 0) - getLogger().warning("[restoration] " + nbUsers + " job owners have not been restored because the user identification is disabled in this UWS ! => Jobs of these users have not been restored !"); + getLogger().logUWS(LogLevel.WARNING, null, "RESTORATION", nbUsers + " job owners have not been restored because the user identification is disabled in this UWS! 
=> Jobs of these users have not been restored.", null); // Build the restoration report and log it: int[] report = new int[]{nbRestoredJobs,nbJobs,nbRestoredUsers,nbUsers}; - getLogger().uwsRestored(uws, report); + getLogger().logUWS(LogLevel.INFO, report, "RESTORED", "UWS restored!", null); return report; } @@ -734,13 +788,13 @@ public class DefaultUWSBackupManager implements UWSBackupManager { else userData.put(key, json.getString(key)); }catch(JSONException je){ - getLogger().error("[restoration] Incorrect JSON format for the serialization of the user " + ID + " !", je); + getLogger().logUWS(LogLevel.WARNING, null, "RESTORATION", "Incorrect JSON format for the serialization of the user \"" + ID + "\"! The restoration of this job may be incomplete.", je); } } // Check that the ID exists: if (ID == null || ID.trim().isEmpty()) - throw UWSExceptionFactory.restoreUserImpossible("Missing user ID !"); + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, null, "Impossible to restore a user from the backup file(s): no ID has been found!"); return uws.getUserIdentifier().restoreUser(ID, pseudo, userData); } @@ -768,6 +822,7 @@ public class DefaultUWSBackupManager implements UWSBackupManager { //Map params = null; ArrayList results = null; ErrorSummary error = null; + JSONArray uploads = null; String[] keys = JSONObject.getNames(json); for(String key : keys){ @@ -806,35 +861,39 @@ public class DefaultUWSBackupManager implements UWSBackupManager { else if (key.equalsIgnoreCase(UWSJob.PARAM_DESTRUCTION_TIME)){ try{ tmp = json.getString(key); - inputParams.put(UWSJob.PARAM_DESTRUCTION_TIME, UWSJob.dateFormat.parse(tmp)); + inputParams.put(UWSJob.PARAM_DESTRUCTION_TIME, ISO8601Format.parseToDate(tmp)); }catch(ParseException pe){ - getLogger().error("[restoration] Incorrect date format for the '" + key + "' parameter !", pe); + getLogger().logUWS(LogLevel.ERROR, json, "RESTORATION", "Incorrect date format for the '" + key + "' parameter!", pe); } }// key=START_TIME: 
else if (key.equalsIgnoreCase(UWSJob.PARAM_START_TIME)){ tmp = json.getString(key); try{ - Date d = UWSJob.dateFormat.parse(tmp); + Date d = ISO8601Format.parseToDate(tmp); startTime = d.getTime(); }catch(ParseException pe){ - getLogger().error("[restoration] Incorrect date format for the '" + key + "' parameter !", pe); + getLogger().logUWS(LogLevel.ERROR, json, "RESTORATION", "Incorrect date format for the '" + key + "' parameter!", pe); } }// key=END_TIME: else if (key.equalsIgnoreCase(UWSJob.PARAM_END_TIME)){ tmp = json.getString(key); try{ - Date d = UWSJob.dateFormat.parse(tmp); + Date d = ISO8601Format.parseToDate(tmp); endTime = d.getTime(); }catch(ParseException pe){ - getLogger().error("[restoration] Incorrect date format for the '" + key + "' parameter !", pe); + getLogger().logUWS(LogLevel.ERROR, json, "RESTORATION", "Incorrect date format for the '" + key + "' parameter!", pe); } }// key=PARAMETERS: else if (key.equalsIgnoreCase(UWSJob.PARAM_PARAMETERS)) inputParams.put(UWSJob.PARAM_PARAMETERS, getParameters(json.getJSONObject(key))); + // key=uwsUploads: + else if (key.equalsIgnoreCase("uwsUploads")) + uploads = json.getJSONArray(key); + // key=RESULTS: else if (key.equalsIgnoreCase(UWSJob.PARAM_RESULTS)) results = getResults(json.getJSONArray(key)); @@ -845,24 +904,40 @@ public class DefaultUWSBackupManager implements UWSBackupManager { }// Ignore any other key but with a warning message: else - getLogger().warning("[restoration] The job attribute '" + key + "' has been ignored because unknown ! A job may be not completely restored !"); + getLogger().logUWS(LogLevel.WARNING, json, "RESTORATION", "The job attribute '" + key + "' has been ignored because unknown! 
A job may be not completely restored!", null); }catch(JSONException je){ - getLogger().error("[restoration] Incorrect JSON format for a job serialization (attribute: \"" + key + "\") !", je); + getLogger().logUWS(LogLevel.ERROR, json, "RESTORATION", "Incorrect JSON format for a job serialization (attribute: \"" + key + "\")!", je); + } + } + + // Re-Build all the uploaded files' pointers for this job: + if (uploads != null){ + @SuppressWarnings("unchecked") + Map params = (Map)(inputParams.get(UWSJob.PARAM_PARAMETERS)); + UploadFile upl; + try{ + for(int i = 0; i < uploads.length(); i++){ + upl = getUploadFile(uploads.getJSONObject(i));; + if (upl != null) + params.put(upl.paramName, upl); + } + }catch(JSONException je){ + getLogger().logUWS(LogLevel.ERROR, json, "RESTORATION", "Incorrect JSON format for the serialization of the job \"" + jobId + "\" (attribute: \"uwsUploads\")!", je); } } // The job list name is REQUIRED: if (jobListName == null || jobListName.isEmpty()) - getLogger().error("[restoration] Missing job list name ! => Can not restore the job " + jobId + " !"); + getLogger().logUWS(LogLevel.ERROR, json, "RESTORATION", "Missing job list name! => Can not restore the job " + jobId + "!", null); // The job list name MUST correspond to an existing job list: else if (uws.getJobList(jobListName) == null) - getLogger().error("[restoration] No job list named " + jobListName + " ! => Can not restore the job " + jobId + " !"); + getLogger().logUWS(LogLevel.ERROR, json, "RESTORATION", "No job list named " + jobListName + "! => Can not restore the job " + jobId + "!", null); // The job ID is REQUIRED: else if (jobId == null || jobId.isEmpty()) - getLogger().error("[restoration] Missing job ID ! => Can not restore a job !"); + getLogger().logUWS(LogLevel.ERROR, json, "RESTORATION", "Missing job ID! 
=> Can not restore a job!", null); // Otherwise: the job can be created and restored: else{ @@ -871,7 +946,7 @@ public class DefaultUWSBackupManager implements UWSBackupManager { // If the specified user is unknown, display a warning and create the job without owner: if (ownerID != null && !ownerID.isEmpty() && owner == null){ - getLogger().error("[restoration] Unknown job owner: " + ownerID + " ! => Can not restore the job " + jobId + " !"); + getLogger().logUWS(LogLevel.ERROR, json, "RESTORATION", "Unknown job owner: " + ownerID + "! => Can not restore the job " + jobId + "!", null); return false; } @@ -880,7 +955,7 @@ public class DefaultUWSBackupManager implements UWSBackupManager { try{ uwsParams = uws.getFactory().createUWSParameters(inputParams); }catch(UWSException ue){ - getLogger().error("[restoration] Error with at least one of the UWS parameters to restore !", ue); + getLogger().logUWS(LogLevel.ERROR, json, "RESTORATION", "Error with at least one of the UWS parameters to restore!", ue); return false; } @@ -923,10 +998,8 @@ public class DefaultUWSBackupManager implements UWSBackupManager { * * @return The corresponding list of parameters * or null if the given object is empty. - * - * @throws UWSException */ - protected Map getParameters(final JSONObject obj) throws UWSException{ + protected Map getParameters(final JSONObject obj){ if (obj == null || obj.length() == 0) return null; @@ -936,12 +1009,37 @@ public class DefaultUWSBackupManager implements UWSBackupManager { try{ params.put(n, obj.get(n)); }catch(JSONException je){ - getLogger().error("Incorrect JSON format for the serialization of the parameter '" + n + "' !", je); + getLogger().logUWS(LogLevel.ERROR, obj, "RESTORATION", "Incorrect JSON format for the serialization of the parameter '" + n + "'!", je); } } return params; } + /** + * Build the upload file corresponding to the given JSON object. + * + * @param obj The JSON representation of the {@link UploadFile} to get. 
+ * + * @return The corresponding {@link UploadFile}. + * + * @since 4.1 + */ + protected UploadFile getUploadFile(final JSONObject obj){ + try{ + UploadFile upl = new UploadFile(obj.getString("paramName"), (obj.has("fileName") ? obj.getString("fileName") : null), obj.getString("location"), uws.getFileManager()); + if (obj.has("mime")) + upl.mimeType = obj.getString("mime"); + try{ + if (obj.has("length")) + upl.length = Long.parseLong(obj.getString("length")); + }catch(NumberFormatException ex){} + return upl; + }catch(JSONException je){ + getLogger().logUWS(LogLevel.ERROR, obj, "RESTORATION", "Incorrect JSON format for the serialization of an uploaded file!", je); + return null; + } + } + /** * Builds the list of results corresponding to the given JSON array. * @@ -952,7 +1050,7 @@ public class DefaultUWSBackupManager implements UWSBackupManager { * * @throws UWSException If there is an error while restoring one of the result. * - * @see {@link #getResult(JSONObject)} + * @see #getResult(JSONObject) */ protected ArrayList getResults(final JSONArray array) throws UWSException{ if (array == null || array.length() == 0) @@ -965,7 +1063,7 @@ public class DefaultUWSBackupManager implements UWSBackupManager { if (r != null) results.add(r); }catch(JSONException je){ - getLogger().error("Incorrect JSON format for the serialization of the " + (i + 1) + "-th result !", je); + getLogger().logUWS(LogLevel.ERROR, array, "RESTORATION", "Incorrect JSON format for the serialization of the " + (i + 1) + "-th result!", je); } } @@ -1004,11 +1102,11 @@ public class DefaultUWSBackupManager implements UWSBackupManager { else if (n.equalsIgnoreCase("size")) size = obj.getLong(n); else - getLogger().warning("[restoration] The result parameter '" + n + "' has been ignored because unknown ! A result may be not completely restored !"); + getLogger().logUWS(LogLevel.WARNING, obj, "RESTORATION", "The result parameter '" + n + "' has been ignored because unknown! 
A result may be not completely restored!", null); } if (id == null){ - getLogger().error("[restoration] Missing result ID ! => A result can not be restored !"); + getLogger().logUWS(LogLevel.ERROR, obj, "RESTORATION", "Missing result ID! => A result can not be restored!", null); return null; }else{ Result r = new Result(id, type, href, redirection); @@ -1044,13 +1142,13 @@ public class DefaultUWSBackupManager implements UWSBackupManager { else if (n.equalsIgnoreCase("message")) message = obj.getString(n); else - getLogger().warning("[restoration] The error attribute '" + n + "' has been ignored because unknown ! => An error summary may be not completely restored !"); + getLogger().logUWS(LogLevel.WARNING, obj, "RESTORATION", "The error attribute '" + n + "' has been ignored because unknown! => An error summary may be not completely restored!", null); }catch(JSONException je){ - getLogger().error("Incorrect JSON format for an error serialization !", je); + getLogger().logUWS(LogLevel.ERROR, obj, "RESTORATION", "Incorrect JSON format for an error serialization!", je); } } if (message != null) - return new ErrorSummary(message, ErrorType.valueOf(type), details); + return new ErrorSummary(message, ErrorType.valueOf(type.toUpperCase()), details); else return null; } @@ -1199,7 +1297,7 @@ public class DefaultUWSBackupManager implements UWSBackupManager { readNext(); return nextKey; }catch(JSONException je){ - logger.error("Incorrect JSON format in an object !", je); + logger.logUWS(LogLevel.ERROR, null, "RESTORATION", "Incorrect JSON format in an object!", je); endReached = true; return null; } @@ -1296,7 +1394,7 @@ public class DefaultUWSBackupManager implements UWSBackupManager { try{ readNext(); }catch(JSONException je){ - logger.error("Incorrect JSON format in an Array !", je); + logger.logUWS(LogLevel.ERROR, null, "RESTORATION", "Incorrect JSON format in an Array!", je); endReached = true; nextObj = null; } diff --git a/src/uws/service/backup/UWSBackupManager.java 
b/src/uws/service/backup/UWSBackupManager.java index 52ae1680586b037926bbddab72f4e4b861ccd762..6e509566605a2e4f2cbb3e59afbd206867cf8e3e 100644 --- a/src/uws/service/backup/UWSBackupManager.java +++ b/src/uws/service/backup/UWSBackupManager.java @@ -32,7 +32,7 @@ public interface UWSBackupManager { /** * Enables/Disables the backup of the associated UWS. * - * @param enableBackup true to enable the backup, false otherwise. + * @param enabled true to enable the backup, false otherwise. */ public void setEnabled(final boolean enabled); diff --git a/src/uws/service/error/AbstractServiceErrorWriter.java b/src/uws/service/error/AbstractServiceErrorWriter.java deleted file mode 100644 index a71b45c2013623a3fae48cb431857716fdc10d06..0000000000000000000000000000000000000000 --- a/src/uws/service/error/AbstractServiceErrorWriter.java +++ /dev/null @@ -1,291 +0,0 @@ -package uws.service.error; - -import java.io.IOException; -import java.io.PrintWriter; -import java.util.ArrayList; - -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - -import org.json.JSONException; -import org.json.JSONWriter; - -import uws.AcceptHeader; -import uws.job.ErrorType; -import uws.job.serializer.UWSSerializer; -import uws.job.user.JobOwner; -import uws.service.log.UWSLog; - -/** - *

    Abstract implementation of the {@link ServiceErrorWriter} interface.

    - * - *

    - * The only abstract method is the function {@link #getLogger()}. It MUST return a NON-NULL logger. - * The other functions ({@link #writeError(Throwable, HttpServletResponse, HttpServletRequest, JobOwner, String)} - * and {@link #writeError(String, ErrorType, int, HttpServletResponse, HttpServletRequest, JobOwner, String)}) have - * already a default implementation but may be overridden if needed. Both of them call the function - * {@link #formatError(Throwable, boolean, ErrorType, int, String, JobOwner, HttpServletResponse)} - * to format and write the error in the given {@link HttpServletResponse} in the HTML format with - * the appropriate HTTP error code. The (full) stack trace of the error may be printed if asked. - *

    - * - *

    2 formats are managed by this implementation: HTML (default) and JSON. That means the writer will format and - * write a given error in the best appropriate format. This format is chosen thanks to the "Accept" header of the HTTP request. - * If no request is provided or if there is no known format, the HTML format is chosen by default.

    - * - * @author Grégory Mantelet (CDS) - * @version 06/2012 - */ -public abstract class AbstractServiceErrorWriter implements ServiceErrorWriter { - - protected final String[] managedFormats = new String[]{"application/json","json","text/json","text/html","html"}; - - /** - * Logger to use to display the given errors in the appropriate log files. - * @return A NON-NULL and VALID logger. - */ - protected abstract UWSLog getLogger(); - - @Override - public void writeError(Throwable t, HttpServletResponse response, HttpServletRequest request, JobOwner user, String action) throws IOException{ - if (t != null && response != null){ - formatError(t, true, ErrorType.FATAL, HttpServletResponse.SC_INTERNAL_SERVER_ERROR, action, user, response, (request != null) ? request.getHeader("Accept") : null); - getLogger().error(t); - String errorMsg = t.getMessage(); - if (errorMsg == null || errorMsg.trim().isEmpty()) - errorMsg = t.getClass().getName() + " (no error message)"; - getLogger().httpRequest(request, user, action, HttpServletResponse.SC_INTERNAL_SERVER_ERROR, errorMsg, t); - } - } - - @Override - public void writeError(String message, ErrorType type, int httpErrorCode, HttpServletResponse response, HttpServletRequest request, JobOwner user, String action) throws IOException{ - if (message != null && response != null){ - formatError(new Exception(message), false, type, httpErrorCode, action, user, response, (request != null) ? request.getHeader("Accept") : null); - getLogger().httpRequest(request, user, action, httpErrorCode, message, null); - } - } - - /** - * Parses the header "Accept", splits it in a list of MIME type and compare each one to each managed formats ({@link #managedFormats}). - * If there is a match (not case sensitive), return the corresponding managed format immediately. - * - * @param acceptHeader The header item named "Accept" (which lists all expected response formats). 
- * @return The first format common to the "Accept" header and the managed formats of this writer. - */ - protected final String chooseFormat(final String acceptHeader){ - if (acceptHeader != null && !acceptHeader.trim().isEmpty()){ - // Parse the given MIME types list: - AcceptHeader accept = new AcceptHeader(acceptHeader); - ArrayList lstMimeTypes = accept.getOrderedMimeTypes(); - for(String acceptedFormat : lstMimeTypes){ - for(String f : managedFormats){ - if (acceptedFormat.equalsIgnoreCase(f)) - return f; - } - } - } - return null; - } - - /** - *

    Formats and writes the given error in the HTTP servlet response.

    - *

    The format is chosen thanks to the Accept header of the HTTP request. - * If unknown, the HTML output is chosen.

    - * - * @param t Exception to format and to write. - * @param printStackTrace true to print the (full) stack trace, false otherwise. - * @param type Type of the error: FATAL or TRANSIENT. - * @param httpErrorCode HTTP error code (i.e. 404, 500). - * @param action Action which generates the error note: displayed only if not NULL and not empty. - * @param user User which is at the origin of the request/action which generates the error. - * @param response Response in which the error must be written. - * @param acceptHeader Value of the header named "Accept" (which lists all allowed response format). - * - * @throws IOException If there is an error while writing the given exception. - * - * @see #formatHTMLError(Throwable, boolean, ErrorType, int, String, JobOwner, HttpServletResponse) - * @see #formatJSONError(Throwable, boolean, ErrorType, int, String, JobOwner, HttpServletResponse) - */ - protected void formatError(final Throwable t, final boolean printStackTrace, final ErrorType type, final int httpErrorCode, final String action, final JobOwner user, final HttpServletResponse response, final String acceptHeader) throws IOException{ - // Reset the whole response to ensure the output stream is free: - if (response.isCommitted()) - return; - response.reset(); - - String format = chooseFormat(acceptHeader); - if (format != null && (format.equalsIgnoreCase("application/json") || format.equalsIgnoreCase("text/json") || format.equalsIgnoreCase("json"))) - formatJSONError(t, printStackTrace, type, httpErrorCode, action, user, response); - else - formatHTMLError(t, printStackTrace, type, httpErrorCode, action, user, response); - } - - /** - *

    Formats and writes the given error in the HTTP servlet response.

    - *

    A full HTML response is printed with: the HTTP error code, the error type, the name of the exception, the message and the full stack trace.

    - * - * @param t Exception to format and to write. - * @param printStackTrace true to print the (full) stack trace, false otherwise. - * @param type Type of the error: FATAL or TRANSIENT. - * @param httpErrorCode HTTP error code (i.e. 404, 500). - * @param action Action which generates the error note: displayed only if not NULL and not empty. - * @param user User which is at the origin of the request/action which generates the error. - * @param response Response in which the error must be written. - * - * @throws IOException If there is an error while writing the given exception. - */ - protected void formatHTMLError(final Throwable t, final boolean printStackTrace, final ErrorType type, final int httpErrorCode, final String action, final JobOwner user, final HttpServletResponse response) throws IOException{ - // Set the HTTP status code and the content type of the response: - response.setStatus(httpErrorCode); - response.setContentType(UWSSerializer.MIME_TYPE_HTML); - - PrintWriter out = response.getWriter(); - - // Header: - out.println("\n\t"); - out.println("\t\t"); - out.println("\t\t"); - out.println("\t\tSERVICE ERROR"); - out.println("\t\n\t"); - - // Title: - String errorColor = (type == ErrorType.FATAL) ? "red" : "orange"; - out.println("\t\t

    SERVICE ERROR - " + httpErrorCode + "

    "); - - // Description part: - out.println("\t\t

    Description

    "); - out.println("\t\t
      "); - out.println("\t\t\t
    • Type: " + type + "
    • "); - if (action != null && !action.trim().isEmpty()) - out.println("\t\t\t
    • Action: " + action + "
    • "); - String context = null; - String msg = t.getMessage(); - if (msg != null && !msg.trim().isEmpty()){ - int start = msg.indexOf("["), end = msg.indexOf("]"); - if (start >= 0 && start < end){ - context = msg.substring(start + 1, end); - msg = msg.substring(end + 1); - } - }else - msg = ""; - if (context != null) - out.println("\t\t\t
    • Context: " + context + "
    • "); - if (printStackTrace) - out.println("\t\t\t
    • Exception: " + t.getClass().getName() + "
    • "); - out.println("\t\t\t
    • Message:

      " + msg + "

    • "); - out.println("\t\t
    "); - - // Stack trace part: - if (printStackTrace){ - out.println("\t\t

    Stack trace

    "); - Throwable cause = t; - do{ - out.println("\t\t"); - out.println("\t\t\t"); - StackTraceElement[] trace = cause.getStackTrace(); - for(int i = 0; i < trace.length; i++) - out.println("\t\t\t"); - out.println("\t\t
    ClassMethodLine
    " + trace[i].getClassName() + "" + trace[i].getMethodName() + "" + trace[i].getLineNumber() + "
    "); - - // Print the stack trace of the "next" error: - cause = cause.getCause(); - if (cause != null){ - out.println("\t\t

    Caused by " + cause.getClass().getName() + ":

    "); - out.println("\t\t

    " + cause.getMessage() + "

    "); - } - }while(cause != null); - } - - out.println("\t\n"); - out.close(); - } - - /** - *

    Formats and writes the given error in the HTTP servlet response.

    - *

    A JSON response is printed with: the HTTP error code, the error type, the name of the exception, the message and the list of all causes' message.

    - * - * @param t Exception to format and to write. - * @param printStackTrace true to print the (full) stack trace, false otherwise. - * @param type Type of the error: FATAL or TRANSIENT. - * @param httpErrorCode HTTP error code (i.e. 404, 500). - * @param action Action which generates the error note: displayed only if not NULL and not empty. - * @param user User which is at the origin of the request/action which generates the error. - * @param response Response in which the error must be written. - * - * @throws IOException If there is an error while writing the given exception. - */ - protected void formatJSONError(final Throwable t, final boolean printStackTrace, final ErrorType type, final int httpErrorCode, final String action, final JobOwner user, final HttpServletResponse response) throws IOException{ - // Set the HTTP status code and the content type of the response: - response.setStatus(httpErrorCode); - response.setContentType(UWSSerializer.MIME_TYPE_JSON); - - PrintWriter out = response.getWriter(); - try{ - JSONWriter json = new JSONWriter(out); - - json.object(); - json.key("errorcode").value(httpErrorCode); - json.key("errortype").value(type.toString()); - json.key("action").value(action); - - String context = null; - String msg = t.getMessage(); - if (msg != null && !msg.trim().isEmpty()){ - int start = msg.indexOf("["), end = msg.indexOf("]"); - if (start >= 0 && start < end){ - context = msg.substring(start + 1, end); - msg = msg.substring(end + 1); - } - }else - msg = ""; - if (context != null) - json.key("context").value(context); - if (printStackTrace) - json.key("exception").value(t.getClass().getName()); - json.key("message").value(msg); - - // Stack trace part: - if (printStackTrace){ - json.key("cause").array(); - Throwable cause = t; - do{ - json.object(); - json.key("exception").value(cause.getClass().getName()); - json.key("stacktrace").array(); - StackTraceElement[] trace = cause.getStackTrace(); - for(int i = 0; i < trace.length; 
i++){ - json.object(); - json.key("class").value(trace[i].getClassName()); - json.key("method").value(trace[i].getMethodName()); - json.key("line").value(trace[i].getLineNumber()); - json.endObject(); - } - json.endArray().endObject(); - - // Print the stack trace of the "next" error: - cause = cause.getCause(); - }while(cause != null); - json.endArray(); - } - - json.endObject(); - }catch(JSONException je){ - getLogger().error("Impossible to format/write an error in JSON !", je); - throw new IOException("Error while formatting the error in JSON !", je); - }finally{ - out.flush(); - out.close(); - } - } - -} diff --git a/src/uws/service/error/DefaultUWSErrorWriter.java b/src/uws/service/error/DefaultUWSErrorWriter.java index 82529f65d5cbf231655ad006924be7868e91e2c5..71ec87e41423bcb9233a2bc0a39b65e845c6c34a 100644 --- a/src/uws/service/error/DefaultUWSErrorWriter.java +++ b/src/uws/service/error/DefaultUWSErrorWriter.java @@ -1,58 +1,341 @@ package uws.service.error; +/* + * This file is part of UWSLibrary. + * + * UWSLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * UWSLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with UWSLibrary. If not, see . 
+ * + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) + */ + +import java.io.BufferedWriter; import java.io.IOException; +import java.io.OutputStream; +import java.io.OutputStreamWriter; +import java.io.PrintWriter; +import java.util.ArrayList; + import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; +import org.json.JSONException; +import org.json.JSONWriter; + +import tap.TAPException; +import uws.AcceptHeader; import uws.UWSException; import uws.UWSToolBox; +import uws.job.ErrorSummary; +import uws.job.ErrorType; +import uws.job.UWSJob; +import uws.job.serializer.UWSSerializer; import uws.job.user.JobOwner; -import uws.service.UWS; import uws.service.log.UWSLog; +import uws.service.log.UWSLog.LogLevel; /** - *

    Default implementation of {@link ServiceErrorWriter} for a UWS service.

    - * - *

    All errors are written using the function {@link #formatError(Throwable, boolean, uws.job.ErrorType, int, String, JobOwner, HttpServletResponse)} - * of the abstract implementation of the error writer: {@link AbstractServiceErrorWriter}.

    + *

    Default implementation of a {@link ServiceErrorWriter} interface for a UWS service.

    * - *

    A {@link UWSException} may precise the HTTP error code to apply. That's why, {@link #writeError(Throwable, HttpServletResponse, HttpServletRequest, JobOwner, String)} - * has been overridden: to get this error code and submit it to the {@link #formatError(Throwable, boolean, uws.job.ErrorType, int, String, JobOwner, HttpServletResponse)} - * function. Besides, the stack trace of {@link UWSException}s is not printed (except if the message is NULL or empty). - * And this error will be logged only if its error code is {@link UWSException#INTERNAL_SERVER_ERROR}.

    + *

    + * All errors are written using the function {@link #formatError(String, ErrorType, int, String, String, JobOwner, HttpServletResponse, String)} + * in order to format the error in the most appropriate format. 2 formats are managed by default by this implementation: HTML (default) and JSON. + * This format is chosen thanks to the "Accept" header of the HTTP request. If no request is provided or if there is no known format, + * the HTML format is chosen by default. + *

    * - *

    2 formats are managed by this implementation: HTML (default) and JSON. That means the writer will format and - * write a given error in the best appropriate format. This format is chosen thanks to the "Accept" header of the HTTP request. - * If no request is provided or if there is no known format, the HTML format is chosen by default.

    + *

    + * {@link UWSException}s may precise the HTTP error code to apply, + * which will be used to set the HTTP status of the response. If it is a different kind of exception, + * the HTTP status 500 (INTERNAL SERVER ERROR) will be used. + *

    * - * @author Grégory Mantelet (CDS) - * @version 06/2012 + *

    + * Besides, all exceptions except {@link UWSException} and {@link TAPException} will be logged as FATAL in the TAP context + * (with no event and no object). Thus the full stack trace is available to the administrator so that the error can + * be understood as easily and quickly as possible. + * The stack trace is no longer displayed to the user. + *

    * - * @see AbstractServiceErrorWriter + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (04/2015) */ -public class DefaultUWSErrorWriter extends AbstractServiceErrorWriter { +public class DefaultUWSErrorWriter implements ServiceErrorWriter { - protected final UWS uws; + /** List of all managed output formats. */ + protected final String[] managedFormats = new String[]{"application/json","json","text/json","text/html","html"}; - public DefaultUWSErrorWriter(final UWS uws){ - this.uws = uws; - } + /** Logger to use when grave error must be logged or if a JSON error occurs. */ + protected final UWSLog logger; - @Override - protected final UWSLog getLogger(){ - return (uws != null && uws.getLogger() != null) ? uws.getLogger() : UWSToolBox.getDefaultLogger(); + /** + * Build an error writer which will log any error in response of an HTTP request. + * + * @param logger Object to use to log errors. + */ + public DefaultUWSErrorWriter(final UWSLog logger){ + if (logger == null) + throw new NullPointerException("Missing logger! 
Can not write a default error writer without."); + + this.logger = logger; } @Override - public void writeError(Throwable t, HttpServletResponse response, HttpServletRequest request, JobOwner user, String action) throws IOException{ + public boolean writeError(Throwable t, HttpServletResponse response, HttpServletRequest request, String reqID, JobOwner user, String action){ + if (t == null || response == null) + return true; + + boolean written = false; + // If expected error, just write it: if (t instanceof UWSException){ UWSException ue = (UWSException)t; - formatError(ue, (ue.getMessage() == null || ue.getMessage().trim().isEmpty()), ue.getUWSErrorType(), ue.getHttpErrorCode(), action, user, response, request.getHeader("Accept")); - if (ue.getHttpErrorCode() == UWSException.INTERNAL_SERVER_ERROR) - getLogger().error(ue); - getLogger().httpRequest(request, user, action, ue.getHttpErrorCode(), ue.getMessage(), ue); - }else - super.writeError(t, response, request, user, action); + written = writeError(ue.getMessage(), ue.getUWSErrorType(), ue.getHttpErrorCode(), response, request, reqID, user, action); + } + // Otherwise, log it and write a message to the user: + else{ + // log the error as GRAVE/FATAL (because unexpected/unmanaged): + logger.logUWS(LogLevel.FATAL, null, null, "[REQUEST N°" + reqID + "] " + t.getMessage(), t); + // write a message to the user: + written = writeError("INTERNAL SERVER ERROR! Sorry, this error is unexpected and no explanation can be provided for the moment. 
Details about this error have been reported in the service log files ; you should try again your request later or notify the administrator(s) by yourself (with the following 'Request ID').", ErrorType.FATAL, UWSException.INTERNAL_SERVER_ERROR, response, request, reqID, user, action); + } + return written; + } + + @Override + public boolean writeError(String message, ErrorType type, int httpErrorCode, HttpServletResponse response, HttpServletRequest request, String reqID, JobOwner user, String action){ + if (message == null || response == null) + return true; + + try{ + // Just format and write the error message: + formatError(message, type, httpErrorCode, reqID, action, user, response, (request != null) ? request.getHeader("Accept") : null); + return true; + }catch(IllegalStateException ise){ + return false; + }catch(IOException ioe){ + return false; + } + } + + @Override + public void writeError(Throwable t, ErrorSummary error, UWSJob job, OutputStream output) throws IOException{ + UWSToolBox.writeErrorFile((t instanceof Exception) ? (Exception)t : new UWSException(t), error, job, output); + } + + @Override + public String getErrorDetailsMIMEType(){ + return "text/plain"; + } + + /** + * Parses the header "Accept", splits it in a list of MIME type and compare each one to each managed formats ({@link #managedFormats}). + * If there is a match (not case sensitive), return the corresponding managed format immediately. + * + * @param acceptHeader The header item named "Accept" (which lists all expected response formats). + * @return The first format common to the "Accept" header and the managed formats of this writer. 
+ */ + protected final String chooseFormat(final String acceptHeader){ + if (acceptHeader != null && !acceptHeader.trim().isEmpty()){ + // Parse the given MIME types list: + AcceptHeader accept = new AcceptHeader(acceptHeader); + ArrayList lstMimeTypes = accept.getOrderedMimeTypes(); + for(String acceptedFormat : lstMimeTypes){ + for(String f : managedFormats){ + if (acceptedFormat.equalsIgnoreCase(f)) + return f; + } + } + } + return null; + } + + /** + *

    Formats and writes the given error in the HTTP servlet response.

    + *

    The format is chosen thanks to the Accept header of the HTTP request. + * If unknown, the HTML output is chosen.

    + * + * @param message Error message to write. + * @param type Type of the error: FATAL or TRANSIENT. + * @param httpErrorCode HTTP error code (i.e. 404, 500). + * @param reqID ID of the request at the origin of the specified error. + * @param action Action which generates the error note: displayed only if not NULL and not empty. + * @param user User which is at the origin of the request/action which generates the error. + * @param response Response in which the error must be written. + * @param acceptHeader Value of the header named "Accept" (which lists all allowed response format). + * + * @throws IOException If there is an error while writing the given exception. + * + * @see #formatHTMLError(String, ErrorType, int, String, String, JobOwner, HttpServletResponse) + * @see #formatJSONError(String, ErrorType, int, String, String, JobOwner, HttpServletResponse) + */ + protected void formatError(final String message, final ErrorType type, final int httpErrorCode, final String reqID, final String action, final JobOwner user, final HttpServletResponse response, final String acceptHeader) throws IOException{ + String format = chooseFormat(acceptHeader); + if (format != null && (format.equalsIgnoreCase("application/json") || format.equalsIgnoreCase("text/json") || format.equalsIgnoreCase("json"))) + formatJSONError(message, type, httpErrorCode, reqID, action, user, response); + else + formatHTMLError(message, type, httpErrorCode, reqID, action, user, response); + } + + /** + *

    Formats and writes the given error in the HTTP servlet response.

    + *

    A full HTML response is printed with: the HTTP error code, the error type, the name of the exception, the message and the full stack trace.

    + * + * @param message Error message to write. + * @param type Type of the error: FATAL or TRANSIENT. + * @param httpErrorCode HTTP error code (i.e. 404, 500). + * @param reqID ID of the request at the origin of the specified error. + * @param action Action which generates the error note: displayed only if not NULL and not empty. + * @param user User which is at the origin of the request/action which generates the error. + * @param response Response in which the error must be written. + * + * @throws IOException If there is an error while writing the given exception. + */ + protected void formatHTMLError(final String message, final ErrorType type, final int httpErrorCode, final String reqID, final String action, final JobOwner user, final HttpServletResponse response) throws IOException{ + try{ + // Erase anything written previously in the HTTP response: + response.reset(); + + // Set the HTTP status: + response.setStatus(httpErrorCode); + + // Set the MIME type of the answer (XML for a VOTable document): + response.setContentType(UWSSerializer.MIME_TYPE_HTML); + + // Set the character encoding: + response.setCharacterEncoding(UWSToolBox.DEFAULT_CHAR_ENCODING); + + }catch(IllegalStateException ise){ + /* If it is not possible any more to reset the response header and body, + * the error is anyway written in order to corrupt the HTTP response. + * Thus, it will be obvious that an error occurred and the result is + * incomplete and/or wrong.*/ + } + + PrintWriter out; + try{ + out = response.getWriter(); + }catch(IllegalStateException ise){ + /* This exception may occur just because either the writer or + * the output-stream can be used (because already got before). 
+ * So, we just have to get the output-stream if getting the writer + * throws an error.*/ + out = new PrintWriter(new BufferedWriter(new OutputStreamWriter(response.getOutputStream()))); + } + + // Header: + out.println("\n\t"); + out.println("\t\t"); + out.println("\t\t"); + out.println("\t\tSERVICE ERROR"); + out.println("\t\n\t"); + + // Title: + String errorColor = (type == ErrorType.FATAL) ? "red" : "orange"; + out.println("\t\t

    SERVICE ERROR - " + httpErrorCode + "

    "); + + // Description part: + out.println("\t\t

    Description

    "); + out.println("\t\t
      "); + out.println("\t\t\t
    • Type: " + type + "
    • "); + if (reqID != null) + out.println("\t\t\t
    • Request ID: " + reqID + "
    • "); + if (action != null) + out.println("\t\t\t
    • Action: " + action + "
    • "); + out.println("\t\t\t
    • Message:

      " + message + "

    • "); + out.println("\t\t
    "); + + out.println("\t\n"); + + out.flush(); + } + + /** + *

    Formats and writes the given error in the HTTP servlet response.

    + *

    A JSON response is printed with: the HTTP error code, the error type, the name of the exception, the message and the list of all causes' message.

    + * + * @param message Error message to write. + * @param type Type of the error: FATAL or TRANSIENT. + * @param httpErrorCode HTTP error code (i.e. 404, 500). + * @param reqID ID of the request at the origin of the specified error. + * @param action Action which generates the error note: displayed only if not NULL and not empty. + * @param user User which is at the origin of the request/action which generates the error. + * @param response Response in which the error must be written. + * + * @throws IOException If there is an error while writing the given exception. + */ + protected void formatJSONError(final String message, final ErrorType type, final int httpErrorCode, final String reqID, final String action, final JobOwner user, final HttpServletResponse response) throws IOException{ + try{ + // Erase anything written previously in the HTTP response: + response.reset(); + + // Set the HTTP status: + response.setStatus(httpErrorCode); + + // Set the MIME type of the answer (JSON): + response.setContentType(UWSSerializer.MIME_TYPE_JSON); + + // Set the character encoding: + response.setCharacterEncoding(UWSToolBox.DEFAULT_CHAR_ENCODING); + + }catch(IllegalStateException ise){ + /* If it is not possible any more to reset the response header and body, + * the error is anyway written in order to corrupt the HTTP response. + * Thus, it will be obvious that an error occurred and the result is + * incomplete and/or wrong.*/ + } + + PrintWriter out; + try{ + out = response.getWriter(); + }catch(IllegalStateException ise){ + /* This exception may occur just because either the writer or + * the output-stream can be used (because already got before). 
+ * So, we just have to get the output-stream if getting the writer + * throws an error.*/ + out = new PrintWriter(new BufferedWriter(new OutputStreamWriter(response.getOutputStream()))); + } + + try{ + JSONWriter json = new JSONWriter(out); + + json.object(); + json.key("errorcode").value(httpErrorCode); + json.key("errortype").value(type.toString()); + if (reqID != null) + json.key("requestid").value(reqID); + if (action != null) + json.key("action").value(action); + json.key("message").value(message); + + json.endObject(); + + out.flush(); + + }catch(JSONException je){ + logger.logUWS(LogLevel.ERROR, null, "FORMAT_ERROR", "Impossible to format/write an error in JSON!", je); + throw new IOException("Error while formatting the error in JSON!", je); + } } } diff --git a/src/uws/service/error/ServiceErrorWriter.java b/src/uws/service/error/ServiceErrorWriter.java index 9c19a50a692db3c310da52980bdc48d34e4cc49d..04c2936cd3af8bec21c1229e80bdd3d2314f4aef 100644 --- a/src/uws/service/error/ServiceErrorWriter.java +++ b/src/uws/service/error/ServiceErrorWriter.java @@ -1,47 +1,137 @@ package uws.service.error; +/* + * This file is part of UWSLibrary. + * + * UWSLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * UWSLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with UWSLibrary. If not, see . 
+ * + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) + */ + import java.io.IOException; +import java.io.OutputStream; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; +import uws.job.ErrorSummary; import uws.job.ErrorType; +import uws.job.UWSJob; import uws.job.user.JobOwner; /** - * Let's writing/formatting any Exception/Throwable in a {@link HttpServletResponse}. + * Let's writing/formatting any Exception/Throwable in an {@link HttpServletResponse} or in an error summary. * - * @author Grégory Mantelet (CDS) - * @version 06/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (04/2015) */ public interface ServiceErrorWriter { /** - * Writes the given exception in the given response. + *

    Write the given exception in the given response.

    + * + *

    Note: + * If this function is called without at least an exception and an HTTP response, nothing should be done. + * No error may be thrown. + *

    * - * @param t Exception to write/format. - * @param response Response in which the given exception must be written. - * @param request Request at the origin of the error (MAY BE NULL). - * @param user User which sends the given request (which generates the error) (MAY BE NULL). - * @param action Type/Name of the action which generates the error (MAY BE NULL). + *

    IMPORTANT: + * If any {@link IOException} occurs while writing the error in the given {@link HttpServletResponse} output stream, + * this function should stop and return false. In such case, the error which was supposed to be written + * may be logged. + *

    * - * @throws IOException If there is an error while writing the response. + * @param t Exception to write/format. + * @param response Response in which the given exception must be written. + * @param request Request at the origin of the error (MAY BE NULL). + * @param reqID ID of the request (which let the user and the administrator identify the failed request). (MAY BE NULL if the request is not provided) + * @param user User which sends the given request (which generates the error) (MAY BE NULL). + * @param action Type/Name of the action which generates the error (MAY BE NULL). + * + * @return true if the given error message has been successfully written in the given {@link HttpServletResponse}, + * false otherwise. */ - public void writeError(final Throwable t, final HttpServletResponse response, final HttpServletRequest request, final JobOwner user, final String action) throws IOException; + public boolean writeError(final Throwable t, final HttpServletResponse response, final HttpServletRequest request, final String reqID, final JobOwner user, final String action); /** - * Writes the described error in the given response. + *

    Write the described error in the given response.

    + * + *

    Note: + * If this function is called without at least a message and an HTTP response, nothing should be done. + * No error may be thrown. + *

    + * + *

    IMPORTANT: + * If any {@link IOException} occurs while writing the error in the given {@link HttpServletResponse} output stream, + * this function should stop and return false. In such case, the error which was supposed to be written + * may be logged. + *

    * * @param message Message to display. * @param type Type of the error: FATAL or TRANSIENT. * @param httpErrorCode HTTP error code (i.e. 404, 500). * @param response Response in which the described error must be written. * @param request Request which causes this error. + * @param reqID ID of the request (which let the user and the administrator identify the failed request). * @param user User which sends the HTTP request. * @param action Action corresponding to the given request. * - * @throws IOException If there is an error while writing the response. + * @return true if the given error message has been successfully written in the given {@link HttpServletResponse}, + * false otherwise. + */ + public boolean writeError(final String message, final ErrorType type, final int httpErrorCode, final HttpServletResponse response, final HttpServletRequest request, final String reqID, final JobOwner user, final String action); + + /** + *

    Write the given error in the given output stream.

    + * + *

    + * This function is used only for the error summary of a job (that's to say to report in the + * ../error/details parameter any error which occurs while executing a job). + *

    + * + *

    Important note: + * The error details written in the given output MUST always have the same MIME type. + * This latter MUST be returned by {@link #getErrorDetailsMIMEType()}. + *

    + * + * @param t Error to write. If error is not null, it will be displayed instead of the message of this throwable. + * @param error Summary of the error. It may particularly contain a message different from the one of the given exception. In this case, it will displayed instead of the exception's message. + * @param job The job which fails. + * @param output Stream in which the error must be written. + * + * @throws IOException If there an error while writing the error in the given stream. + * + * @see #getErrorDetailsMIMEType() + * + * @since 4.1 + */ + public void writeError(final Throwable t, final ErrorSummary error, final UWSJob job, final OutputStream output) throws IOException; + + /** + *

    Get the MIME type of the error details written by {@link #writeError(Throwable, ErrorSummary, UWSJob, OutputStream)} in the error summary.

    + * + *

    Important note: + * If NULL is returned, the MIME type will be considered as text/plain. + *

    + * + * @return MIME type of the error details document. If NULL, it will be considered as text/plain. + * + * @see #writeError(Throwable, ErrorSummary, UWSJob, OutputStream) + * + * @since 4.1 */ - public void writeError(final String message, final ErrorType type, final int httpErrorCode, final HttpServletResponse response, final HttpServletRequest request, final JobOwner user, final String action) throws IOException; + public String getErrorDetailsMIMEType(); } diff --git a/src/uws/service/file/EventFrequency.java b/src/uws/service/file/EventFrequency.java new file mode 100644 index 0000000000000000000000000000000000000000..a2c11f9a8e02192aa862d327057774815a7837f2 --- /dev/null +++ b/src/uws/service/file/EventFrequency.java @@ -0,0 +1,498 @@ +package uws.service.file; + +/* + * This file is part of UWSLibrary. + * + * UWSLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * UWSLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with UWSLibrary. If not, see . + * + * Copyright 2014-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) + */ + +import java.text.DateFormat; +import java.text.DecimalFormat; +import java.text.NumberFormat; +import java.text.SimpleDateFormat; +import java.util.Calendar; +import java.util.Date; +import java.util.GregorianCalendar; +import java.util.Scanner; +import java.util.regex.MatchResult; + +/** + *

    Lets you interpret and compute a frequency.

    + * + *

    Frequency syntax

    + * + *

    The frequency is expressed as a string at initialization of this object. This string must respect the following syntax:

    + *
      + *
    • 'D' hh mm : daily schedule at hh:mm
    • + *
    • 'W' dd hh mm : weekly schedule at the given day of the week (1:sunday, 2:monday, ..., 7:saturday) at hh:mm
    • + *
    • 'M' dd hh mm : monthly schedule at the given day of the month at hh:mm
    • + *
    • 'h' mm : hourly schedule at the given minute
    • + *
    • 'm' : scheduled every minute (for completeness :-))
    • + *
    + *

    Where: hh = integer between 0 and 23, mm = integer between 0 and 59, dd (for 'W') = integer between 1 and 7 (1:sunday, 2:monday, ..., 7:saturday), + * dd (for 'M') = integer between 1 and 31.

    + * + *

    Warning: + * The frequency type is case sensitive! Then you should particularly pay attention at the case + * when using the frequency types 'M' (monthly) and 'm' (every minute). + *

    + * + *

    + * Parsing errors are not thrown but "resolved" silently. The "solution" depends of the error. + * 2 cases of errors are considered: + *

    + *
      + *
    • Frequency type mismatch: It happens when the first character is not one of the expected (D, W, M, h, m). + * That means: bad case (i.e. 'd' rather than 'D'), another character. + * In this case, the frequency will be: daily at 00:00.
    • + * + *
    • Parameter(s) missing or incorrect: With the "daily" frequency ('D'), at least 2 parameters must be provided ; + * 3 for "weekly" ('W') and "monthly" ('M') ; only 1 for "hourly" ('h') ; none for "every minute" ('m'). + * This number of parameters is a minimum: only the n first parameters will be considered while + * the others will be ignored. + * If this minimum number of parameters is not respected or if a parameter value is incorrect, + * all parameters will be set to their default value + * (which is 0 for all parameter except dd for which it is 1).
    • + *
    + * + *

    Examples:

    + *
      + *
    • "" or NULL = every day at 00:00
    • + *
    • "D 06 30" or "D 6 30" = every day at 06:30
    • + *
    • "D 24 30" = every day at 00:00, because hh must respect the rule: 0 ≤ hh ≤ 23
    • + *
    • "d 06 30" or "T 06 30" = every day at 00:00, because the frequency type "d" (lower case of "D") or "T" do not exist
    • + *
    • "W 2 6 30" = every week on Tuesday at 06:30
    • + *
    • "W 8 06 30" = every week on Sunday at 00:00, because with 'W' dd must respect the rule: 1 ≤ dd ≤ 7
    • + *
    • "M 2 6 30" = every month on the 2nd at 06:30
    • + *
    • "M 32 6 30" = every month on the 1st at 00:00, because with 'M' dd must respect the rule: 1 ≤ dd ≤ 31
    • + *
    • "M 5 6 30 12" = every month on the 5th at 06:30, because at least 3 parameters are expected and so considered: "12" and eventual other parameters are ignored
    • + *
    + * + *

    Computing next event date

    + * + *

    + * When this class is initialized with a frequency, it is able to compute the date of the event following a given date. + * The functions {@link #nextEvent()} and {@link #nextEvent(Date)} will compute this next event date + * from, respectively, now (current date/time) and the given date (the date of the last event). Both are computing the date of the next + * event by "adding" the frequency to the given date. And finally, the computed date is stored and returned. + *

    + * + *

    Then, you have 2 possibilities to trigger the desired event:

    + *
      + *
    • By calling {@link #isTimeElapsed()}, you can test whether at the current moment the date of the next event has been reached or not. + * In function of the value returned by this function you will be then able to process the desired action or not.
    • + *
    • By creating a Timer with the next date event. Thus, the desired action will be automatically triggered at the exact moment.
    • + *

      + * + * + * @author Marc Wenger (CDS) + * @author Grégory Mantelet (ARI) + * @version 4.1 (02/2015) + * @since 4.1 + */ +public final class EventFrequency { + + /** String format of a hour or a minute number. */ + private static final NumberFormat NN = new DecimalFormat("00"); + + /** Date-Time format to use in order to identify a frequent event. */ + private static final DateFormat EVENT_ID_FORMAT = new SimpleDateFormat("yyyyMMdd_HHmm"); + + /** Ordered list of all week days (there, the first week day is Sunday). */ + private static final String[] WEEK_DAYS = {"Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"}; + + /** Ordinal day number suffix (1st, 2nd, 3rd and th for the others). */ + private static final String[] DAY_SUFFIX = {"st","nd","rd","th"}; + + /** Frequency type (D, W, M, h, m). Default value: 'D' */ + private char dwm = 'D'; + + /** "day" (dd) parameter of the frequency. */ + private int day = 0; + /** "hour" (hh) parameter of the frequency. */ + private int hour = 0; + /** "minute" (mm) parameter of the frequency. */ + private int min = 0; + + /** ID of the next event. By default, it is built using the date of the last event with the format {@link #EVENT_ID_FORMAT}. */ + private String eventID = ""; + + /** Date (in millisecond) of the next event. */ + private long nextEvent = -1; + + /** + *

      Create a new event frequency.

      + * + *

      The frequency string must respect the following syntax:

      + *
        + *
      • 'D' hh mm : daily schedule at hh:mm
      • + *
      • 'W' dd hh mm : weekly schedule at the given day of the week (1:sunday, 2:monday, ..., 7:saturday) at hh:mm
      • + *
      • 'M' dd hh mm : monthly schedule at the given day of the month at hh:mm
      • + *
      • 'h' mm : hourly schedule at the given minute
      • + *
      • 'm' : scheduled every minute (for completeness :-))
      • + *
      + *

      Where: hh = integer between 0 and 23, mm = integer between 0 and 59, dd (for 'W') = integer between 1 and 7 (1:sunday, 2:monday, ..., 7:saturday), + * dd (for 'M') = integer between 1 and 31.

      + * + *

      Warning: + * The frequency type is case sensitive! Then you should particularly pay attention at the case + * when using the frequency types 'M' (monthly) and 'm' (every minute). + *

      + * + *

      + * Parsing errors are not thrown but "resolved" silently. The "solution" depends of the error. + * 2 cases of errors are considered: + *

      + *
        + *
      • Frequency type mismatch: It happens when the first character is not one of the expected (D, W, M, h, m). + * That means: bad case (i.e. 'd' rather than 'D'), another character. + * In this case, the frequency will be: daily at 00:00.
      • + * + *
      • Parameter(s) missing or incorrect: With the "daily" frequency ('D'), at least 2 parameters must be provided ; + * 3 for "weekly" ('W') and "monthly" ('M') ; only 1 for "hourly" ('h') ; none for "every minute" ('m'). + * This number of parameters is a minimum: only the n first parameters will be considered while + * the others will be ignored. + * If this minimum number of parameters is not respected or if a parameter value is incorrect, + * all parameters will be set to their default value + * (which is 0 for all parameter except dd for which it is 1).
      • + *
      + * + *

      Examples:

      + *
        + *
      • "" or NULL = every day at 00:00
      • + *
      • "D 06 30" or "D 6 30" = every day at 06:30
      • + *
      • "D 24 30" = every day at 00:00, because hh must respect the rule: 0 ≤ hh ≤ 23
      • + *
      • "d 06 30" or "T 06 30" = every day at 00:00, because the frequency type "d" (lower case of "D") or "T" do not exist
      • + *
      • "W 2 6 30" = every week on Tuesday at 06:30
      • + *
      • "W 8 06 30" = every week on Sunday at 00:00, because with 'W' dd must respect the rule: 1 ≤ dd ≤ 7
      • + *
      • "M 2 6 30" = every month on the 2nd at 06:30
      • + *
      • "M 32 6 30" = every month on the 1st at 00:00, because with 'M' dd must respect the rule: 1 ≤ dd ≤ 31
      • + *
      • "M 5 6 30 12" = every month on the 5th at 06:30, because at least 3 parameters are expected and so considered: "12" and eventual other parameters are ignored
      • + *
      + * + * @param interval A string defining the event frequency (see above for the string format). + */ + public EventFrequency(String interval){ + String str; + + // Determine the separation between the frequency type character (D, W, M, h, m) and the parameters + // and normalize the given interval: + int p = -1; + if (interval == null) + interval = ""; + else{ + interval = interval.replaceAll("[ \t]+", " ").trim(); + p = interval.indexOf(' '); + } + + // Parse the given interval ONLY IF a frequency type is provided (even if there is no parameter): + if (p == 1 || interval.length() == 1){ + MatchResult result; + Scanner scan = null; + + // Extract and identify the frequency type: + dwm = interval.charAt(0); + str = interval.substring(p + 1); + scan = new Scanner(str); + + // Extract the parameters in function of the frequency type: + switch(dwm){ + // CASE: DAILY + case 'D': + scan.findInLine("(\\d{1,2}) (\\d{1,2})"); + try{ + result = scan.match(); + hour = parseHour(result.group(1)); + min = parseMinute(result.group(2)); + }catch(IllegalStateException ise){ + day = hour = min = 0; + } + break; + + // CASE: WEEKLY AND MONTHLY + case 'W': + case 'M': + scan.findInLine("(\\d{1,2}) (\\d{1,2}) (\\d{1,2})"); + try{ + result = scan.match(); + day = (dwm == 'W') ? parseDayOfWeek(result.group(1)) : parseDayOfMonth(result.group(1)); + hour = parseHour(result.group(2)); + min = parseMinute(result.group(3)); + }catch(IllegalStateException ise){ + day = (dwm == 'W') ? 
0 : 1; + hour = min = 0; + } + break; + + // CASE: HOURLY + case 'h': + scan.findInLine("(\\d{1,2})"); + try{ + result = scan.match(); + min = parseMinute(result.group(1)); + }catch(IllegalStateException ise){ + min = 0; + } + break; + + // CASE: EVERY MINUTE + case 'm': + // no other data needed + break; + + // CASE: UNKNOWN FREQUENCY TYPE + default: + dwm = 'D'; + day = hour = min = 0; + } + if (scan != null) + scan.close(); + } + } + + /** + * Parse a string representing the day of the week (as a number). + * + * @param dayNbStr String containing an integer representing a week day. + * + * @return The identified week day. (integer between 0 and 6 (included)) + * + * @throws IllegalStateException If the given string does not contain an integer or is not between 1 and 7 (included). + */ + private int parseDayOfWeek(final String dayNbStr) throws IllegalStateException{ + try{ + int d = Integer.parseInt(dayNbStr); + if (d >= 1 && d <= WEEK_DAYS.length) + return d - 1; + }catch(Exception e){} + throw new IllegalStateException("Incorrect day of week (" + dayNbStr + ") ; it should be between 1 and 7 (both included)!"); + } + + /** + * Parse a string representing the day of the month. + * + * @param dayStr String containing an integer representing a month day. + * + * @return The identified month day. (integer between 1 and 31 (included)) + * + * @throws IllegalStateException If the given string does not contain an integer or is not between 1 and 31 (included). + */ + private int parseDayOfMonth(final String dayStr) throws IllegalStateException{ + try{ + int d = Integer.parseInt(dayStr); + if (d >= 1 && d <= 31) + return d; + }catch(Exception e){} + throw new IllegalStateException("Incorrect day of month (" + dayStr + ") ; it should be between 1 and 31 (both included)!"); + } + + /** + * Parse a string representing the hour part of a time (hh:mm). + * + * @param hourStr String containing an integer representing an hour. + * + * @return The identified hour. 
(integer between 0 and 23 (included)) + * + * @throws IllegalStateException If the given string does not contain an integer or is not between 0 and 23 (included). + */ + private int parseHour(final String hourStr) throws IllegalStateException{ + try{ + int h = Integer.parseInt(hourStr); + if (h >= 0 && h <= 23) + return h; + }catch(Exception e){} + throw new IllegalStateException("Incorrect hour number(" + hourStr + ") ; it should be between 0 and 23 (both included)!"); + } + + /** + * Parse a string representing the minute part of a time (hh:mm). + * + * @param minStr String containing an integer representing a minute. + * + * @return The identified minute. (integer between 0 and 59 (included)) + * + * @throws IllegalStateException If the given string does not contain an integer or is not between 0 and 59 (included). + */ + private int parseMinute(final String minStr) throws IllegalStateException{ + try{ + int m = Integer.parseInt(minStr); + if (m >= 0 && m <= 59) + return m; + }catch(Exception e){} + throw new IllegalStateException("Incorrect minute number (" + minStr + ") ; it should be between 0 and 59 (both included)!"); + } + + /** + * Tell whether the interval between the last event and now is greater or equals to the frequency represented by this object. + * + * @return true if the next event date has been reached, false otherwise. + */ + public boolean isTimeElapsed(){ + return (nextEvent <= 0) || (System.currentTimeMillis() >= nextEvent); + } + + /** + * Get the date of the next event. + * + * @return Date of the next event, or NULL if no date has yet been computed. + */ + public Date getNextEvent(){ + return (nextEvent <= 0) ? null : new Date(nextEvent); + } + + /** + *

      Get a string which identity the period between the last event and the next one (whose the date has been computed by this object).

      + * + *

      This ID is built by formatting in string the given date of the last event.

      + * + * @return ID of the period before the next event. + */ + public String getEventID(){ + return eventID; + } + + /** + *

      Compute the date of the event, by adding the interval represented by this object to the current date/time.

      + * + *

      + * The role of this function is to compute the next event date, not to get it. After computation, you can get this date + * thanks to {@link #getNextEvent()}. Furthermore, using {@link #isTimeElapsed()} after having called this function will + * let you test whether the next event should (have) occur(red). + *

      + * + *

      Note: + * This function computes the next event date by taking the current date as the date of the last event. However, + * if the last event occurred at a different date, you should use {@link #nextEvent(Date)}. + *

      + * + * @return Date at which the next event should occur. (basically, it is: NOW + frequency) + */ + public Date nextEvent(){ + return nextEvent(new Date()); + } + + /** + *

      Compute the date of the event, by adding the interval represented by this object to the given date/time.

      + * + *

      + * The role of this function is to compute the next event date, not to get it. After computation, you can get this date + * thanks to {@link #getNextEvent()}. Furthermore, using {@link #isTimeElapsed()} after having called this function will + * let you test whether the next event should (have) occur(red). + *

      + * + * @return Date at which the next event should occur. (basically, it is lastEventDate + frequency) + */ + public Date nextEvent(final Date lastEventDate){ + // Set the calendar to the given date: + GregorianCalendar date = new GregorianCalendar(); + date.setTime(lastEventDate); + + // Compute the date of the next event: + switch(dwm){ + // CASE: DAILY + case 'D': + date.add(Calendar.DAY_OF_YEAR, 1); + date.set(Calendar.HOUR_OF_DAY, hour); + date.set(Calendar.MINUTE, min); + date.set(Calendar.SECOND, 0); + break; + + // CASE: WEEKLY + case 'W': + // find the next right day to trigger the rotation + int weekday = date.get(Calendar.DAY_OF_WEEK); // sunday=1, ... saturday=7 + if (weekday == day){ + date.add(Calendar.WEEK_OF_YEAR, 1); + }else{ + // for the first scheduling which can happen any day + int delta = day - weekday; + if (delta <= 0) + delta += 7; + date.add(Calendar.DAY_OF_YEAR, delta); + } + date.set(Calendar.HOUR_OF_DAY, hour); + date.set(Calendar.MINUTE, min); + date.set(Calendar.SECOND, 0); + break; + + // CASE: MONTHLY + case 'M': + date.add(Calendar.MONTH, 1); + date.set(Calendar.DAY_OF_MONTH, day); + date.set(Calendar.HOUR_OF_DAY, hour); + date.set(Calendar.MINUTE, min); + date.set(Calendar.SECOND, 0); + break; + + // CASE: HOURLY + case 'h': + date.add(Calendar.HOUR_OF_DAY, 1); + date.set(Calendar.MINUTE, min); + date.set(Calendar.SECOND, 0); + break; + + // CASE: EVERY MINUTE + case 'm': + date.add(Calendar.MINUTE, 1); + date.set(Calendar.SECOND, 0); + break; + + /* OTHERWISE, the next event date is the given date! 
*/ + } + + // Save it in millisecond for afterward comparison with the current time (so that telling whether the time is elapsed or not): + nextEvent = date.getTimeInMillis(); + + // Build the ID of this waiting period (the period between the last event and the next one): + eventID = EVENT_ID_FORMAT.format(new Date()); + + // Return the date of the next event: + return date.getTime(); + } + + /** + * Display in a human readable way the frequency represented by this object. + * + * @return a string, i.e. weekly on Sunday at HH:MM + */ + @Override + public String toString(){ + StringBuilder str = new StringBuilder(); + switch(dwm){ + case 'D': + str.append("daily"); + str.append(" at ").append(NN.format(hour)).append(':').append(NN.format(min)); + break; + case 'W': + str.append("weekly on ").append(WEEK_DAYS[day % 7]); + str.append(" at ").append(NN.format(hour)).append(':').append(NN.format(min)); + break; + case 'M': + str.append("monthly on the ").append(day).append(DAY_SUFFIX[Math.min(day - 1, 3)]); + str.append(" at ").append(NN.format(hour)).append(':').append(NN.format(min)); + break; + case 'h': + str.append("hourly at ").append(NN.format(min)); + break; + case 'm': + str.append("every minute"); + break; + } + + return str.toString(); + } +} \ No newline at end of file diff --git a/src/uws/service/file/LocalUWSFileManager.java b/src/uws/service/file/LocalUWSFileManager.java index 41f89f79d6b53b168d19a5a9018a4204333ec9d7..0bbb628cb3b848594d3606ae7a41a5f2d6a9ef7c 100644 --- a/src/uws/service/file/LocalUWSFileManager.java +++ b/src/uws/service/file/LocalUWSFileManager.java @@ -16,10 +16,14 @@ package uws.service.file; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . 
* - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ +import java.io.BufferedInputStream; +import java.io.BufferedOutputStream; import java.io.File; +import java.io.FileFilter; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; @@ -27,63 +31,74 @@ import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.PrintWriter; - +import java.net.URI; +import java.net.URISyntaxException; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; -import java.util.HashMap; import java.util.Iterator; -import java.util.Map; import java.util.NoSuchElementException; -import java.io.FileFilter; - import uws.UWSException; import uws.UWSToolBox; - import uws.job.ErrorSummary; import uws.job.Result; import uws.job.UWSJob; - import uws.job.user.JobOwner; - -import uws.service.log.UWSLogType; +import uws.service.log.UWSLog.LogLevel; +import uws.service.request.UploadFile; /** *

      All UWS files are stored in the local machine into the specified directory.

      + * *

      * The name of the log file, the result files and the backup files may be customized by overriding the following functions: - * {@link #getLogFileName()}, {@link #getResultFileName(Result, UWSJob)}, {@link #getBackupFileName(JobOwner)} and {@link #getBackupFileName()}. + * {@link #getLogFileName(uws.service.log.UWSLog.LogLevel, String)}, {@link #getResultFileName(Result, UWSJob)}, {@link #getBackupFileName(JobOwner)} and {@link #getBackupFileName()}. *

      + * *

      * By default, results and backups are grouped by owner/user and owners/users are grouped thanks to {@link DefaultOwnerGroupIdentifier}. * By using the appropriate constructor, you can change these default behaviors. *

      * - * @author Grégory Mantelet (CDS) - * @version 06/2012 + *

      + * A log file rotation is set by default so that avoiding a too big log file after several months/years of use. + * By default the rotation is done every month on the 1st at 6am. This frequency can be changed easily thanks to the function + * {@link #setLogRotationFreq(String)}. + *

      + * + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (02/2015) */ public class LocalUWSFileManager implements UWSFileManager { /** Format to use to format dates. */ private DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); - protected static final String DEFAULT_HTTP_LOG_FILE_NAME = "service_http_activity.log"; - protected static final String DEFAULT_DEBUG_LOG_FILE_NAME = "service_debug.log"; - protected static final String DEFAULT_LOG_FILE_NAME = "service_activity.log"; - protected static final String DEFAULT_BACKUP_FILE_NAME = "uws.backup"; - - private static final String UNKNOWN_LOG_TYPE_GROUP = "???"; + /** Default name of the log file. */ + protected static final String DEFAULT_LOG_FILE_NAME = "service.log"; + /** Default name of the general UWS backup file. */ + protected static final String DEFAULT_BACKUP_FILE_NAME = "service.backup"; + /** Directory in which all files managed by this class will be written and read. */ protected final File rootDirectory; + /** Output toward the service log file. */ + protected PrintWriter logOutput = null; + /** Frequency at which the log file must be "rotated" (the file is renamed with the date of its first write and a new log file is created). + * Thus, too big log files can be avoided. */ + protected EventFrequency logRotation = new EventFrequency("D 0 0"); // Log file rotation every day at midnight. + + /** Indicate whether a directory must be used to gather all jobs, results and errors related to one identified user. + * If FALSE, all jobs, results and errors will be in only one directory, whoever owns them. */ protected final boolean oneDirectoryForEachUser; + /** Gather user directories, set by set. At the end, several user group directories may be created. + * This option is considered only if {@link #oneDirectoryForEachUser} is TRUE. */ protected final boolean groupUserDirectories; + /** Object giving the policy about how to group user directories. 
*/ protected final OwnerGroupIdentifier ownerGroupId; - protected Map logOutputs = new HashMap(); - /** *

      Builds a {@link UWSFileManager} which manages all UWS files in the given directory.

      *

      @@ -93,7 +108,8 @@ public class LocalUWSFileManager implements UWSFileManager { * * @param root UWS root directory. * - * @throws UWSException If the given root directory is null, is not a directory or has not the READ and WRITE permissions. + * @throws NullPointerException If the given root directory is null. + * @throws UWSException If the given file is not a directory or has not the READ and WRITE permissions. * * @see #LocalUWSFileManager(File, boolean, boolean, OwnerGroupIdentifier) */ @@ -113,7 +129,8 @@ public class LocalUWSFileManager implements UWSFileManager { * @param groupUserDirectories true to group user directories, false otherwise. * note: this value is ignored if the previous parameter is false. * - * @throws UWSException If the given root directory is null, is not a directory or has not the READ and WRITE permissions. + * @throws NullPointerException If the given root directory is null. + * @throws UWSException If the given file is not a directory or has not the READ and WRITE permissions. * * @see #LocalUWSFileManager(File, boolean, boolean, OwnerGroupIdentifier) */ @@ -135,7 +152,8 @@ public class LocalUWSFileManager implements UWSFileManager { * {@link DefaultOwnerGroupIdentifier} will be chosen as default group identifier. *

    * - * @throws UWSException If the given root directory is null, is not a directory or has not the READ and WRITE permissions. + * @throws NullPointerException If the given root directory is null. + * @throws UWSException If the given file is not a directory or has not the READ and WRITE permissions. */ public LocalUWSFileManager(final File root, final boolean oneDirectoryForEachUser, final boolean groupUserDirectories, final OwnerGroupIdentifier ownerGroupIdentifier) throws UWSException{ if (root == null) @@ -225,64 +243,109 @@ public class LocalUWSFileManager implements UWSFileManager { /* ******************* */ /* LOG FILE MANAGEMENT */ /* ******************* */ + /** - *

    Lets grouping log messages by log type.

    - *

    For instance: by default all messages of type INFO, WARNING and ERROR are written in the same file.

    + * Get the frequency of the log file rotation + * in a human readable way. * - * @param logType Type of the message to log. + * @return A human readable frequency of the log file rotation. + */ + public final String getLogRotationFreq(){ + return logRotation.toString(); + } + + /** + *

    Set the frequency at which a rotation of the log file must be done.

    + * + *

    + * "rotation" means here, to close the currently used log file, to rename it so that suffixing it + * with the date at which the first log has been written in it, and to create a new log file. + *

    + * + *

    The frequency string must respect the following syntax:

    + *
      + *
    • 'D' hh mm : daily schedule at hh:mm
    • + *
    • 'W' dd hh mm : weekly schedule at the given day of the week (1:sunday, 2:monday, ..., 7:saturday) at hh:mm
    • + *
    • 'M' dd hh mm : monthly schedule at the given day of the month at hh:mm
    • + *
    • 'h' mm : hourly schedule at the given minute
    • + *
    • 'm' : scheduled every minute (for completness :-))
    • + *
    + *

    Where: hh = integer between 0 and 23, mm = integer between 0 and 59, dd (for 'W') = integer between 1 and 7 (1:sunday, 2:monday, ..., 7:saturday), + * dd (for 'M') = integer between 1 and 31.

    + * + *

    Warning: + * The frequency type is case sensitive! Then you should particularly pay attention at the case + * when using the frequency types 'M' (monthly) and 'm' (every minute). + *

    * - * @return Name of the log type group. + *

    + * Parsing errors are not thrown but "resolved" silently. The "solution" depends of the error. + * 2 cases of errors are considered: + *

    + *
      + *
    • Frequency type mismatch: It happens when the first character is not one of the expected (D, W, M, h, m). + * That means: bad case (i.e. 'd' rather than 'D'), another character. + * In this case, the frequency will be: daily at 00:00.
    • + * + *
    • Parameter(s) missing or incorrect: With the "daily" frequency ('D'), at least 2 parameters must be provided ; + * 3 for "weekly" ('W') and "monthly" ('M') ; only 1 for "hourly" ('h') ; none for "every minute" ('m'). + * This number of parameters is a minimum: only the n first parameters will be considered while + * the others will be ignored. + * If this minimum number of parameters is not respected or if a parameter value is incorrect, + * all parameters will be set to their default value + * (which is 0 for all parameter except dd for which it is 1).
    • + *
    + * + *

    Examples:

    + *
      + *
    • "" or NULL = every day at 00:00
    • + *
    • "D 06 30" or "D 6 30" = every day at 06:30
    • + *
    • "D 24 30" = every day at 00:00, because hh must respect the rule: 0 ≤ hh ≤ 23
    • + *
    • "d 06 30" or "T 06 30" = every day at 00:00, because the frequency type "d" (lower case of "D") or "T" do not exist
    • + *
    • "W 2 6 30" = every week on Tuesday at 06:30
    • + *
    • "W 8 06 30" = every week on Sunday at 00:00, because with 'W' dd must respect the rule: 1 ≤ dd ≤ 7
    • + *
    • "M 2 6 30" = every month on the 2nd at 06:30
    • + *
    • "M 32 6 30" = every month on the 1st at 00:00, because with 'M' dd must respect the rule: 1 ≤ dd ≤ 31
    • + *
    • "M 5 6 30 12" = every month on the 5th at 06:30, because at least 3 parameters are expected and so considered: "12" and eventual other parameters are ignored
    • + *
    + * + * @param interval Interval between two log rotations. */ - protected String getLogTypeGroup(final UWSLogType logType){ - switch(logType){ - case INFO: - case WARNING: - case ERROR: - return "DefaultLog"; - case DEBUG: - case HTTP_ACTIVITY: - return logType.toString(); - case CUSTOM: - return logType.getCustomType(); - default: - return UNKNOWN_LOG_TYPE_GROUP; - } + public final void setLogRotationFreq(final String interval){ + logRotation = new EventFrequency(interval); } /** *

    Gets the name of the UWS log file.

    - *

    By default: {@link #DEFAULT_LOG_FILE_NAME} or {@link #DEFAULT_HTTP_LOG_FILE_NAME} (to log an activity message, that's to say: thread status or http request).

    * - * @param logType Type of message to log. + *

    By default: {@link #DEFAULT_LOG_FILE_NAME}.

    + * + * @param level Level of the message to log (DEBUG, INFO, WARNING, ERROR, FATAL). + * @param context Context of the message to log (UWS, HTTP, THREAD, JOB, ...). * * @return The name of the UWS log file. */ - protected String getLogFileName(final String logTypeGroup){ - if (logTypeGroup == UWSLogType.HTTP_ACTIVITY.toString()) - return DEFAULT_HTTP_LOG_FILE_NAME; - else if (logTypeGroup.equals(UWSLogType.DEBUG.toString())) - return DEFAULT_DEBUG_LOG_FILE_NAME; - else - return DEFAULT_LOG_FILE_NAME; + protected String getLogFileName(final LogLevel level, final String context){ + return DEFAULT_LOG_FILE_NAME; } /** * Gets the UWS log file. * - * @param logType Type of message to log. + * @param level Level of the message to log (DEBUG, INFO, WARNING, ERROR, FATAL). + * @param context Context of the message to log (UWS, HTTP, THREAD, JOB, ...). * * @return The UWS log file. * - * @see #getLogFileName() + * @see #getLogFileName(uws.service.log.UWSLog.LogLevel, String) */ - protected File getLogFile(final String logTypeGroup){ - return new File(rootDirectory, getLogFileName(logTypeGroup)); + protected File getLogFile(final LogLevel level, final String context){ + return new File(rootDirectory, getLogFileName(level, context)); } @Override - public InputStream getLogInput(final UWSLogType logType) throws IOException{ - String logTypeGroup = getLogTypeGroup(logType); - File logFile = getLogFile(logTypeGroup); + public InputStream getLogInput(final LogLevel level, final String context) throws IOException{ + File logFile = getLogFile(level, context); if (logFile.exists()) return new FileInputStream(logFile); else @@ -290,24 +353,52 @@ public class LocalUWSFileManager implements UWSFileManager { } @Override - public PrintWriter getLogOutput(final UWSLogType logType) throws IOException{ - String logTypeGroup = getLogTypeGroup(logType); - PrintWriter output = logOutputs.get(logTypeGroup); - if (output == null){ - File logFile = getLogFile(logTypeGroup); + public 
synchronized PrintWriter getLogOutput(final LogLevel level, final String context) throws IOException{ + // If a file rotation is needed... + if (logOutput != null && logRotation != null && logRotation.isTimeElapsed()){ + // ...Close the output stream: + logOutput.close(); + logOutput = null; + + // ...Rename this log file: + // get the file: + File logFile = getLogFile(level, context); + // and its name: + String logFileName = logFile.getName(); + // separate the file name from the extension: + String fileExt = ""; + int indFileExt = logFileName.lastIndexOf('.'); + if (indFileExt >= 0){ + fileExt = logFileName.substring(indFileExt); + logFileName = logFileName.substring(0, indFileExt); + } + // build the new file name and rename the log file: + logFile.renameTo(new File(logFile.getParentFile(), logFileName + "_" + logRotation.getEventID() + fileExt)); + } + + // If the log output is not yet set or if a file rotation has been done... + if (logOutput == null){ + // ...Create the output: + File logFile = getLogFile(level, context); createParentDir(logFile); - output = new PrintWriter(new FileOutputStream(logFile, true), true); - printLogHeader(output); - logOutputs.put(logTypeGroup, output); + logOutput = new PrintWriter(new FileOutputStream(logFile, true), true); + + // ...Write a log header: + printLogHeader(logOutput); + + // ...Set the date of the next rotation: + if (logRotation != null) + logRotation.nextEvent(); } - return output; + + return logOutput; } /** * Print a header into the log file so that separating older log messages to the new ones. 
*/ protected void printLogHeader(final PrintWriter out){ - String msgHeader = "########################################### LOG STARTS " + dateFormat.format(new Date()) + " ###########################################"; + String msgHeader = "########################################### LOG STARTS " + dateFormat.format(new Date()) + " (file rotation: " + logRotation + ") ###########################################"; StringBuffer buf = new StringBuffer(""); for(int i = 0; i < msgHeader.length(); i++) buf.append('#'); @@ -320,6 +411,120 @@ public class LocalUWSFileManager implements UWSFileManager { out.flush(); } + /* ************************* */ + /* UPLOADED FILES MANAGEMENT */ + /* ************************* */ + + /** + * Create a File instance from the given upload file description. + * This function is able to deal with location as URI and as file path. + * + * @param upload Description of an uploaded file. + * + * @return The corresponding File object. + * + * @since 4.1 + */ + protected final File getFile(final UploadFile upload){ + if (upload.getLocation().startsWith("file:")){ + try{ + return new File(new URI(upload.getLocation())); + }catch(URISyntaxException use){ + return new File(upload.getLocation()); + } + }else + return new File(upload.getLocation()); + } + + @Override + public InputStream getUploadInput(final UploadFile upload) throws IOException{ + // Check the source file: + File source = getFile(upload); + if (!source.exists()) + throw new FileNotFoundException("The uploaded file submitted with the parameter \"" + upload.paramName + "\" can not be found any more on the server!"); + // Return the stream: + return new FileInputStream(source); + } + + @Override + public InputStream openURI(final URI uri) throws UnsupportedURIProtocolException, IOException{ + String scheme = uri.getScheme(); + if (scheme.equalsIgnoreCase("http") || scheme.equalsIgnoreCase("ftp")) + return uri.toURL().openStream(); + else + throw new UnsupportedURIProtocolException(uri); 
+ } + + @Override + public void deleteUpload(final UploadFile upload) throws IOException{ + File f = getFile(upload); + if (!f.exists()) + return; + else if (f.isDirectory()) + throw new IOException("Incorrect location! An uploaded file must be a regular file, not a directory. (file location: \"" + f.getPath() + "\")"); + else{ + try{ + if (!f.delete()) + throw new IOException("Can not delete the file!"); + }catch(SecurityException se){ + throw new IOException("Unexpected permission restriction on the uploaded file \"" + f.getPath() + "\" => can not delete it!"); + } + } + } + + @Override + public String moveUpload(final UploadFile upload, final UWSJob destination) throws IOException{ + // Check the source file: + File source = getFile(upload); + if (!source.exists()) + throw new FileNotFoundException("The uploaded file submitted with the parameter \"" + upload.paramName + "\" can not be found any more on the server!"); + + // Build the final location (in the owner directory, under the name "UPLOAD_{job-id}_{param-name}": + File ownerDir = getOwnerDirectory(destination.getOwner()); + File copy = new File(ownerDir, "UPLOAD_" + destination.getJobId() + "_" + upload.paramName); + + OutputStream output = null; + InputStream input = null; + boolean done = false; + try{ + // open the input and output: + input = new BufferedInputStream(getUploadInput(upload)); + output = new BufferedOutputStream(new FileOutputStream(copy)); + // proceed to the copy: + byte[] buffer = new byte[2048]; + int len; + while((len = input.read(buffer)) > 0) + output.write(buffer, 0, len); + output.flush(); + output.close(); + output = null; + // close the input and delete the source file: + input.close(); + input = null; + source.delete(); + // return the new location: + done = true; + return copy.toURI().toString(); + }finally{ + if (output != null){ + try{ + output.close(); + }catch(IOException ioe){} + } + if (input != null){ + try{ + input.close(); + }catch(IOException ioe){} + } + // In case 
of problem, the copy must be deleted: + if (!done && copy.exists()){ + try{ + copy.delete(); + }catch(SecurityException ioe){} + } + } + } + /* *********************** */ /* RESULT FILES MANAGEMENT */ /* *********************** */ @@ -477,7 +682,7 @@ public class LocalUWSFileManager implements UWSFileManager { */ protected String getBackupFileName(final JobOwner owner) throws IllegalArgumentException{ if (owner == null || owner.getID() == null || owner.getID().trim().isEmpty()) - throw new IllegalArgumentException("Missing owner ! Can not get the backup file of an unknown owner. See LocalUWSFileManager.getBackupFile(JobOwner)"); + throw new IllegalArgumentException("Missing owner! Can not get the backup file of an unknown owner."); return owner.getID().replaceAll(File.separator, "_") + ".backup"; } diff --git a/src/uws/service/file/UWSFileManager.java b/src/uws/service/file/UWSFileManager.java index bc8520b60b1ffa9cf3d2165b6399c94478da680f..8dbc7f7471c29141f1be524ec4d321968769c1e5 100644 --- a/src/uws/service/file/UWSFileManager.java +++ b/src/uws/service/file/UWSFileManager.java @@ -16,23 +16,25 @@ package uws.service.file; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ +import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.PrintWriter; - +import java.net.URI; +import java.net.URL; import java.util.Iterator; import uws.job.ErrorSummary; -import uws.job.UWSJob; import uws.job.Result; - +import uws.job.UWSJob; import uws.job.user.JobOwner; - -import uws.service.log.UWSLogType; +import uws.service.log.UWSLog.LogLevel; +import uws.service.request.UploadFile; /** *

    Lets accessing any file managed by a UWS service.

    @@ -42,8 +44,8 @@ import uws.service.log.UWSLogType; * the results, log or backup file generated and read by a UWS. *

    * - * @author Grégory Mantelet (CDS) - * @version 05/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (11/2014) * * @see LocalUWSFileManager */ @@ -55,20 +57,117 @@ public interface UWSFileManager { /** * Gets an input stream on the log file of this UWS. - * @param logType Type of the message to log. + * + * @param level Level of the message to log (DEBUG, INFO, WARNING, ERROR or FATAL). + * @param context Context of the message to log (UWS, HTTP, JOB, THREAD, ...). + * * @return An input on the log file or null if there is no log file. + * * @throws IOException If there is an error while opening an input stream on the log file. */ - public InputStream getLogInput(final UWSLogType logType) throws IOException; + public InputStream getLogInput(final LogLevel level, final String context) throws IOException; /** *

    Gets an output stream on the log file of this UWS.

    *

    note: The log file must be automatically created if needed.

    - * @param logType Type of the message to log. + * + * @param level Level of the message to log (DEBUG, INFO, WARNING, ERROR or FATAL). + * @param context Context of the message to log (UWS, HTTP, JOB, THREAD, ...). + * * @return An output on the log file. + * * @throws IOException If there is an error while creating the log file or while opening an output stream on it. */ - public PrintWriter getLogOutput(final UWSLogType logType) throws IOException; + public PrintWriter getLogOutput(final LogLevel level, final String context) throws IOException; + + /* ************************* */ + /* UPLOADED FILES MANAGEMENT */ + /* ************************* */ + + /**

    Temporary directory in which uploaded files will be stored when parsing the HTTP request.

    + *

    IMPORTANT 1: + * Uploaded files should be then moved using {@link UploadFile#move(UWSJob)} when the job creation or update is validated. + *

    + *

    IMPORTANT 2: + * As qualified above, this directory is temporary. It means that it should be emptied sometimes. + * It is particularly important because when a delete or move operation fails on uploaded files, no log or error might + * be published. + *

    + *

    Note: + * The default value is the temporary directory of the system (i.e. \tmp or \var\tmp on Unix/Linux/MacOS, c:\temp on Windows). + *

    + * @since 4.1 */ + public static File TMP_UPLOAD_DIR = new File(System.getProperty("java.io.tmpdir")); + + /** + * Open a stream toward the specified file, submitted inline in an HTTP request. + * + * @param upload Description of the uploaded file. + * + * @return Input to the specified uploaded file. + * + * @throws IOException If any error occurs while opening the stream. + * + * @since 4.1 + */ + public InputStream getUploadInput(final UploadFile upload) throws IOException; + + /** + *

    Open a stream toward the given URI.

    + * + *

    + * Most of the time, the given URI uses the protocol http, https or ftp, which makes + * the URI perfectly understandable by {@link URL} which is then able to open easily + * a stream (cf {@link URL#openStream()}). However, a different scheme/protocol could + * be used ; particularly VO ones like "ivo" and "vos". It is for these particular + * cases that this function has been designed: in order to provide an implementation + * supporting additional protocols. + *

    + * + * @param uri URI of any resource to read. + * + * @return Input to the specified resource. + * + * @throws UnsupporteURIProtocol If the protocol is not supported by this implementation. + * @throws IOException If another error occurs while opening the stream. + * + * @since 4.1 + */ + public InputStream openURI(final URI uri) throws UnsupportedURIProtocolException, IOException; + + /** + * Delete definitely the specified file, submitted inline in an HTTP request. + * + * @param upload Description of the uploaded file. + * + * @throws IOException If any error occurs while deleting the file. + * + * @since 4.1 + */ + public void deleteUpload(final UploadFile upload) throws IOException; + + /** + *

    Move the specified file from its current location to a location related to the given job.

    + * + *

    Note: + * This function is generally used only once: after the HTTP request parsing, when creating or updating a job and only if the action has been accepted. + *

    + * + *

    IMPORTANT: + * This function might not be able to update the location inside the given {@link UploadFile}. For this reason, + * it is strongly recommended to not call directly this function, but to use {@link UploadFile#move(UWSJob)}. + *

    + * + * @param upload Description of the uploaded file to move. + * @param destination Job in which the uploaded file will be used. + * + * @return The new location of the uploaded file. + * + * @throws IOException If any error occurs while moving the file. + * + * @since 4.1 + */ + public String moveUpload(final UploadFile upload, final UWSJob destination) throws IOException; /* *********************** */ /* RESULT FILES MANAGEMENT */ diff --git a/src/uws/service/file/UnsupportedURIProtocolException.java b/src/uws/service/file/UnsupportedURIProtocolException.java new file mode 100644 index 0000000000000000000000000000000000000000..cc6a2da89862ef00d0a182a2c5822763a6a77623 --- /dev/null +++ b/src/uws/service/file/UnsupportedURIProtocolException.java @@ -0,0 +1,45 @@ +package uws.service.file; + +/* + * This file is part of UWSLibrary. + * + * UWSLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * UWSLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with UWSLibrary. If not, see . + * + * Copyright 2014 - Astronomisches Rechen Institut (ARI) + */ + +import java.net.URI; + +import uws.UWSException; + +/** + * Error sent when trying to read a remote file using a URI whose the scheme/protocol is not supported. + * + * @author Grégory Mantelet (ARI) + * @version 4.1 (11/2014) + * @since 4.1 + */ +public class UnsupportedURIProtocolException extends UWSException { + private static final long serialVersionUID = 1L; + + /** + * Build an {@link UnsupportedURIProtocolException}. 
+ * + * @param uri The URI whose the scheme/protocol is incorrect. + */ + public UnsupportedURIProtocolException(final URI uri){ + super(UWSException.BAD_REQUEST, "Unsupported protocol: \"" + (uri != null ? uri.getScheme() : "") + "\"! => can not open the resource \"" + uri + "\"."); + } + +} diff --git a/src/uws/service/log/DefaultUWSLog.java b/src/uws/service/log/DefaultUWSLog.java index 5fd9586ef9d5052a1e8393df7c16e36ffe539bb6..f0ff54c82652f76b029212a6d66b02374f4dee53 100644 --- a/src/uws/service/log/DefaultUWSLog.java +++ b/src/uws/service/log/DefaultUWSLog.java @@ -16,37 +16,34 @@ package uws.service.log; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012-2015 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ import java.io.IOException; import java.io.OutputStream; import java.io.PrintWriter; - import java.text.DateFormat; import java.text.SimpleDateFormat; - import java.util.Date; -import java.util.Enumeration; +import java.util.Map; +import java.util.Map.Entry; import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; import uws.UWSException; - -import uws.job.JobList; +import uws.UWSToolBox; import uws.job.UWSJob; - import uws.job.user.JobOwner; - import uws.service.UWS; - import uws.service.file.UWSFileManager; /** *

    Default implementation of {@link UWSLog} interface which lets logging any message about a UWS.

    * - * @author Grégory Mantelet (CDS) - * @version 06/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (04/2015) */ public class DefaultUWSLog implements UWSLog { @@ -57,15 +54,21 @@ public class DefaultUWSLog implements UWSLog { protected final UWSFileManager fileManager; protected final PrintWriter defaultOutput; - /** - *

    The minimum value of the HTTP status code required to print the stack trace of a HTTP error.

    - *

    note: This value is used only by the function {@link #httpRequest(HttpServletRequest, JobOwner, String, int, String, Throwable)}.

    - */ - protected int minResponseCodeForStackTrace = 500; + /**

    Minimum level that a message must have in order to be logged.

    + *

    The default behavior is the following:

    + *
      + *
    • DEBUG: every messages are logged.
    • + *
    • INFO: every messages EXCEPT DEBUG are logged.
    • + *
    • WARNING: every messages EXCEPT DEBUG and INFO are logged.
    • + *
    • ERROR: only ERROR and FATAL messages are logged.
    • + *
    • FATAL: only FATAL messages are logged.
    • + *
    + * @since 4.1 */ + protected LogLevel minLogLevel = LogLevel.DEBUG; /** *

    Builds a {@link UWSLog} which will use the file manager - * of the given UWS to get the log output (see {@link UWSFileManager#getLogOutput(UWSLogType)}).

    + * of the given UWS to get the log output (see {@link UWSFileManager#getLogOutput(uws.service.log.UWSLog.LogLevel, String)}).

    * *

    note 1: This constructor is particularly useful if the file manager of the given UWS may change.

    *

    note 2: If no output can be found in the file manager (or if there is no file manager), @@ -81,10 +84,11 @@ public class DefaultUWSLog implements UWSLog { /** *

    Builds a {@link UWSLog} which will use the given file - * manager to get the log output (see {@link UWSFileManager#getLogOutput(UWSLogType)}).

    + * manager to get the log output (see {@link UWSFileManager#getLogOutput(uws.service.log.UWSLog.LogLevel, String)}).

    * *

    note 1: This constructor is particularly useful if the way of managing log output may change in the given file manager. - * Indeed, the output may change in function of the type of message to log ({@link UWSLogType}).

    + * Indeed, the output may change in function of the type of message to log ({@link uws.service.log.UWSLog.LogLevel}).

    + * *

    note 2 If no output can be found in the file manager the standard error output ({@link System#err}) * will be chosen automatically for all log messages.

    * @@ -100,7 +104,7 @@ public class DefaultUWSLog implements UWSLog { *

    Builds a {@link UWSLog} which will print all its * messages into the given stream.

    * - *

    note: the given output will be used whatever is the type of message to log ({@link UWSLogType}).

    + *

    note: the given output will be used whatever is the type of message to log ({@link uws.service.log.UWSLog.LogLevel}).

    * * @param output An output stream. */ @@ -114,7 +118,7 @@ public class DefaultUWSLog implements UWSLog { *

    Builds a {@link UWSLog} which will print all its * messages into the given stream.

    * - *

    note: the given output will be used whatever is the type of message to log ({@link UWSLogType}).

    + *

    note: the given output will be used whatever is the type of message to log ({@link uws.service.log.UWSLog.LogLevel}).

    * * @param writer A print writer. */ @@ -124,6 +128,51 @@ public class DefaultUWSLog implements UWSLog { defaultOutput = writer; } + /** + *

    Get the minimum level that a message must have in order to be logged.

    + * + *

    The default behavior is the following:

    + *
      + *
    • DEBUG: every messages are logged.
    • + *
    • INFO: every messages EXCEPT DEBUG are logged.
    • + *
    • WARNING: every messages EXCEPT DEBUG and INFO are logged.
    • + *
    • ERROR: only ERROR and FATAL messages are logged.
    • + *
    • FATAL: only FATAL messages are logged.
    • + *
    + * + * @return The minimum log level. + * + * @since 4.1 + */ + public final LogLevel getMinLogLevel(){ + return minLogLevel; + } + + /** + *

    Set the minimum level that a message must have in order to be logged.

    + * + *

    The default behavior is the following:

    + *
      + *
    • DEBUG: every messages are logged.
    • + *
    • INFO: every messages EXCEPT DEBUG are logged.
    • + *
    • WARNING: every messages EXCEPT DEBUG and INFO are logged.
    • + *
    • ERROR: only ERROR and FATAL messages are logged.
    • + *
    • FATAL: only FATAL messages are logged.
    • + *
    + * + *

    Note: + * If the given level is NULL, this function has no effect. + *

    + * + * @param newMinLevel The new minimum log level. + * + * @since 4.1 + */ + public final void setMinLogLevel(final LogLevel newMinLevel){ + if (newMinLevel != null) + minLogLevel = newMinLevel; + } + /** * Gets the date formatter/parser to use for any date read/write into this logger. * @return A date formatter/parser. @@ -141,44 +190,24 @@ public class DefaultUWSLog implements UWSLog { this.dateFormat = dateFormat; } - /** - *

    Gets the minimum value of the HTTP status code required to print the stack trace of a HTTP error.

    - * - *

    note: This value is used only by the function {@link #httpRequest(HttpServletRequest, JobOwner, String, int, String, Throwable)}.

    - * - * @return A HTTP response status code. - */ - public int getMinResponseCodeForStackTrace(){ - return minResponseCodeForStackTrace; - } - - /** - *

    Sets the minimum value of the HTTP status code required to print the stack trace of a HTTP error.

    - * - *

    note: This value is used only by the function {@link #httpRequest(HttpServletRequest, JobOwner, String, int, String, Throwable)}.

    - * - * @param httpCode A HTTP response status code. - */ - public void setMinResponseCodeForStackTrace(final int httpCode){ - minResponseCodeForStackTrace = httpCode; - } - /** *

    Gets an output for the given type of message to print.

    * *

    The {@link System#err} output is used if none can be found in the {@link UWS} or the {@link UWSFileManager} * given at the creation, or if the given output stream or writer is NULL.

    * - * @param logType Type of the message to print; + * @param level Level of the message to print (DEBUG, INFO, WARNING, ERROR or FATAL). + * @param context Context of the message to print (UWS, HTTP, JOB, THREAD). + * * @return A writer. */ - protected PrintWriter getOutput(final UWSLogType logType){ + protected PrintWriter getOutput(final LogLevel level, final String context){ try{ if (uws != null){ if (uws.getFileManager() != null) - return uws.getFileManager().getLogOutput(logType); + return uws.getFileManager().getLogOutput(level, context); }else if (fileManager != null) - return fileManager.getLogOutput(logType); + return fileManager.getLogOutput(level, context); else if (defaultOutput != null) return defaultOutput; }catch(IOException ioe){ @@ -192,249 +221,414 @@ public class DefaultUWSLog implements UWSLog { /* *********************** */ /** - * Logs the given message (and exception, if any). + *

    Normalize a log message.

    + * + *

    + * Since a log entry will a tab-separated concatenation of information, additional tabulations or new-lines + * would corrupt a log entry. This function replaces such characters by one space. Only \r are definitely deleted. + *

    + * + * @param message Log message to normalize. + * + * @return The normalized log message. + * + * @since 4.1 + */ + protected String normalizeMessage(final String message){ + if (message == null) + return null; + else + return message.replaceAll("[\n\t]", " ").replaceAll("\r", ""); + } + + /** + *

    Tells whether a message with the given error level can be logged or not.

    + * + *

    In function of the minimum log level of this class, the default behavior is the following:

    + *
      + *
    • DEBUG: every messages are logged.
    • + *
    • INFO: every messages EXCEPT DEBUG are logged.
    • + *
    • WARNING: every messages EXCEPT DEBUG and INFO are logged.
    • + *
    • ERROR: only ERROR and FATAL messages are logged.
    • + *
    • FATAL: only FATAL messages are logged.
    • + *
    + * + * @param msgLevel Level of the message which has been asked to log. Note: if NULL, it will be considered as DEBUG. + * + * @return true if the message associated with the given log level can be logged, false otherwise. + * + * @since 4.1 + */ + protected boolean canLog(LogLevel msgLevel){ + // No level specified => DEBUG + if (msgLevel == null) + msgLevel = LogLevel.DEBUG; + + // Decide in function of the minimum log level set in this class: + switch(minLogLevel){ + case INFO: + return (msgLevel != LogLevel.DEBUG); + case WARNING: + return (msgLevel != LogLevel.DEBUG && msgLevel != LogLevel.INFO); + case ERROR: + return (msgLevel == LogLevel.ERROR || msgLevel == LogLevel.FATAL); + case FATAL: + return (msgLevel == LogLevel.FATAL); + case DEBUG: + default: + return true; + } + } + + @Override + public void log(LogLevel level, final String context, final String message, final Throwable error){ + log(level, context, null, null, message, null, error); + } + + /** + *

    Logs a full message and/or error.

    + * + *

    Note: + * If no message and error is provided, nothing will be written. + *

    * - * @param type Type of the message to print. note: (If NULL, it will be ERROR if an exception is given, INFO otherwise.) - * @param msg Message to print. (may be NULL) - * @param t Exception to print. (may be NULL) + * @param level Level of the error (DEBUG, INFO, WARNING, ERROR, FATAL). SHOULD NOT be NULL + * @param context Context of the error (UWS, HTTP, THREAD, JOB). MAY be NULL + * @param event Context event during which this log is emitted. MAY be NULL + * @param ID ID of the job or HTTP request (it may also be an ID of anything else). MAY BE NULL + * @param message Message of the error. MAY be NULL + * @param addColumn Additional column to append after the message and before the stack trace. + * @param error Error at the origin of the log error/warning/fatal. MAY be NULL + * + * @since 4.1 */ - public void log(UWSLogType type, final String msg, final Throwable t){ + protected final void log(LogLevel level, final String context, final String event, final String ID, final String message, final String addColumn, final Throwable error){ + // If no message and no error is provided, nothing to log, so nothing to write: + if ((message == null || message.length() <= 0) && error == null) + return; + // If the type is missing: - if (type == null) - type = (t != null) ? UWSLogType.ERROR : UWSLogType.INFO; + if (level == null) + level = (error != null) ? LogLevel.ERROR : LogLevel.INFO; - PrintWriter out = getOutput(type); + // Log or not? + if (!canLog(level)) + return; + + StringBuffer buf = new StringBuffer(); // Print the date/time: - out.print(dateFormat.format(new Date())); - out.print('\t'); - out.print(String.format("%1$-13s", type.toString())); - out.print('\t'); + buf.append(dateFormat.format(new Date())).append('\t'); + // Print the level of error (debug, info, warning, error, fatal): + buf.append(level.toString()).append('\t'); + // Print the context of the error (uws, thread, job, http): + buf.append((context == null) ? 
"" : context).append('\t'); + // Print the context event: + buf.append((event == null) ? "" : event).append('\t'); + // Print an ID (jobID, requestID): + buf.append((ID == null) ? "" : ID).append('\t'); // Print the message: - if (msg != null) - out.println(msg); - else if (t != null && t instanceof UWSException){ - UWSException uwsEx = (UWSException)t; - out.println("EXCEPTION " + uwsEx.getClass().getName() + "\t" + uwsEx.getUWSErrorType() + "\tHTTP-" + uwsEx.getHttpErrorCode() + "\t" + uwsEx.getMessage()); - }else - out.println(); + if (message != null) + buf.append(normalizeMessage(message)); + else if (error != null) + buf.append("[EXCEPTION ").append(error.getClass().getName()).append("] ").append(normalizeMessage(error.getMessage())); + // Print the additional column, if any: + if (addColumn != null) + buf.append('\t').append(normalizeMessage(addColumn)); + + // Write the whole log line: + PrintWriter out = getOutput(level, context); + out.println(buf.toString()); + // Print the stack trace, if any: - if (t != null) - t.printStackTrace(out); + printException(error, out); + out.flush(); } + /** + *

    Format and print the given exception inside the given writer.

    + * + *

    This function does nothing if the given error is NULL.

    + * + *

    The full stack trace is printed ONLY for unknown exceptions.

    + * + *

    The printed text has the following format for known exceptions:

    + *
    +	 * Caused by a {ExceptionClassName} {ExceptionOrigin}
    +	 *     {ExceptionMessage}
    +	 * 
    + * + *

    The printed text has the following format for unknown exceptions:

    + *
    +	 * Caused by a {ExceptionFullStackTrace}
    +	 * 
    + * + * @param error The exception to print. + * @param out The output in which the exception must be written. + * + * @see #getExceptionOrigin(Throwable) + * + * @since 4.1 + */ + protected void printException(final Throwable error, final PrintWriter out){ + if (error != null){ + if (error instanceof UWSException){ + if (error.getCause() != null) + printException(error.getCause(), out); + else{ + out.println("Caused by a " + error.getClass().getName() + " " + getExceptionOrigin(error)); + if (error.getMessage() != null) + out.println("\t" + error.getMessage()); + } + }else{ + out.print("Caused by a "); + error.printStackTrace(out); + } + } + } + + /** + *

    Format and return the origin of the given error. + * "Origin" means here: "where the error has been thrown from?" (from which class? method? file? line?).

    + * + *

    This function does nothing if the given error is NULL or if the origin information is missing.

    + * + *

    The returned text has the following format:

    + *
    +	 * at {OriginClass}.{OriginMethod}({OriginFile}:{OriginLine})
    +	 * 
    + * + *

    {OriginFile} and {OriginLine} are written only if provided.

    + * + * @param error Error whose the origin should be returned. + * + * @return A string which contains formatted information about the origin of the given error. + * + * @since 4.1 + */ + protected String getExceptionOrigin(final Throwable error){ + if (error != null && error.getStackTrace() != null && error.getStackTrace().length > 0){ + StackTraceElement src = error.getStackTrace()[0]; + return "at " + src.getClassName() + "." + src.getMethodName() + ((src.getFileName() != null) ? "(" + src.getFileName() + ((src.getLineNumber() >= 0) ? ":" + src.getLineNumber() : "") + ")" : ""); + }else + return ""; + } + @Override public void debug(String msg){ - log(UWSLogType.DEBUG, msg, null); + log(LogLevel.DEBUG, null, msg, null); } @Override public void debug(Throwable t){ - log(UWSLogType.DEBUG, null, t); + log(LogLevel.DEBUG, null, null, t); } @Override public void debug(String msg, Throwable t){ - log(UWSLogType.DEBUG, msg, t); + log(LogLevel.DEBUG, null, msg, t); } @Override public void info(String msg){ - log(UWSLogType.INFO, msg, null); + log(LogLevel.INFO, null, msg, null); } @Override public void warning(String msg){ - log(UWSLogType.WARNING, msg, null); + log(LogLevel.WARNING, null, msg, null); } @Override public void error(String msg){ - log(UWSLogType.ERROR, msg, null); + log(LogLevel.ERROR, null, msg, null); } @Override public void error(Throwable t){ - log(UWSLogType.ERROR, null, t); + log(LogLevel.ERROR, null, null, t); } @Override public void error(String msg, Throwable t){ - log(UWSLogType.ERROR, msg, t); + log(LogLevel.ERROR, null, msg, t); } - /* **************************** */ - /* METHODS ABOUT THE UWS STATUS */ - /* **************************** */ + /* ************* */ + /* HTTP ACTIVITY */ + /* ************* */ /** - * Gets the name of the UWS, if any. - * - * @param uws UWS whose the name must be returned. + *

    A message/error logged with this function will have the following format:

    + *
    <TIMESTAMP>	<LEVEL>	HTTP	REQUEST_RECEIVED	<REQUEST_ID>	<MESSAGE>	<HTTP_METHOD> in <CONTENT_TYPE> at <URL> from <IP_ADDR> using <USER_AGENT> with parameters (<PARAM1>=<VAL1>&...)
    * - * @return Name of the given UWS (followed by a space: " ") or an empty string (""). + * @see uws.service.log.UWSLog#logHttp(uws.service.log.UWSLog.LogLevel, javax.servlet.http.HttpServletRequest, java.lang.String, java.lang.String, java.lang.Throwable) */ - protected final static String getUWSName(final UWS uws){ - return ((uws != null && uws.getName() != null && !uws.getName().trim().isEmpty()) ? (uws.getName() + " ") : ""); - } - @Override - public void uwsInitialized(UWS uws){ - if (uws != null){ - String msg = "UWS " + getUWSName(uws) + "INITIALIZED !"; - info(msg); - log(UWSLogType.HTTP_ACTIVITY, msg, null); - } - } + public void logHttp(LogLevel level, final HttpServletRequest request, final String requestId, final String message, final Throwable error){ + // IF A REQUEST IS PROVIDED, write its details after the message in a new column: + if (request != null){ + // If the type is missing: + if (level == null) + level = (error != null) ? LogLevel.ERROR : LogLevel.INFO; - @Override - public void ownerJobsSaved(JobOwner owner, int[] report){ - if (owner != null){ - String strReport = (report == null || report.length != 2) ? "???" : (report[0] + "/" + report[1]); - String ownerPseudo = (owner.getPseudo() != null && !owner.getPseudo().trim().isEmpty() && !owner.getID().equals(owner.getPseudo())) ? (" (alias " + owner.getPseudo() + ")") : ""; - info(strReport + " saved jobs for the user " + owner.getID() + ownerPseudo + " !"); - } - } + // Log or not? + if (!canLog(level)) + return; - @Override - public void uwsRestored(UWS uws, int[] report){ - if (uws != null){ - String strReport = (report == null || report.length != 4) ? 
"[Unknown report format !]" : (report[0] + "/" + report[1] + " restored jobs and " + report[2] + "/" + report[3] + " restored users"); - info("UWS " + getUWSName(uws) + "RESTORED => " + strReport); - } - } + StringBuffer str = new StringBuffer(); - @Override - public void uwsSaved(UWS uws, int[] report){ - if (uws != null){ - String strReport = (report == null || report.length != 4) ? "[Unknown report format !]" : (report[0] + "/" + report[1] + " saved jobs and " + report[2] + "/" + report[3] + " saved users"); - info("UWS " + getUWSName(uws) + "SAVED => " + strReport); - } - } + // Write the request type, content type and the URL: + str.append(request.getMethod()); + str.append(" as "); + if (request.getContentType() != null){ + if (request.getContentType().indexOf(';') > 0) + str.append(request.getContentType().substring(0, request.getContentType().indexOf(';'))); + else + str.append(request.getContentType()); + } + str.append(" at ").append(request.getRequestURL()); - @Override - public void jobCreated(UWSJob job){ - if (job != null){ - String jlName = (job.getJobList() != null) ? job.getJobList().getName() : null; - info("JOB " + job.getJobId() + " CREATED" + ((jlName != null) ? (" and added into " + jlName) : "") + " !"); - } - } + // Write the IP address: + str.append(" from ").append(request.getRemoteAddr()); - @Override - public void jobDestroyed(UWSJob job, JobList jl){ - if (job != null){ - String jlName = (jl != null) ? jl.getName() : null; - info("JOB " + job.getJobId() + " DESTROYED" + ((jlName != null) ? (" and removed from " + jlName) : "") + " !"); - } - } + // Write the user agent: + str.append(" using ").append(request.getHeader("User-Agent") == null ? 
"" : request.getHeader("User-Agent")); - @Override - public void jobStarted(UWSJob job){ - if (job != null){ - info("JOB " + job.getJobId() + " STARTED !"); + // Write the posted parameters: + str.append(" with parameters ("); + Map params = UWSToolBox.getParamsMap(request); + int i = -1; + for(Entry p : params.entrySet()){ + if (++i > 0) + str.append('&'); + str.append(p.getKey()).append('=').append((p.getValue() != null) ? p.getValue() : ""); + } + str.append(')'); + + // Send the log message to the log file: + log(level, "HTTP", "REQUEST_RECEIVED", requestId, (message != null ? message : str.toString()), (message != null ? str.toString() : null), error); } + // OTHERWISE, just write the given message: + else + log(level, "HTTP", "REQUEST_RECEIVED", requestId, message, null, error); } + /** + *

    A message/error logged with this function will have the following format:

    + *
    <TIMESTAMP>	<LEVEL>	HTTP	RESPONSE_SENT	<REQUEST_ID>	<MESSAGE>	HTTP-<STATUS_CODE> to the user <USER> as <CONTENT_TYPE>
    + *

    ,where <USER> may be either "(id:<USER_ID>;pseudo:<USER_PSEUDO>)" or "ANONYMOUS".

    + * + * @see uws.service.log.UWSLog#logHttp(uws.service.log.UWSLog.LogLevel, javax.servlet.http.HttpServletResponse, java.lang.String, uws.job.user.JobOwner, java.lang.String, java.lang.Throwable) + */ @Override - public void jobFinished(UWSJob job){ - if (job != null){ - long endTime = (job.getEndTime() == null) ? -1 : job.getEndTime().getTime(); - long startTime = (job.getStartTime() == null) ? -1 : job.getStartTime().getTime(); - long duration = (endTime > 0 && startTime > 0) ? (endTime - startTime) : -1; - info("JOB " + job.getJobId() + " FINISHED with the phase " + job.getPhase() + ((duration > 0) ? " after an execution of " + duration + "ms" : "") + " !"); - } - } + public void logHttp(LogLevel level, HttpServletResponse response, String requestId, JobOwner user, String message, Throwable error){ + if (response != null){ + // If the type is missing: + if (level == null) + level = (error != null) ? LogLevel.ERROR : LogLevel.INFO; - /* ************* */ - /* HTTP ACTIVITY */ - /* ************* */ + // Log or not? 
+ if (!canLog(level)) + return; - @SuppressWarnings("unchecked") - public void httpRequest(final HttpServletRequest request, final JobOwner user, final String uwsAction, final int responseStatusCode, final String responseMsg, final Throwable responseError){ - if (request != null){ StringBuffer str = new StringBuffer(); - // Write the executed UWS action: - if (uwsAction == null || uwsAction.trim().isEmpty()) - str.append("???"); - else - str.append(uwsAction); - str.append('\t'); - // Write the response status code: - if (responseStatusCode > 0) - str.append("HTTP-").append(responseStatusCode); - else - str.append("HTTP-???"); - str.append('\t'); - - // Write the "response" message: - if (responseMsg != null) - str.append('[').append(responseMsg).append(']'); - else - str.append("[]"); - str.append('\t'); - - // Write the request type and the URL: - str.append("[HTTP-").append(request.getMethod()).append("] ").append(request.getRequestURL()).append('\t'); - - // Write the posted parameters: - Enumeration paramNames = request.getParameterNames(); - while(paramNames.hasMoreElements()){ - String param = paramNames.nextElement(); - String paramValue = request.getParameter(param); - if (paramValue != null) - paramValue = paramValue.replaceAll("[\t\n\r]", " "); - else - paramValue = ""; - str.append(param).append('=').append(paramValue); - if (paramNames.hasMoreElements()) - str.append('&'); - } - str.append('\t'); + str.append("HTTP-").append(response.getStatus()); - // Write the IP address and the corresponding user: - str.append(request.getRemoteAddr()).append('['); + // Write the user to whom the response is sent: + str.append(" to the user "); if (user != null){ - str.append("id:").append(user.getID()); + str.append("(id:").append(user.getID()); if (user.getPseudo() != null) str.append(";pseudo:").append(user.getPseudo()); + str.append(')'); }else - str.append("???"); - str.append("]\t"); + str.append("ANONYMOUS"); - // Write the user agent: - 
str.append(request.getHeader("User-Agent")); + // Write the response's MIME type: + if (response.getContentType() != null) + str.append(" as ").append(response.getContentType()); // Send the log message to the log file: - log(UWSLogType.HTTP_ACTIVITY, str.toString(), (responseStatusCode >= minResponseCodeForStackTrace) ? responseError : null); + log(level, "HTTP", "RESPONSE_SENT", requestId, message, str.toString(), error); } + // OTHERWISE, just write the given message: + else + log(level, "HTTP", "RESPONSE_SENT", requestId, message, null, error); } - /* ********************** */ - /* THREAD STATUS MESSAGES */ - /* ********************** */ + /* ************ */ + /* UWS ACTIVITY */ + /* ************ */ @Override - public void threadStarted(Thread t, String task){ - if (t != null) - info("THREAD " + t.getId() + " STARTED\t" + t.getName() + "\t" + t.getState() + "\t" + t.getThreadGroup().activeCount() + " active threads"); + public void logUWS(LogLevel level, Object obj, String event, String message, Throwable error){ + // If the type is missing: + if (level == null) + level = (error != null) ? LogLevel.ERROR : LogLevel.INFO; + + // Log or not? 
+ if (!canLog(level)) + return; + + // CASE "BACKUPED": Append to the message the backup report: + String report = null; + if (event != null && event.equalsIgnoreCase("BACKUPED") && obj != null && obj.getClass().getName().equals("[I")){ + int[] backupReport = (int[])obj; + report = "(" + backupReport[0] + "/" + backupReport[1] + " jobs backuped ; " + backupReport[2] + "/" + backupReport[3] + " users backuped)"; + }else if (event != null && event.equalsIgnoreCase("RESTORED") && obj != null && obj.getClass().getName().equals("[I")){ + int[] restoreReport = (int[])obj; + report = "(" + restoreReport[0] + "/" + restoreReport[1] + " jobs restored ; " + restoreReport[2] + "/" + restoreReport[3] + " users restored)"; + } + + // Log the message + log(level, "UWS", event, null, message, report, error); } + /* ************ */ + /* JOB ACTIVITY */ + /* ************ */ + @Override - public void threadFinished(Thread t, String task){ - if (t != null) - info("THREAD " + t.getId() + " ENDED\t" + t.getName() + "\t" + t.getState() + "\t" + t.getThreadGroup().activeCount() + " active threads"); + public void logJob(LogLevel level, UWSJob job, String event, String message, Throwable error){ + log(level, "JOB", event, (job == null) ? 
null : job.getJobId(), message, null, error); } + /* ********************** */ + /* THREAD STATUS MESSAGES */ + /* ********************** */ + @Override - public void threadInterrupted(Thread t, String task, Throwable error){ - if (t != null){ - if (error == null || error instanceof InterruptedException) - info("THREAD " + t.getId() + " CANCELLED\t" + t.getName() + "\t" + t.getState() + "\t" + t.getThreadGroup().activeCount() + " active threads"); - else - error("THREAD " + t.getId() + " INTERRUPTED\t" + t.getName() + "\t" + t.getState() + "\t" + t.getThreadGroup().activeCount() + " active threads", error); - } + public void logThread(LogLevel level, Thread thread, String event, String message, Throwable error){ + if (thread != null){ + // If the type is missing: + if (level == null) + level = (error != null) ? LogLevel.ERROR : LogLevel.INFO; + + // Log or not? + if (!canLog(level)) + return; + + StringBuffer str = new StringBuffer(); + + // Write the thread name and ID: + str.append(thread.getName()).append(" (thread ID: ").append(thread.getId()).append(")"); + + // Write the thread state: + str.append(" is ").append(thread.getState()); + + // Write its thread group name: + str.append(" in the group " + thread.getThreadGroup().getName()); + + // Write the number of active threads: + str.append(" where ").append(thread.getThreadGroup().activeCount()).append(" threads are active"); + + log(level, "THREAD", event, thread.getName(), message, str.toString(), error); + + }else + log(level, "THREAD", event, null, message, null, error); } } diff --git a/src/uws/service/log/UWSLog.java b/src/uws/service/log/UWSLog.java index 350fe0244bca769ca5e4881ad4074784dcaae624..0448234a6ae0ec0b8acb8afab17377cfda4d1afd 100644 --- a/src/uws/service/log/UWSLog.java +++ b/src/uws/service/log/UWSLog.java @@ -1,5 +1,19 @@ package uws.service.log; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.json.JSONArray; +import 
org.json.JSONObject; + +import uws.job.ErrorSummary; +import uws.job.JobList; +import uws.job.Result; +import uws.job.UWSJob; +import uws.job.user.JobOwner; +import uws.service.UWS; +import uws.service.UWSUrl; + /* * This file is part of UWSLibrary. * @@ -16,174 +30,276 @@ package uws.service.log; * You should have received a copy of the GNU Lesser General Public License * along with UWSLibrary. If not, see . * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) + * Copyright 2012,2014 - UDS/Centre de Données astronomiques de Strasbourg (CDS), + * Astronomisches Rechen Institut (ARI) */ -import javax.servlet.http.HttpServletRequest; - -import uws.job.JobList; -import uws.job.UWSJob; - -import uws.job.user.JobOwner; - -import uws.service.UWS; - /** - * Lets logging any kind of message about a UWS. + * Let log any kind of message about a UWS service. * - * @author Grégory Mantelet (CDS) - * @version 05/2012 + * @author Grégory Mantelet (CDS;ARI) + * @version 4.1 (12/2014) */ public interface UWSLog { + /** + * Indicate the level of the error: debug, info, warning or error. + * + * @author Grégory Mantelet (ARI) + * @version 4.1 (09/2014) + * @since 4.1 + */ + public static enum LogLevel{ + DEBUG, INFO, WARNING, ERROR, FATAL; + } + /* *********************** */ /* GENERAL LOGGING METHODS */ /* *********************** */ /** - * Logs a debug message. + *

    Generic way to log a message and/or an exception.

    + * + *

    Note: + * The other functions of this class or extension, MAY be equivalent to a call to this function with some specific parameter values. + * It should be especially the case for the debug(...), info(...), warning(...) and error(...) functions. + *

    + * + * @param level Level of the error (info, warning, error, ...). SHOULD NOT be NULL, but if NULL anyway, the level SHOULD be considered as INFO + * @param context Context of the log item (HTTP, Thread, Job, UWS, ...). MAY be NULL + * @param message Message to log. MAY be NULL + * @param error Error/Exception to log. MAY be NULL + * + * @since 4.1 + */ + public void log(final LogLevel level, final String context, final String message, final Throwable error); + + /** + *

    Logs a debug message.

    + * + *

    Note: + * This function should be equals to: log(LogLevel.WARNING, null, msg, null). + *

    + * * @param msg A DEBUG message. */ public void debug(final String msg); /** - * Logs an exception as a debug message. + *

    Logs an exception as a debug message.

    + * + *

    Note: + * This function should be equals to: log(LogLevel.WARNING, null, null, t). + *

    + * * @param t An exception. */ public void debug(final Throwable t); /** - * Logs a full (message+exception) debug message. + *

    Logs a full (message+exception) debug message.

    + * + *

    Note: + * This function should be equals to: log(LogLevel.WARNING, null, msg, t). + *

    + * * @param msg A DEBUG message. * @param t An exception. */ public void debug(final String msg, final Throwable t); /** - * Logs the given information. + *

    Logs the given information.

    + * + *

    Note: + * This function should be equals to: log(LogLevel.INFO, null, msg, null). + *

    + * * @param msg An INFO message. */ public void info(final String msg); /** - * Logs the given warning. + *

    Logs the given warning.

    + * + *

    Note: + * This function should be equals to: log(LogLevel.WARNING, null, msg, null). + *

    + * * @param msg A WARNING message. */ public void warning(final String msg); /** - * Logs the given error. + *

    Logs the given error.

    + * + *

    Note: + * This function should be equals to: log(LogLevel.ERROR, null, msg, null). + *

    + * * @param msg An ERROR message. */ public void error(final String msg); /** - * Logs the given exception as an error. + *

    Logs the given exception as an error.

    + * + *

    Note: + * This function should be equals to: log(LogLevel.ERROR, null, null, t). + *

    + * * @param t An exception. */ public void error(final Throwable t); /** - * Logs a full (message+exception) error message. + *

    Logs a full (message+exception) error message.

    + * + *

    Note: + * This function should be equals to: log(LogLevel.ERROR, null, msg, t). + *

    + * * @param msg An ERROR message. * @param t An exception. */ public void error(final String msg, final Throwable t); - /* *************************************** */ - /* LOGGING METHODS TO WATCH THE UWS STATUS */ - /* *************************************** */ - - /** - *

    Logs the fact that the given UWS has just been initialized.

    - *

    note: Theoretically, no restoration has been done when this method is called.

    - * @param uws The UWS which has just been initialized. - */ - public void uwsInitialized(final UWS uws); - - /** - * Logs the fact that the given UWS has just been restored. - * @param uws The restored UWS. - * @param report Report of the restoration (in the order: nb restored jobs, nb jobs, nb restored users, nb users). - */ - public void uwsRestored(final UWS uws, final int[] report); - - /** - * Logs the fact that the given UWS has just been saved. - * @param uws The saved UWS. - * @param report Report of the save (in the order: nb saved jobs, nb jobs, nb saved users, nb users). - */ - public void uwsSaved(final UWS uws, final int[] report); - - /** - * Logs the fact that all the jobs of the given user have just been saved. - * @param owner The owner whose all the jobs have just been saved. - * @param report Report of the save (in the order: nb saved jobs, nb jobs). - */ - public void ownerJobsSaved(final JobOwner owner, final int[] report); - - /** - * Logs the fact that the given job has just been created. - * @param job The created job. - */ - public void jobCreated(final UWSJob job); + /* ****************** */ + /* SPECIFIC FUNCTIONS */ + /* ****************** */ /** - * Logs the fact that the given job has just started. - * @param job The started job. + *

    Log a message and/or an error in the general context of UWS.

    + * + *

    + * One of the parameter is of type {@link Object}. This object can be used to provide more information to the log function + * in order to describe as much as possible the state and/or result event. + *

    + * + *

    List of all events sent by the library (case sensitive):

    + *
      + *
    • INIT (with "obj" as an instance of {@link UWS} except in case of error where "obj" is NULL)
    • + *
    • ADD_JOB_LIST (with "obj" as an instance of {@link JobList})
    • + *
    • DESTROY_JOB_LIST (with "obj" as an instance of {@link JobList})
    • + *
    • DESTROY_JOB (with "obj" as an instance of {@link UWSUrl})
    • + *
    • SERIALIZE (with "obj" as an instance of {@link UWSUrl})
    • + *
    • SET_PARAM (with "obj" as an instance of {@link HttpServletRequest} in case of error)
    • + *
    • GET_RESULT (with "obj" as an instance of {@link Result})
    • + *
    • GET_ERROR (with "obj" as an instance of {@link ErrorSummary})
    • + *
    • RESTORATION (with "obj" the raw object to de-serialize (may be {@link JSONObject} or {@link JSONArray} or NULL))
    • + *
    • BACKUP (with "obj" the object to backup ; may be {@link JobOwner}, a {@link UWSJob}, ...)
    • + *
    • RESTORED (with "obj" as an integer array of 4 items: nb of restored jobs, total nb of jobs, nb of restored users, total nb of users)
    • + *
    • BACKUPED (with "obj" as an integer array of 4 items: nb of saved jobs, total nb of jobs, nb of saved users, total nb of users or with just 2 items (the two last ones))
    • + *
    • FORMAT_ERROR (with a NULL "obj")
    • + *
    • STOP (with "obj" as an instance of {@link UWS})
    • + *
    + * + * @param level Level of the log (info, warning, error, ...). SHOULD NOT be NULL, but if NULL anyway, the level SHOULD be considered as INFO + * @param obj Object providing more information about the event/object at the origin of this log. MAY be NULL + * @param event Event at the origin of this log or action currently executed by UWS while this log is sent. MAY be NULL + * @param message Message to log. MAY be NULL + * @param error Error/Exception to log. MAY be NULL + * + * @since 4.1 */ - public void jobStarted(final UWSJob job); + public void logUWS(final LogLevel level, final Object obj, final String event, final String message, final Throwable error); /** - * Logs the fact that the given job has just finished. - * @param job The finished job. + *

    Log a message and/or an error in the HTTP context. + * This log function is called when a request is received by the service. Consequently, the event is: REQUEST_RECEIVED.

    + * + *

    Note: + * When a request is received, this function is called, and then, when the response has been written and sent to the client, + * {@link #logHttp(LogLevel, HttpServletResponse, String, JobOwner, String, Throwable)} should be called. + * These functions should always work together. + *

    + * + * @param level Level of the log (info, warning, error, ...). SHOULD NOT be NULL, but if NULL anyway, the level SHOULD be considered as INFO + * @param request HTTP request received by the service. SHOULD NOT be NULL + * @param requestId ID to use to identify this request until its response is sent. + * @param message Message to log. MAY be NULL + * @param error Error/Exception to log. MAY be NULL + * + * @see #logHttp(LogLevel, HttpServletResponse, String, JobOwner, String, Throwable) + * + * @since 4.1 */ - public void jobFinished(final UWSJob job); + public void logHttp(final LogLevel level, final HttpServletRequest request, final String requestId, final String message, final Throwable error); /** - * Logs the fact that the given job has just been destroyed. - * @param job The destroyed job. - * @param jl The job list from which the given job has just been removed. + *

    Log a message and/or an error in the HTTP context. + * This log function is called when a response is sent to the client by the service. Consequently, the event is: RESPONSE_SENT.

    + * + *

    Note: + * When a request is received, {@link #logHttp(LogLevel, HttpServletRequest, String, String, Throwable)} is called, and then, + * when the response has been written and sent to the client, this function should be called. + * These functions should always work together. + *

    + * + * @param level Level of the log (info, warning, error, ...). SHOULD NOT be NULL, but if NULL anyway, the level SHOULD be considered as INFO + * @param response HTTP response sent by the service to the client. MAY be NULL if an error occurs while writing the response + * @param requestId ID to use to identify the request to which the given response is answering. + * @param user Identified user which has sent the received request. + * @param message Message to log. MAY be NULL + * @param error Error/Exception to log. MAY be NULL + * + * @see #logHttp(LogLevel, HttpServletRequest, String, String, Throwable) + * + * @since 4.1 */ - public void jobDestroyed(final UWSJob job, final JobList jl); - - /* ************* */ - /* HTTP ACTIVITY */ - /* ************* */ + public void logHttp(final LogLevel level, final HttpServletResponse response, final String requestId, final JobOwner user, final String message, final Throwable error); /** - * Logs any HTTP request received by the UWS and also the send response. - * @param request The HTTP request received by the UWS. - * @param user The identified user which sends this request. (MAY BE NULL) - * @param uwsAction The identified UWS action. (MAY BE NULL) - * @param responseStatusCode The HTTP status code of the response given by the UWS. - * @param responseMsg The message (or a summary of the message) returned by the UWS. (MAY BE NULL) - * @param responseError The error sent by the UWS. (MAY BE NULL) + *

    Log a message and/or an error in the JOB context.

    + * + *

    List of all events sent by the library (case sensitive):

    + *
      + *
    • CREATED
    • + *
    • QUEUE
    • + *
    • START
    • + *
    • ABORT
    • + *
    • ERROR
    • + *
    • EXECUTING
    • + *
    • CHANGE_PHASE
    • + *
    • NOTIFY
    • + *
    • END
    • + *
    • SERIALIZE
    • + *
    • MOVE_UPLOAD
    • + *
    • ADD_RESULT
    • + *
    • SET_DESTRUCTION
    • + *
    • SET_ERROR
    • + *
    • CLEAR_RESOURCES
    • + *
    • DESTROY
    • + *
    + * + * @param level Level of the log (info, warning, error, ...). SHOULD NOT be NULL, but if NULL anyway, the level SHOULD be considered as INFO + * @param job Job from which this log comes. MAY be NULL + * @param event Event at the origin of this log or action executed by the given job while this log is sent. MAY be NULL + * @param message Message to log. MAY be NULL + * @param error Error/Exception to log. MAY be NULL + * + * @since 4.1 */ - public void httpRequest(final HttpServletRequest request, final JobOwner user, final String uwsAction, final int responseStatusCode, final String responseMsg, final Throwable responseError); + public void logJob(final LogLevel level, final UWSJob job, final String event, final String message, final Throwable error); - /* ********************** */ - /* THREAD STATUS MESSAGES */ - /* ********************** */ /** - * Logs the fact that the given thread has just started. - * @param t The started thread. - * @param task Name/Description of the task that the given thread is executing. + *

    Log a message and/or an error in the THREAD context.

    + * + *

    List of all events sent by the library (case sensitive):

    + *
      + *
    • START
    • + *
    • SET_ERROR
    • + *
    • END
    • + *
    + * + * @param level Level of the log (info, warning, error, ...). SHOULD NOT be NULL, but if NULL anyway, the level SHOULD be considered as INFO + * @param thread Thread from which this log comes. MAY be NULL + * @param event Event at the origin of this log or action currently executed by the given thread while this log is sent. MAY be NULL + * @param message Message to log. MAY be NULL + * @param error Error/Exception to log. MAY be NULL + * + * @since 4.1 */ - public void threadStarted(final Thread t, final String task); + public void logThread(final LogLevel level, final Thread thread, final String event, final String message, final Throwable error); - /** - * Logs the fact that the given thread has just been interrupted. - * @param t The interrupted thread. - * @param task Name/Description of the task that the given thread was trying to execute. - * @param error Exception that has interrupted the given thread. - */ - public void threadInterrupted(final Thread t, final String task, final Throwable error); - - /** - * Logs the fact that the given thread has just finished. - * @param t The finished thread. - * @param task Name/Description of the task that the given thread was executing. - */ - public void threadFinished(final Thread t, final String task); } diff --git a/src/uws/service/log/UWSLogType.java b/src/uws/service/log/UWSLogType.java deleted file mode 100644 index 1aed19bd4c58e5a55f33989b2892db1a60f40d03..0000000000000000000000000000000000000000 --- a/src/uws/service/log/UWSLogType.java +++ /dev/null @@ -1,53 +0,0 @@ -package uws.service.log; - -/* - * This file is part of UWSLibrary. - * - * UWSLibrary is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. 
- * - * UWSLibrary is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with UWSLibrary. If not, see . - * - * Copyright 2012 - UDS/Centre de Données astronomiques de Strasbourg (CDS) - */ - -/** - * Different types of log messages. - * - * @author Grégory Mantelet (CDS) - * @version 06/2012 - * - * @see UWSLog - * @see DefaultUWSLog - */ -public enum UWSLogType{ - HTTP_ACTIVITY, DEBUG, INFO, WARNING, ERROR, CUSTOM; - - protected String customType = this.name(); - - public final String getCustomType(){ - return customType; - } - - public static final UWSLogType createCustomLogType(final String customType){ - UWSLogType type = UWSLogType.CUSTOM; - type.customType = customType; - return type; - } - - @Override - public String toString(){ - if (this == CUSTOM) - return customType; - else - return name(); - } -} diff --git a/src/uws/service/request/FormEncodedParser.java b/src/uws/service/request/FormEncodedParser.java new file mode 100644 index 0000000000000000000000000000000000000000..d9882d3a2439a924e03b051737bd1b0b7ec6540a --- /dev/null +++ b/src/uws/service/request/FormEncodedParser.java @@ -0,0 +1,178 @@ +package uws.service.request; + +/* + * This file is part of UWSLibrary. + * + * UWSLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * UWSLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. 
+ * + * You should have received a copy of the GNU Lesser General Public License + * along with UWSLibrary. If not, see . + * + * Copyright 2014 - Astronomisches Rechen Institut (ARI) + */ + +import java.io.BufferedInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.UnsupportedEncodingException; +import java.net.URLDecoder; +import java.nio.charset.Charset; +import java.util.Enumeration; +import java.util.HashMap; +import java.util.Map; +import java.util.Scanner; + +import javax.servlet.http.HttpServletRequest; + +import uws.UWSException; + +/** + *

    Extract parameters encoded using the HTTP-GET method or the Content-type application/x-www-form-urlencoded + * with the HTTP-POST or HTTP-PUT method in an {@link HttpServletRequest}.

    + * + *

    + * By default, this {@link RequestParser} overwrite parameter occurrences in the map: that's to say if a parameter is provided several times, + * only the last value will be kept. This behavior can be changed by overwriting the function {@link #consumeParameter(String, Object, Map)} + * of this class. + *

    + * + *

    Note: + * When HTTP-POST is used, these parameters are actually already extracted by the server application (like Apache/Tomcat) + * and are available with {@link HttpServletRequest#getParameterMap()}. + * However, when using HTTP-PUT, the parameters are extracted manually from the request content. + *

    + * + * @author Grégory Mantelet (ARI) + * @version 4.1 (11/2014) + * @since 4.1 + */ +public class FormEncodedParser implements RequestParser { + + /** HTTP content-type for HTTP request formated in url-form-encoded. */ + public final static String EXPECTED_CONTENT_TYPE = "application/x-www-form-urlencoded"; + + @Override + public final Map parse(HttpServletRequest request) throws UWSException{ + if (request == null) + return new HashMap(); + + HashMap params = new HashMap(); + + // Normal extraction for HTTP-POST and other HTTP methods: + if (request.getMethod() == null || !request.getMethod().equalsIgnoreCase("put")){ + Enumeration names = request.getParameterNames(); + String paramName; + String[] values; + int i; + while(names.hasMoreElements()){ + paramName = names.nextElement(); + values = request.getParameterValues(paramName); + // search for the last non-null occurrence: + i = values.length - 1; + while(i >= 0 && values[i] == null) + i--; + // if there is one, keep it: + if (i >= 0) + consumeParameter(paramName, values[i], params); + } + } + /* Parameters are not extracted when using the HTTP-PUT method. + * This block is doing this extraction manually. 
*/ + else{ + InputStream input = null; + try{ + + // Get the character encoding: + String charEncoding = request.getCharacterEncoding(); + try{ + if (charEncoding == null || charEncoding.trim().length() == 0 || Charset.isSupported(charEncoding)) + charEncoding = "UTF-8"; + }catch(Exception ex){ + charEncoding = "UTF-8"; + } + + // Get a stream on the request content: + input = new BufferedInputStream(request.getInputStream()); + // Read the stream by iterating on each parameter pairs: + Scanner scanner = new Scanner(input); + scanner.useDelimiter("&"); + String pair; + int indSep; + while(scanner.hasNext()){ + // get the pair: + pair = scanner.next(); + // split it between the parameter name and value: + indSep = pair.indexOf('='); + try{ + if (indSep >= 0) + consumeParameter(URLDecoder.decode(pair.substring(0, indSep), charEncoding), URLDecoder.decode(pair.substring(indSep + 1), charEncoding), params); + else + consumeParameter(URLDecoder.decode(pair, charEncoding), "", params); + }catch(UnsupportedEncodingException uee){ + if (indSep >= 0) + consumeParameter(pair.substring(0, indSep), pair.substring(indSep + 1), params); + else + consumeParameter(pair, "", params); + } + } + + }catch(IOException ioe){}finally{ + if (input != null){ + try{ + input.close(); + }catch(IOException ioe2){} + } + } + } + + return params; + } + + /** + *

    Consume the specified parameter: add it inside the given map.

    + * + *

    + * By default, this function is just putting the given value inside the map. So, if the parameter already exists in the map, + * its old value will be overwritten by the given one. + *

    + * + * @param name Name of the parameter to consume. + * @param value Its value. + * @param allParams The list of all parameters read until now. + */ + protected void consumeParameter(final String name, final Object value, final Map allParams){ + allParams.put(name, value); + } + + /** + *

    Utility method that determines whether the content of the given request is a application/x-www-form-urlencoded.

    + * + *

    Important: + * This function just test the content-type of the request. The HTTP method (e.g. GET, POST, ...) is not tested. + *

    + * + * @param request The servlet request to be evaluated. Must be non-null. + * + * @return true if the request is url-form-encoded, + * false otherwise. + */ + public final static boolean isFormEncodedRequest(final HttpServletRequest request){ + // Extract the content type and determine if it is a url-form-encoded request: + String contentType = request.getContentType(); + if (contentType == null) + return false; + else if (contentType.toLowerCase().equals(EXPECTED_CONTENT_TYPE)) + return true; + else + return false; + } + +} diff --git a/src/uws/service/request/MultipartParser.java b/src/uws/service/request/MultipartParser.java new file mode 100644 index 0000000000000000000000000000000000000000..a6760aca3527bab2871ac0b17378e92c5880fb4e --- /dev/null +++ b/src/uws/service/request/MultipartParser.java @@ -0,0 +1,250 @@ +package uws.service.request; + +/* + * This file is part of UWSLibrary. + * + * UWSLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * UWSLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with UWSLibrary. If not, see . 
+ * + * Copyright 2014 - Astronomisches Rechen Institut (ARI) + */ + +import java.io.File; +import java.io.IOException; +import java.util.Date; +import java.util.Enumeration; +import java.util.LinkedHashMap; +import java.util.Map; + +import javax.servlet.http.HttpServletRequest; + +import uws.UWSException; +import uws.service.UWS; +import uws.service.file.UWSFileManager; + +import com.oreilly.servlet.MultipartRequest; +import com.oreilly.servlet.multipart.FileRenamePolicy; + +/** + *

    Extract parameters encoded using the Content-type multipart/form-data + * in an {@link HttpServletRequest}.

    + * + *

    + * The created file(s) is(are) stored in the temporary upload directory ({@link UWSFileManager#TMP_UPLOAD_DIR} ; this attribute can be modified if needed). + * This directory is supposed to be emptied regularly in case it is forgotten at any moment by the UWS service implementation to delete unused request files. + *

    + * + *

    + * The size of the full request body is limited by the static attribute {@link #SIZE_LIMIT} before the creation of the file. + * Its default value is: {@link #DEFAULT_SIZE_LIMIT}={@value #DEFAULT_SIZE_LIMIT} bytes. + *

    + * + *

    + * By default, this {@link RequestParser} overwrite parameter occurrences in the map: that's to say if a parameter is provided several times, + * only the last value will be kept. This behavior can be changed by overwriting the function {@link #consumeParameter(String, Object, Map)} + * of this class. + *

    + * + * @author Grégory Mantelet (ARI) + * @version 4.1 (12/2014) + * @since 4.1 + */ +public class MultipartParser implements RequestParser { + + /** HTTP content-type for HTTP request formated in multipart. */ + public static final String EXPECTED_CONTENT_TYPE = "multipart/form-data"; + + /** Default maximum allowed size for an HTTP request content: 10 MiB. */ + public static final int DEFAULT_SIZE_LIMIT = 10 * 1024 * 1024; + + /**

    Maximum allowed size for an HTTP request content. Over this limit, an exception is thrown and the request is aborted.

    + *

    Note: + * The default value is {@link #DEFAULT_SIZE_LIMIT} (= {@value #DEFAULT_SIZE_LIMIT} MiB). + *

    + *

    Note: + * This limit is expressed in bytes and can not be negative. + * Its smallest possible value is 0. If the set value is though negative, + * it will be ignored and {@link #DEFAULT_SIZE_LIMIT} will be used instead. + *

    */ + public static int SIZE_LIMIT = DEFAULT_SIZE_LIMIT; + + /** Indicates whether this parser should allow inline files or not. */ + public final boolean allowUpload; + + /** File manager to use to create {@link UploadFile} instances. + * It is required by this new object to execute open, move and delete operations whenever it could be asked. */ + protected final UWSFileManager fileManager; + + /** + *

    Build a {@link MultipartParser} forbidding uploads (i.e. inline files).

    + * + *

    + * With this parser, when an upload (i.e. submitted inline files) is detected, an exception is thrown by {@link #parse(HttpServletRequest)} + * which cancels immediately the request. + *

    + */ + public MultipartParser(){ + this(false, null); + } + + /** + * Build a {@link MultipartParser} allowing uploads (i.e. inline files). + * + * @param fileManager The file manager to use in order to store any eventual upload. MUST NOT be NULL + */ + public MultipartParser(final UWSFileManager fileManager){ + this(true, fileManager); + } + + /** + *

    Build a {@link MultipartParser}.

    + * + *

    + * If the first parameter is false, then when an upload (i.e. submitted inline files) is detected, an exception is thrown + * by {@link #parse(HttpServletRequest)} which cancels immediately the request. + *

    + * + * @param uploadEnabled true to allow uploads (i.e. inline files), false otherwise. + * If false, the two other parameters are useless. + * @param fileManager The file manager to use in order to store any eventual upload. MUST NOT be NULL + */ + protected MultipartParser(final boolean uploadEnabled, final UWSFileManager fileManager){ + if (uploadEnabled && fileManager == null) + throw new NullPointerException("Missing file manager although the upload capability is enabled => can not create a MultipartParser!"); + + this.allowUpload = uploadEnabled; + this.fileManager = fileManager; + } + + @Override + @SuppressWarnings("unchecked") + public final Map parse(final HttpServletRequest request) throws UWSException{ + LinkedHashMap parameters = new LinkedHashMap(); + MultipartRequest multipart = null; + + try{ + + // Parse the request body: + multipart = new MultipartRequest(request, UWSFileManager.TMP_UPLOAD_DIR.getPath(), (SIZE_LIMIT < 0 ? DEFAULT_SIZE_LIMIT : SIZE_LIMIT), new FileRenamePolicy(){ + @Override + public File rename(File file){ + Object reqID = request.getAttribute(UWS.REQ_ATTRIBUTE_ID); + if (reqID == null || !(reqID instanceof String)) + reqID = (new Date()).getTime(); + char uniq = 'A'; + File f = new File(file.getParentFile(), "UPLOAD_" + reqID + uniq + "_" + file.getName()); + while(f.exists()){ + uniq++; + f = new File(file.getParentFile(), "UPLOAD_" + reqID + "_" + file.getName()); + } + return f; + } + }); + + // Extract all "normal" parameters: + String param; + Enumeration e = multipart.getParameterNames(); + while(e.hasMoreElements()){ + param = e.nextElement(); + for(String occurence : multipart.getParameterValues(param)) + consumeParameter(param, occurence, parameters); + } + + // Extract all inline files as additional parameters: + e = multipart.getFileNames(); + if (!allowUpload && e.hasMoreElements()) + throw new UWSException(UWSException.BAD_REQUEST, "Uploads are not allowed by this service!"); + while(e.hasMoreElements()){ + 
param = e.nextElement(); + if (multipart.getFile(param) == null) + continue; + + /* + * TODO !!!POSSIBLE ISSUE!!! + * MultipartRequest is not able to deal with multiple files having the same parameter name. However, all files are created/uploaded + * but only the last one is accessible through this object....so only the last can be deleted, which could be a problem later + * (hence the usage of the system temporary directory). + */ + + // build its description/pointer: + UploadFile lob = new UploadFile(param, multipart.getOriginalFileName(param), multipart.getFile(param).toURI().toString(), fileManager); + lob.mimeType = multipart.getContentType(param); + lob.length = multipart.getFile(param).length(); + // add it inside the parameters map: + consumeParameter(param, lob, parameters); + } + + }catch(IOException ioe){ + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, ioe, "Internal Error => Impossible to extract parameters from the Multipart HTTP request!"); + }catch(IllegalArgumentException iae){ + String confError = iae.getMessage(); + if (UWSFileManager.TMP_UPLOAD_DIR == null) + confError = "Missing upload directory!"; + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, iae, "Internal Error: Incorrect UPLOAD configuration: " + confError); + } + + return parameters; + } + + /** + *

    Consume the specified parameter: add it inside the given map.

    + * + *

    + * By default, this function is just putting the given value inside the map. So, if the parameter already exists in the map, + * its old value will be overwritten by the given one. + *

    + * + *

    Note: + * If the old value was a file, it will be deleted from the file system before its replacement in the map. + *

    + * + * @param name Name of the parameter to consume. + * @param value Its value. + * @param allParams The list of all parameters read until now. + */ + protected void consumeParameter(final String name, final Object value, final Map allParams){ + // If the old value was a file, delete it before replacing its value: + if (allParams.containsKey(name) && allParams.get(name) instanceof UploadFile){ + try{ + ((UploadFile)allParams.get(name)).deleteFile(); + }catch(IOException ioe){} + } + + // Put the given value in the given map: + allParams.put(name, value); + } + + /** + *

    Utility method that determines whether the content of the given request is a multipart/form-data.

    + * + *

    Important: + * This function just test the content-type of the request. The HTTP method (e.g. GET, POST, ...) is not tested. + *

    + * + * @param request The servlet request to be evaluated. Must be non-null. + * + * @return true if the request is multipart, + * false otherwise. + */ + public static final boolean isMultipartContent(final HttpServletRequest request){ + // Extract the content type and determine if it is a multipart request (its content type should start by multipart/form-data"): + String contentType = request.getContentType(); + if (contentType == null) + return false; + else if (contentType.toLowerCase().startsWith(EXPECTED_CONTENT_TYPE)) + return true; + else + return false; + } + +} diff --git a/src/uws/service/request/NoEncodingParser.java b/src/uws/service/request/NoEncodingParser.java new file mode 100644 index 0000000000000000000000000000000000000000..6ebcbd19e0cad6c5cc02dc3f8de98e4e8a975cee --- /dev/null +++ b/src/uws/service/request/NoEncodingParser.java @@ -0,0 +1,165 @@ +package uws.service.request; + +/* + * This file is part of UWSLibrary. + * + * UWSLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * UWSLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with UWSLibrary. If not, see . 
+ * + * Copyright 2014 - Astronomisches Rechen Institut (ARI) + */ + +import java.io.BufferedInputStream; +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; + +import javax.servlet.http.HttpServletRequest; + +import uws.UWSException; +import uws.service.UWS; +import uws.service.file.UWSFileManager; + +/** + *

    This parser merely copies the whole HTTP request content inside a file. + * It names this file: "JDL" (Job Description Language).

    + * + *

    + * The created file is stored in the temporary upload directory ({@link UWSFileManager#TMP_UPLOAD_DIR} ; this attribute can be modified if needed). + * This directory is supposed to be emptied regularly in case it is forgotten at any moment by the UWS service implementation to delete unused request files. + *

    + * + *

    + * The size of the JDL is limited by the static attribute {@link #SIZE_LIMIT} before the creation of the file. + * Its default value is: {@link #DEFAULT_SIZE_LIMIT}={@value #DEFAULT_SIZE_LIMIT} bytes. + *

    + * + * @author Grégory Mantelet (ARI) + * @version 4.1 (11/2014) + * @since 4.1 + */ +public class NoEncodingParser implements RequestParser { + + /** Default maximum allowed size for an HTTP request content: 2 MiB. */ + public static final int DEFAULT_SIZE_LIMIT = 2 * 1024 * 1024; + + /**

    Maximum allowed size for an HTTP request content. Over this limit, an exception is thrown and the request is aborted.

    + *

    Note: + * The default value is {@link #DEFAULT_SIZE_LIMIT} (= {@value #DEFAULT_SIZE_LIMIT} MiB). + *

    + *

    Note: + * This limit is expressed in bytes and can not be negative. + * Its smallest possible value is 0. If the set value is though negative, + * it will be ignored and {@link #DEFAULT_SIZE_LIMIT} will be used instead. + *

    */ + public static int SIZE_LIMIT = DEFAULT_SIZE_LIMIT; + + /** File manager to use to create {@link UploadFile} instances. + * It is required by this new object to execute open, move and delete operations whenever it could be asked. */ + protected final UWSFileManager fileManager; + + /** + * Build the request parser. + * + * @param fileManager A file manager. MUST NOT be NULL + */ + public NoEncodingParser(final UWSFileManager fileManager){ + if (fileManager == null) + throw new NullPointerException("Missing file manager => can not create a SingleDataParser!"); + this.fileManager = fileManager; + } + + @Override + public Map parse(final HttpServletRequest request) throws UWSException{ + // Check the request size: + if (request.getContentLength() <= 0) + return new HashMap(); + else if (request.getContentLength() > (SIZE_LIMIT < 0 ? DEFAULT_SIZE_LIMIT : SIZE_LIMIT)) + throw new UWSException("JDL too big (>" + SIZE_LIMIT + " bytes) => Request rejected! You should see with the service administrator to extend this limit."); + + // Build the parameter name: + String paramName; + if (request.getMethod() != null && request.getMethod().equalsIgnoreCase("put")){ + paramName = request.getRequestURI(); + if (paramName.lastIndexOf('/') + 1 > 0) + paramName = paramName.substring(paramName.lastIndexOf('/') + 1); + }else + paramName = "JDL"; + + // Build the file by copy of the whole request body: + Object reqID = request.getAttribute(UWS.REQ_ATTRIBUTE_ID); + if (reqID == null || !(reqID instanceof String)) + reqID = (new Date()).getTime(); + File f = new File(UWSFileManager.TMP_UPLOAD_DIR, "REQUESTBODY_" + reqID); + OutputStream output = null; + InputStream input = null; + long totalLength = 0; + try{ + output = new BufferedOutputStream(new FileOutputStream(f)); + input = new BufferedInputStream(request.getInputStream()); + + byte[] buffer = new byte[2049]; + int len = input.read(buffer); + if (len <= 0){ + output.close(); + f.delete(); + HashMap params = new HashMap(1); + 
params.put(paramName, ""); + return params; + }else if (len <= 2048 && request.getMethod() != null && request.getMethod().equalsIgnoreCase("put") && request.getContentType() != null && request.getContentType().toLowerCase().startsWith("text/plain")){ + output.close(); + f.delete(); + HashMap params = new HashMap(1); + params.put(paramName, new String(buffer, 0, len)); + return params; + }else{ + do{ + output.write(buffer, 0, len); + totalLength += len; + }while((len = input.read(buffer)) > 0); + output.flush(); + } + }catch(IOException ioe){ + throw new UWSException(UWSException.INTERNAL_SERVER_ERROR, ioe, "Internal error => Impossible to get the JDL from the HTTP request!"); + }finally{ + if (input != null){ + try{ + input.close(); + }catch(IOException ioe2){} + } + if (output != null){ + try{ + output.close(); + }catch(IOException ioe2){} + } + } + + // Build its description: + UploadFile lob = new UploadFile(paramName, f.toURI().toString(), fileManager); + lob.mimeType = request.getContentType(); + lob.length = totalLength; + + // Create the parameters map: + HashMap parameters = new HashMap(); + parameters.put(paramName, lob); + + return parameters; + } + +} diff --git a/src/uws/service/request/RequestParser.java b/src/uws/service/request/RequestParser.java new file mode 100644 index 0000000000000000000000000000000000000000..fc1b23a3c61c1962b7beddaba929039aa14e9cd8 --- /dev/null +++ b/src/uws/service/request/RequestParser.java @@ -0,0 +1,93 @@ +package uws.service.request; + +/* + * This file is part of UWSLibrary. + * + * UWSLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
+ * + * UWSLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with UWSLibrary. If not, see . + * + * Copyright 2014 - Astronomisches Rechen Institut (ARI) + */ + +import java.util.Map; + +import javax.servlet.http.HttpServletRequest; + +import uws.UWSException; +import uws.job.parameters.InputParamController; +import uws.job.parameters.UWSParameters; + +/** + *

    This parser lets extract parameters from an {@link HttpServletRequest}. + * + *

    + * These parameters can be indeed provided in several ways. Among these ways, + * application/x-www-form-urlencoded and multipart/form-data are the most famous. + * Both are already fully supported by the UWS library by default in {@link UWSParameters}. + *

    + * + *

    IMPORTANT: + * A {@link RequestParser} extension MUST NOT be used to check the parameters' value. + * It only aims to parse an {@link HttpServletRequest} in order to extract parameters. + *

    + * + * @author Grégory Mantelet (ARI) + * @version 4.1 (11/2014) + * @since 4.1 + * + * @see UWSParameters + */ +public interface RequestParser { + + /** + *

    Extract parameters from the given HTTP request.

    + * + *

    + * These parameters can be fetched from {@link HttpServletRequest#getParameterMap()} + * or directly from the full request content. In this last case, a parsing is necessary ; + * hence this function. + *

    + * + *

    + * In case a parameter is provided several times with the same time and the same case, + * the request parser can choose to keep only the last occurrence or all occurrences. + * If all occurrences are kept, this function MUST return an array of {@link Object}s + * (in which types may be mixed), otherwise a map value MUST be an elementary object. + *

    + * + *

    Note: + * A parameter item can be a simple value (e.g. String, integer, ...) + * or a more complex object (e.g. File, InputStream, ...). + *

    + * + *

    IMPORTANT: + * This function MUST NOT be used to check the parameters' value. + * It only aims to parse the given request in order to extract its embedded parameters. + *
    + * Consequently, if this function throws an exception, it could be only because the request + * can not be read, and not because a parameter format or value is incorrect. + *
    + * Parameter checks should be done in {@link UWSParameters} and more particularly by + * an {@link InputParamController}. + *

    + * + * @param request An HTTP request. + * + * @return A map listing all extracted parameters. Values are either an elementary object (whatever is the type), + * or an array of {@link Object}s (in which types can be mixed). + * + * @throws UWSException If any error provides this function to read the parameters. + */ + public Map parse(final HttpServletRequest request) throws UWSException; + +} diff --git a/src/uws/service/request/UWSRequestParser.java b/src/uws/service/request/UWSRequestParser.java new file mode 100644 index 0000000000000000000000000000000000000000..89ea7d9eb3d41d159d9e2d9dae1a9337077cf4b8 --- /dev/null +++ b/src/uws/service/request/UWSRequestParser.java @@ -0,0 +1,141 @@ +package uws.service.request; + +/* + * This file is part of UWSLibrary. + * + * UWSLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * UWSLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with UWSLibrary. If not, see . + * + * Copyright 2014 - Astronomisches Rechen Institut (ARI) + */ + +import java.util.HashMap; +import java.util.Map; + +import javax.servlet.http.HttpServletRequest; + +import uws.UWSException; +import uws.UWSToolBox; +import uws.service.file.UWSFileManager; + +/** + *

    This parser adapts the request parser to use in function of the request content-type:

    + *
      + *
    • application/x-www-form-urlencoded: {@link FormEncodedParser}
    • + *
    • multipart/form-data: {@link MultipartParser}
    • + *
    • other: {@link NoEncodingParser} (the whole request body will be stored as one single parameter)
    • + *
    + * + *

    + * The request body size is limited for the multipart AND the no-encoding parsers. If you want to change this limit, + * you MUST do it for each of these parsers, setting the following static attributes: resp. {@link MultipartParser#SIZE_LIMIT} + * and {@link NoEncodingParser#SIZE_LIMIT}. + *

    + * + *

    Note: + * If you want to change the support other request parsing, you will have to write your own {@link RequestParser} implementation. + *

    + * + * @author Grégory Mantelet (ARI) + * @version 4.1 (12/2014) + * @since 4.1 + */ +public final class UWSRequestParser implements RequestParser { + + /** File manager to use to create {@link UploadFile} instances. + * It is required by this new object to execute open, move and delete operations whenever it could be asked. */ + private final UWSFileManager fileManager; + + /** {@link RequestParser} to use when a application/x-www-form-urlencoded request must be parsed. This attribute is set by {@link #parse(HttpServletRequest)} + * only when needed, by calling the function {@link #getFormParser()}. */ + private RequestParser formParser = null; + + /** {@link RequestParser} to use when a multipart/form-data request must be parsed. This attribute is set by {@link #parse(HttpServletRequest)} + * only when needed, by calling the function {@link #getMultipartParser()}. */ + private RequestParser multipartParser = null; + + /** {@link RequestParser} to use when none of the other parsers can be used ; it will then transform the whole request body in a parameter called "JDL" + * (Job Description Language). This attribute is set by {@link #parse(HttpServletRequest)} only when needed, by calling the function + * {@link #getNoEncodingParser()}. */ + private RequestParser noEncodingParser = null; + + /** + * Build a {@link RequestParser} able to choose the most appropriate {@link RequestParser} in function of the request content-type. + * + * @param fileManager The file manager to use in order to store any eventual upload. MUST NOT be NULL + */ + public UWSRequestParser(final UWSFileManager fileManager){ + if (fileManager == null) + throw new NullPointerException("Missing file manager => can not create a UWSRequestParser!"); + this.fileManager = fileManager; + } + + @Override + public Map parse(final HttpServletRequest req) throws UWSException{ + if (req == null) + return new HashMap(); + + // Get the method: + String method = (req.getMethod() == null) ? 
"" : req.getMethod().toLowerCase(); + + if (method.equals("post") || method.equals("put")){ + Map params = null; + + // Get the parameters: + if (FormEncodedParser.isFormEncodedRequest(req)) + params = getFormParser().parse(req); + else if (MultipartParser.isMultipartContent(req)) + params = getMultipartParser().parse(req); + else + params = getNoEncodingParser().parse(req); + + // Only for POST requests, the parameters specified in the URL must be added: + if (method.equals("post")) + params = UWSToolBox.addGETParameters(req, (params == null) ? new HashMap() : params); + + return params; + }else + return UWSToolBox.addGETParameters(req, new HashMap()); + } + + /** + * Get the {@link RequestParser} to use for application/x-www-form-urlencoded HTTP requests. + * This parser may be created if not already done. + * + * @return The {@link RequestParser} to use for application/x-www-form-urlencoded requests. Never NULL + */ + private synchronized final RequestParser getFormParser(){ + return (formParser == null) ? (formParser = new FormEncodedParser()) : formParser; + } + + /** + * Get the {@link RequestParser} to use for multipart/form-data HTTP requests. + * This parser may be created if not already done. + * + * @return The {@link RequestParser} to use for multipart/form-data requests. Never NULL + */ + private synchronized final RequestParser getMultipartParser(){ + return (multipartParser == null) ? (multipartParser = new MultipartParser(fileManager)) : multipartParser; + } + + /** + * Get the {@link RequestParser} to use for HTTP requests whose the content type is neither application/x-www-form-urlencoded nor multipart/form-data. + * This parser may be created if not already done. + * + * @return The {@link RequestParser} to use for requests whose the content-type is not supported. Never NULL + */ + private synchronized final RequestParser getNoEncodingParser(){ + return (noEncodingParser == null) ? 
(noEncodingParser = new NoEncodingParser(fileManager)) : noEncodingParser; + } + +} diff --git a/src/uws/service/request/UploadFile.java b/src/uws/service/request/UploadFile.java new file mode 100644 index 0000000000000000000000000000000000000000..0ea800e4d904bf0cafa69b2e1b9b564314f76889 --- /dev/null +++ b/src/uws/service/request/UploadFile.java @@ -0,0 +1,207 @@ +package uws.service.request; + +/* + * This file is part of UWSLibrary. + * + * UWSLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * UWSLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with UWSLibrary. If not, see . + * + * Copyright 2014 - Astronomisches Rechen Institut (ARI) + */ + +import java.io.IOException; +import java.io.InputStream; + +import uws.job.UWSJob; +import uws.job.parameters.UWSParameters; +import uws.service.file.UWSFileManager; + +/** + *

    This class lets represent a file submitted inline in an HTTP request.

    + * + *

    + * To read this special kind of parameter, an {@link InputStream} must be open. This class lets do it + * by its function {@link #open()}. + *

    + * + *

    + * When not used any more this file should be deleted, in order to save server disk space. + * This can be easily done thanks to {@link #deleteFile()}. This function actually just call the corresponding function + * of the file manager, which is the only one to known how to deal with this file on the server. Indeed, even if most + * of the time this file is stored on the local file system, it could also be stored on a distant server by a VOSpace. + * In this case, the way to proceed is different, hence the use of the file manager. + *

    + * + * @author Grégory Mantelet (ARI) + * @version 4.1 (11/2014) + * + * @see UWSParameters + * @see MultipartParser + */ +public class UploadFile { + /** Name of the parameter in which the file was submitted. */ + public final String paramName; + + /** File name. It is the name provided in the HTTP request. */ + public final String fileName; + + /** Location at which the content of this upload has been stored. + * It can be a local file path, but also any other path or ID allowing + * the {@link UWSFileManager} to access its content. */ + protected String location; + + /** Jobs that owns this uploaded file. */ + protected UWSJob owner = null; + + /** Indicate whether this file has been or is used by a UWSJob. + * In other words, it is true when an open, move or delete operation has been performed. + * An unused {@link UploadFile} instance shall be physically deleted from the file system. */ + protected boolean used = false; + + /** MIME type of the file. */ + public String mimeType = null; + + /** Length in bytes of the file. + * If negative, the length should be considered as unknown. */ + public long length = -1; + + /** File manager to use in order to open, move or delete this uploaded file. */ + protected final UWSFileManager fileManager; + + /** + * Build the description of an uploaded file. + * + * @param paramName Name of the HTTP request parameter in which the uploaded content was stored. MUST NOT be NULL + * @param location Location of the file on the server. This String is then used by the given file manager in order to open, + * move or delete the uploaded file. Thus, it can be a path, an ID or any other String meaningful to the file manager. + * @param fileManager File manager to use in order to open, move or delete this uploaded file from the server. 
+ */ + public UploadFile(final String paramName, final String location, final UWSFileManager fileManager){ + this(paramName, null, location, fileManager); + } + + /** + * Build the description of an uploaded file. + * + * @param paramName Name of the HTTP request parameter in which the uploaded content was stored. MUST NOT be NULL + * @param fileName Filename as provided by the HTTP request. MAY be NULL + * @param location Location of the file on the server. This String is then used by the given file manager in order to open, + * move or delete the uploaded file. Thus, it can be a path, an ID or any other String meaningful to the file manager. + * @param fileManager File manager to use in order to open, move or delete this uploaded file from the server. + */ + public UploadFile(final String paramName, final String fileName, final String location, final UWSFileManager fileManager){ + if (paramName == null) + throw new NullPointerException("Missing name of the parameter in which the uploaded file content was => can not create UploadFile!"); + else if (location == null) + throw new NullPointerException("Missing server location of the uploaded file => can not create UploadFile!"); + else if (fileManager == null) + throw new NullPointerException("Missing file manager => can not create the UploadFile!"); + + this.paramName = paramName; + this.fileName = (fileName == null) ? "" : fileName; + this.location = location; + this.fileManager = fileManager; + } + + /** + *

    Get the location (e.g. URI, file path) of this file on the server.

    + * + *

    Important note: + * This function SHOULD be used only by the {@link UWSFileManager} when open, move and delete operations are executed. + * The {@link RequestParser} provided by the library set this location to the file URI (i.e. "file://{local-file-path}") + * since the default behavior is to store uploaded file on the system temporary directory. + *

    + * + * @return Location (e.g. URI) or ID or any other meaningful String used by the file manager to access to the uploaded file. + */ + public String getLocation(){ + return location; + } + + /** + * Get the job that uses this uploaded file. + * + * @return The owner of this file. + */ + public UWSJob getOwner(){ + return owner; + } + + /** + *

    Tell whether this uploaded file has been or will be used. + * That's to say, whether an open, delete or move operation has been executed (even if it failed) on this {@link UploadFile} instance.

    + * + * @return true if the file must be preserved, false otherwise. + */ + public final boolean isUsed(){ + return used; + } + + /** + * Open a stream toward this uploaded file. + * + * @return Stream toward this upload content. + * + * @throws IOException If an error occurs while opening the stream. + * + * @see UWSFileManager#getUploadInput(UploadFile) + */ + public InputStream open() throws IOException{ + used = true; + return fileManager.getUploadInput(this); + } + + /** + * Delete definitely this uploaded file from the server. + * + * @throws IOException If the delete operation can not be performed. + * + * @see UWSFileManager#deleteUpload(UploadFile) + */ + public void deleteFile() throws IOException{ + fileManager.deleteUpload(this); + used = true; + } + + /** + *

    Move this uploaded file in a location related to the given {@link UWSJob}. + * It is particularly useful if at reception of an HTTP request uploaded files are stored in a temporary + * directory (e.g. /tmp on Unix/Linux systems).

    + * + *

    + * This function calls {@link UWSFileManager#moveUpload(UploadFile, UWSJob)} to process to the physical + * moving of the file, but it then, it updates its location in this {@link UploadFile} instance. + * The file manager does NOT update this location! That's why it must not be called directly, but + * through {@link #move(UWSJob)}. + *

    + * + * @param destination The job by which this uploaded file will be exclusively used. + * + * @throws IOException If the move operation can not be performed. + * + * @see UWSFileManager#moveUpload(UploadFile, UWSJob) + */ + public void move(final UWSJob destination) throws IOException{ + if (destination == null) + throw new NullPointerException("Missing move destination (i.e. the job in which the uploaded file must be stored)!"); + + location = fileManager.moveUpload(this, destination); + used = true; + owner = destination; + } + + @Override + public String toString(){ + return (owner != null && owner.getJobList() != null && owner.getUrl() != null) ? owner.getUrl().jobParameter(owner.getJobList().getName(), owner.getJobId(), paramName).toString() : fileName; + } +} diff --git a/test/adql/SearchColumnListTest.java b/test/adql/SearchColumnListTest.java deleted file mode 100644 index 776a3435b09af5b21a2a71623e5594e28db1fbe6..0000000000000000000000000000000000000000 --- a/test/adql/SearchColumnListTest.java +++ /dev/null @@ -1,240 +0,0 @@ -package adql; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; - -import adql.db.DBColumn; -import adql.db.DBCommonColumn; -import adql.db.DBTable; -import adql.db.SearchColumnList; -import adql.db.exception.UnresolvedJoin; -import adql.parser.ParseException; -import adql.query.IdentifierField; -import adql.query.operand.ADQLColumn; -import tap.metadata.TAPColumn; -import tap.metadata.TAPSchema; -import tap.metadata.TAPTable; - -public class SearchColumnListTest { - - public static void main(String[] args) throws ParseException{ - - /* SET THE TABLES AND COLUMNS NEEDED FOR THE TEST */ - // Describe the available table: - TAPTable tableA = new TAPTable("A", "TABLE", "NATURAL JOIN Test table", null); - TAPTable tableB = new TAPTable("B", "TABLE", "NATURAL JOIN Test table", null); - TAPTable tableC = new TAPTable("C", "TABLE", "NATURAL JOIN Test table", null); - TAPTable 
tableD = new TAPTable("D", "TABLE", "NATURAL JOIN Test table", null); - - // Describe its columns: - tableA.addColumn(new TAPColumn("id", "Object ID")); - tableA.addColumn(new TAPColumn("txta", "Text of table A")); - tableB.addColumn(new TAPColumn("id", "Object ID")); - tableB.addColumn(new TAPColumn("txtb", "Text of table B")); - tableC.addColumn(new TAPColumn("Id", "Object ID")); - tableC.addColumn(new TAPColumn("txta", "Text of table A")); - tableC.addColumn(new TAPColumn("txtc", "Text of table C")); - tableD.addColumn(new TAPColumn("id", "Object ID")); - tableD.addColumn(new TAPColumn("txta", "Text of table A")); - tableD.addColumn(new TAPColumn("txtd", "Text of table D")); - - // List all available tables: - TAPSchema schema = new TAPSchema("public"); - schema.addTable(tableA); - schema.addTable(tableB); - schema.addTable(tableC); - schema.addTable(tableD); - - // Build the corresponding SearchColumnList: - SearchColumnList listA = new SearchColumnList(); - for(DBColumn col : tableA) - listA.add(col); - SearchColumnList listB = new SearchColumnList(); - for(DBColumn col : tableB) - listB.add(col); - SearchColumnList listC = new SearchColumnList(); - for(DBColumn col : tableC) - listC.add(col); - SearchColumnList listD = new SearchColumnList(); - for(DBColumn col : tableD) - listD.add(col); - - /* TEST OF NATURAL JOIN */ - System.out.println("### CROSS JOIN ###"); - SearchColumnList crossJoin = join(listA, listB, false, null); - - // DEBUG - for(DBColumn dbCol : crossJoin){ - if (dbCol instanceof DBCommonColumn){ - System.out.print("\t- " + dbCol.getADQLName() + " in " + ((dbCol.getTable() == null) ? "" : dbCol.getTable().getADQLName()) + " (= " + dbCol.getDBName() + " in "); - Iterator it = ((DBCommonColumn)dbCol).getCoveredTables(); - DBTable table; - while(it.hasNext()){ - table = it.next(); - System.out.print((table == null) ? 
"" : table.getDBName() + ", "); - } - System.out.println(")"); - }else - System.out.println("\t- " + dbCol.getADQLName() + " in " + ((dbCol.getTable() == null) ? "" : dbCol.getTable().getADQLName()) + " (= " + dbCol.getDBName() + " in " + ((dbCol.getTable() == null) ? "" : dbCol.getTable().getDBName()) + ")"); - } - System.out.println(); - - /* TEST OF NATURAL JOIN */ - System.out.println("### NATURAL JOIN ###"); - SearchColumnList join1 = join(listA, listB, true, null); - SearchColumnList join2 = join(listC, listD, true, null); - //SearchColumnList join3 = join(join1, join2, true, null); - - // DEBUG - for(DBColumn dbCol : join2){ - if (dbCol instanceof DBCommonColumn){ - System.out.print("\t- " + dbCol.getADQLName() + " in " + ((dbCol.getTable() == null) ? "" : dbCol.getTable().getADQLName()) + " (= " + dbCol.getDBName() + " in "); - Iterator it = ((DBCommonColumn)dbCol).getCoveredTables(); - DBTable table; - while(it.hasNext()){ - table = it.next(); - System.out.print((table == null) ? "" : table.getDBName() + ", "); - } - System.out.println(")"); - }else - System.out.println("\t- " + dbCol.getADQLName() + " in " + ((dbCol.getTable() == null) ? "" : dbCol.getTable().getADQLName()) + " (= " + dbCol.getDBName() + " in " + ((dbCol.getTable() == null) ? "" : dbCol.getTable().getDBName()) + ")"); - } - System.out.println(); - - /* TEST OF JOIN USING 1 */ - System.out.println("\n### USING JOIN 1 ###"); - ArrayList usingList = new ArrayList(); - usingList.add(new ADQLColumn("id")); - SearchColumnList joinUsing1 = join(join1, join2, false, usingList); - - // DEBUG - for(DBColumn dbCol : joinUsing1){ - if (dbCol instanceof DBCommonColumn){ - System.out.print("\t- " + dbCol.getADQLName() + " in " + ((dbCol.getTable() == null) ? "" : dbCol.getTable().getADQLName()) + " (= " + dbCol.getDBName() + " in "); - Iterator it = ((DBCommonColumn)dbCol).getCoveredTables(); - DBTable table; - while(it.hasNext()){ - table = it.next(); - System.out.print((table == null) ? 
"" : table.getDBName() + ", "); - } - System.out.println(")"); - }else - System.out.println("\t- " + dbCol.getADQLName() + " in " + ((dbCol.getTable() == null) ? "" : dbCol.getTable().getADQLName()) + " (= " + dbCol.getDBName() + " in " + ((dbCol.getTable() == null) ? "" : dbCol.getTable().getDBName()) + ")"); - } - System.out.println(); - - /* TEST OF JOIN USING 1 * - System.out.println("\n### USING JOIN 2 ###"); - usingList.clear(); - usingList.add(new TAPColumn("id")); - SearchColumnList joinUsing2 = joinUsing(listA, join3, usingList); - - // DEBUG - for(DBColumn dbCol : joinUsing2){ - System.out.println("\t- "+dbCol.getADQLName()+" in "+((dbCol.getTable()==null)?"":dbCol.getTable().getADQLName())+" (= "+dbCol.getDBName()+" in "+((dbCol.getTable()==null)?"":dbCol.getTable().getDBName())+")"); - } - System.out.println();*/ - - } - - public static final SearchColumnList join(final SearchColumnList leftList, final SearchColumnList rightList, final boolean natural, final ArrayList usingList) throws UnresolvedJoin{ - - SearchColumnList list = new SearchColumnList(); - /*SearchColumnList leftList = leftTable.getDBColumns(); - SearchColumnList rightList = rightTable.getDBColumns();*/ - - /* 1. Figure out duplicated columns */ - HashMap mapDuplicated = new HashMap(); - // CASE: NATURAL - if (natural){ - // Find duplicated items between the two lists and add one common column in mapDuplicated for each - DBColumn rightCol; - for(DBColumn leftCol : leftList){ - // search for at most one column with the same name in the RIGHT list - // and throw an exception is there are several matches: - rightCol = findAtMostOneColumn(leftCol.getADQLName(), (byte)0, rightList, false); - // if there is one... 
- if (rightCol != null){ - // ...check there is only one column with this name in the LEFT list, - // and throw an exception if it is not the case: - findExactlyOneColumn(leftCol.getADQLName(), (byte)0, leftList, true); - // ...create a common column: - mapDuplicated.put(leftCol.getADQLName().toLowerCase(), new DBCommonColumn(leftCol, rightCol)); - } - } - - } - // CASE: USING - else if (usingList != null && !usingList.isEmpty()){ - // For each columns of usingList, check there is in each list exactly one matching column, and then, add it in mapDuplicated - DBColumn leftCol, rightCol; - for(ADQLColumn usingCol : usingList){ - // search for exactly one column with the same name in the LEFT list - // and throw an exception if there is none, or if there are several matches: - leftCol = findExactlyOneColumn(usingCol.getColumnName(), usingCol.getCaseSensitive(), leftList, true); - // idem in the RIGHT list: - rightCol = findExactlyOneColumn(usingCol.getColumnName(), usingCol.getCaseSensitive(), rightList, false); - // create a common column: - mapDuplicated.put((usingCol.isCaseSensitive(IdentifierField.COLUMN) ? ("\"" + usingCol.getColumnName() + "\"") : usingCol.getColumnName().toLowerCase()), new DBCommonColumn(leftCol, rightCol)); - } - - } - // CASE: NO DUPLICATION TO FIGURE OUT - else{ - // Return the union of both lists: - list.addAll(leftList); - list.addAll(rightList); - return list; - } - - /* 2. Add all columns of the left list except the ones identified as duplications */ - addAllExcept(leftList, list, mapDuplicated); - - /* 3. Add all columns of the right list except the ones identified as duplications */ - addAllExcept(rightList, list, mapDuplicated); - - /* 4. 
Add all common columns of mapDuplicated */ - list.addAll(mapDuplicated.values()); - - return list; - - } - - public final static void addAllExcept(final SearchColumnList itemsToAdd, final SearchColumnList target, final Map exception){ - for(DBColumn col : itemsToAdd){ - if (!exception.containsKey(col.getADQLName().toLowerCase()) && !exception.containsKey("\"" + col.getADQLName() + "\"")) - target.add(col); - } - } - - public final static DBColumn findExactlyOneColumn(final String columnName, final byte caseSensitive, final SearchColumnList list, final boolean leftList) throws UnresolvedJoin{ - DBColumn result = findAtMostOneColumn(columnName, caseSensitive, list, leftList); - if (result == null) - throw new UnresolvedJoin("Column \"" + columnName + "\" specified in USING clause does not exist in " + (leftList ? "left" : "right") + " table!"); - else - return result; - } - - public final static DBColumn findAtMostOneColumn(final String columnName, final byte caseSensitive, final SearchColumnList list, final boolean leftList) throws UnresolvedJoin{ - ArrayList result = list.search(null, null, null, columnName, caseSensitive); - if (result.isEmpty()) - return null; - else if (result.size() > 1) - throw new UnresolvedJoin("Common column name \"" + columnName + "\" appears more than once in " + (leftList ? "left" : "right") + " table!"); - else - return result.get(0); - } - - /** - * Tells whether the given column is a common column (that's to say, a unification of several columns of the same name). - * - * @param col A DBColumn. - * @return true if the given column is a common column, false otherwise (particularly if col = null). 
- */ - public static final boolean isCommonColumn(final DBColumn col){ - return (col != null && col instanceof DBCommonColumn); - } - -} diff --git a/test/adql/SearchIterator.java b/test/adql/SearchIterator.java deleted file mode 100644 index c36588209dedb161c47d46a7e3deaefdb731798d..0000000000000000000000000000000000000000 --- a/test/adql/SearchIterator.java +++ /dev/null @@ -1,76 +0,0 @@ -package adql; - -/* - * This file is part of ADQLLibrary. - * - * ADQLLibrary is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * ADQLLibrary is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with ADQLLibrary. If not, see . - * - * Copyright 2011 - UDS/Centre de Données astronomiques de Strasbourg (CDS) - */ - -import java.util.Iterator; -import java.util.NoSuchElementException; -import java.util.Vector; - -/** - * Lets iterate on each "real" result ({@link SearchResult} objects whose the {@link SearchResult#isResult() isResult()} function returns true). - * - * @author Grégory Mantelet (CDS) - * @version 11/2010 - * - * @see SearchResult - */ -public class SearchIterator implements Iterator { - - /** List of the next SearchResult objects which has at least one result (themselves or included SearchResult). 
*/ - protected Vector toExplore; - - public SearchIterator(SearchResult r){ - toExplore = new Vector(); - if (r != null && r.hasResult()) - toExplore.add(r); - } - - public boolean hasNext(){ - return !toExplore.isEmpty(); - } - - public SearchResult next() throws NoSuchElementException{ - SearchResult next = null; - - while(next == null && !toExplore.isEmpty()){ - SearchResult r = toExplore.remove(0); - if (!r.isLeaf()){ - Iterator children = r.getChildren(); - while(children.hasNext()){ - SearchResult child = children.next(); - if (child != null && child.hasResult()) - toExplore.add(child); - } - } - if (r.isResult()) - next = r; - } - - if (next == null) - throw new NoSuchElementException("No more search result !"); - - return next; - } - - public void remove() throws UnsupportedOperationException{ - throw new UnsupportedOperationException("The REMOVE operation is not possible in a search result !"); - } - -} diff --git a/test/adql/SearchResult.java b/test/adql/SearchResult.java deleted file mode 100644 index 3a373d8868c40ee83ab0baa3f75e1f40da44f6a6..0000000000000000000000000000000000000000 --- a/test/adql/SearchResult.java +++ /dev/null @@ -1,251 +0,0 @@ -package adql; - -/* - * This file is part of ADQLLibrary. - * - * ADQLLibrary is free software: you can redistribute it and/or modify - * it under the terms of the GNU Lesser General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * ADQLLibrary is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Lesser General Public License for more details. - * - * You should have received a copy of the GNU Lesser General Public License - * along with ADQLLibrary. If not, see . 
- * - * Copyright 2011 - UDS/Centre de Données astronomiques de Strasbourg (CDS) - */ - -import java.util.Iterator; -import java.util.Vector; - -import adql.query.ADQLObject; - -/** - *

    Results of a research in an ADQL query.

    - * - *

    This class is built as a tree. A node (leaf or not) corresponds to an item of a part of an ADQL query or merely of a whole ADQL query. - * It represents a step of the research. That means a node can represents a matched ADQL item and/or a list of other SearchResults (which are the results of the same research into the corresponding ADQL object). - * Thus it is possible to know the parent (into the ADQL query) of a matched ADQL item.

    - * - *

    Here are some useful functions of this class: - *

      - *
    • {@link SearchResult#isResult() isResult()}: indicates whether the current node corresponds to a matched ADQL item
    • - *
    • {@link SearchResult#getResult() getResult()}: returns the value of this node
    • - *
    • {@link SearchResult#getParent() getParent()}: returns the result (node) which encapsulates this result (node)
    • - *
    • {@link SearchResult#isLeaf() isLeaf()}: indicates whether this node encapsulates other results (nodes) or not
    • - *
    • {@link SearchResult#getChildren() getChildren()}: returns an iterator on all encapsulated results (nodes)
    • - *

    - * - *

    You have two different ways to navigate in a SearchResult object: - *

      - *
    1. As said previously a SearchResult is a hierarchical structure. So you can explore it as a tree with the functions {@link SearchResult#getResult() getResult()} (to get the node value), {@link SearchResult#getParent() getParent()} (to get the direct parent node), {@link SearchResult#getChildren() getChildren()} (to explore the children list) and {@link SearchResult#isLeaf() isLeaf()} (to determine if the current node is a leaf or not).
    2. - *
    3. However you can also iterate directly on each matched ADQL item (leaf or not) thanks to the {@link SearchResult#iterator() iterator()} function. All iterated object corresponds to a matched ADQL object (so {@link SearchResult#isResult() isResult()} always returns true for all iterated results).
    4. - *

    - * - *

    Important: Be aware that any SearchResult (leaf or not) may contain a matched ADQL object: to know that, use the function {@link SearchResult#isResult() isResult()}.

    - * - * @author Grégory Mantelet (CDS) - * @version 11/2010 - * - * @see SearchIterator - */ -public final class SearchResult implements Iterable { - - /** Parent node. */ - private SearchResult parent; - - /** Child nodes. */ - private final Vector children; - - /** Total number of results from this node (included). */ - private int nbResults = 0; - - /** The node value (may be the matched ADQL object). */ - private final ADQLObject value; - - /** Indicates whether this node corresponds to a matched ADQL object or not. */ - private final boolean result; - - /** If it is impossible to replace an ADQL object by another one, a SearchResult must be created (with result = true) and this field must contain an error description. */ - private String error = null; - - /** - *

    Builds a SearchResult (node) with its value (node value).

    - *

    Note: By using this constructor the created SearchResult will not correspond to a matched ADQL object.

    - * - * @param nodeValue Value (ADQL object) associated with this node. - */ - public SearchResult(ADQLObject nodeValue){ - this(nodeValue, false); - } - - /** - * Builds a SearchResult (node) with its value (node value) and an indication on its interpretation (~ "matched ADQL object ?"). - * - * @param nodeValue Value (ADQL object) associated with this node. - * @param isResult Indicates whether the given ADQL object is a match or not. - */ - public SearchResult(ADQLObject nodeValue, boolean isResult){ - this.parent = null; - children = new Vector(); - - value = nodeValue; - result = (nodeValue != null) && isResult; - if (result) - nbResults = 1; - } - - /** - * Gets the ADQL object associated with this node. - * It may be a matched ADQL item (it depends of what returns the {@link SearchResult#isResult() isResult()} function). - * - * @return The node value. - */ - public final ADQLObject getResult(){ - return value; - } - - /** - * Indicates whether the ADQL object (returned by {@link SearchResult#getResult() getResult()}) is a match or not. - * - * @return true if this SearchResult corresponds to a matched ADQL item, false otherwise. - */ - public final boolean isResult(){ - return result; - } - - /** - * Gets the error that occurs when replacing the matched item. - * - * @return Replacing error. - */ - public final String getError(){ - return error; - } - - /** - * Indicates whether there was an error during the replacement of the matched item. - * - * @return true if there was an error during the replacement, false else. - */ - public final boolean hasError(){ - return error != null; - } - - /** - * Sets the explanation of why the matched item has not been replaced. - * - * @param msg Error description. - */ - public final void setError(String msg){ - if (msg != null){ - msg = msg.trim(); - error = (msg.length() == 0) ? null : msg; - }else - error = null; - } - - /** - * Gets the parent node. - * - * @return Its parent node. 
- */ - public final SearchResult getParent(){ - return parent; - } - - /** - * Changes the parent node. - * - * @param newParent Its new parent node. - */ - private final void setParent(SearchResult newParent){ - parent = newParent; - } - - /** - * Gets an iterator on the children list of this SearchResult. - * - * @return An iterator on its children. - */ - public final Iterator getChildren(){ - return children.iterator(); - } - - /** - * Indicates whether this node is a leaf (that is to say if it has children). - * - * @return true if this node is a leaf, false otherwise. - */ - public final boolean isLeaf(){ - return children.isEmpty(); - } - - /** - * Lets adding a child to this node. - * - * @param result The SearchResult to add. - */ - public final void add(SearchResult result){ - if (result != null){ - // Add the given result: - children.add(result); - - // Set its parent: - result.setParent(this); - - // Update the total number of results from this node: - updateNbResults(); - } - } - - /** - * Counts exactly the total number of results from this node (included). - * Once the counting phase finished the direct parent node is notify that it must update its own number of results. - */ - private final void updateNbResults(){ - synchronized(this){ - // Count all results from this node: - nbResults = isResult() ? 1 : 0; - for(SearchResult r : children) - nbResults += r.getNbResults(); - } - - // Notify the direct parent node: - if (parent != null) - parent.updateNbResults(); - } - - /** - *

    Indicates whether this node is and/or contains some results (SearchResult objects whose the function isResult() returns true).

    - * - * @return true if this SearchResult is a result or if one of its children is a result, false otherwise. - */ - public final boolean hasResult(){ - return nbResults > 0; - } - - /** - *

    Tells exactly the number of SearchResult which are really results.

    - * - * @return The number of matched ADQL item. - */ - public final int getNbResults(){ - return nbResults; - } - - /** - * Lets iterating on all contained SearchResult objects (itself included) which are really a result (whose the function isResult() returns true). - * - * @see java.lang.Iterable#iterator() - * @see SearchIterator - */ - public final Iterator iterator(){ - return new SearchIterator(this); - } - -} diff --git a/test/adql/TestADQLQuery.java b/test/adql/TestADQLQuery.java index 7fac283d388e9b9db1e92921d3667455e912389a..54a704ecdef9a2759b37aaee65df4207f34bb3b2 100644 --- a/test/adql/TestADQLQuery.java +++ b/test/adql/TestADQLQuery.java @@ -1,6 +1,13 @@ package adql; +import static org.junit.Assert.assertEquals; + +import java.util.ArrayList; import java.util.Iterator; +import java.util.List; + +import org.junit.Before; +import org.junit.Test; import adql.query.ADQLObject; import adql.query.ADQLOrder; @@ -9,13 +16,10 @@ import adql.query.ClauseADQL; import adql.query.ClauseConstraints; import adql.query.ClauseSelect; import adql.query.SelectItem; - import adql.query.constraint.Comparison; import adql.query.constraint.ComparisonOperator; import adql.query.constraint.ConstraintsGroup; - import adql.query.from.ADQLTable; - import adql.query.operand.ADQLColumn; import adql.query.operand.Concatenation; import adql.query.operand.NumericConstant; @@ -23,24 +27,45 @@ import adql.query.operand.Operation; import adql.query.operand.OperationType; import adql.query.operand.StringConstant; import adql.query.operand.WrappedOperand; - +import adql.search.IReplaceHandler; import adql.search.ISearchHandler; import adql.search.SearchColumnHandler; +import adql.search.SimpleReplaceHandler; public class TestADQLQuery { - public static final void main(String[] args) throws Exception{ - ADQLQuery query = new ADQLQuery(); + private ADQLQuery query = null; + private List columns = new ArrayList(8); + private List typeObjColumns = new ArrayList(3); + + @Before + public 
void setUp(){ + query = new ADQLQuery(); + columns.clear(); + typeObjColumns.clear(); + + columns.add(new ADQLColumn("O", "nameObj")); // 0 = O.nameObj + columns.add(new ADQLColumn("O", "typeObj")); // 1 = O.typeObj + columns.add(new ADQLColumn("O", "ra")); // 2 = O.ra + columns.add(new ADQLColumn("O", "dec")); // 3 = O.dec + columns.add(new ADQLColumn("ra")); // 4 = ra + columns.add(new ADQLColumn("dec")); // 5 = dec + columns.add(new ADQLColumn("typeObj")); // 6 = typeObj + columns.add(new ADQLColumn("typeObj")); // 7 = typeObj + + typeObjColumns.add(columns.get(1)); + typeObjColumns.add(columns.get(6)); + typeObjColumns.add(columns.get(7)); // SELECT: ClauseSelect select = query.getSelect(); Concatenation concatObj = new Concatenation(); - concatObj.add(new ADQLColumn("O", "nameObj")); + concatObj.add(columns.get(0)); // O.nameObj concatObj.add(new StringConstant(" (")); - concatObj.add(new ADQLColumn("O", "typeObj")); + concatObj.add(columns.get(1)); // O.typeObj concatObj.add(new StringConstant(")")); select.add(new SelectItem(new WrappedOperand(concatObj), "Nom objet")); - select.add(new ADQLColumn("O", "ra")); - select.add(new ADQLColumn("O", "dec")); + select.add(columns.get(2)); // O.ra + select.add(columns.get(3)); // O.dec // FROM: ADQLTable table = new ADQLTable("truc.ObsCore"); @@ -50,40 +75,53 @@ public class TestADQLQuery { // WHERE: ClauseConstraints where = query.getWhere(); - where.add(new Comparison(new Operation(new ADQLColumn("ra"), OperationType.DIV, new ADQLColumn("dec")), ComparisonOperator.GREATER_THAN, new NumericConstant("1"))); + // ra/dec > 1 + where.add(new Comparison(new Operation(columns.get(4), OperationType.DIV, columns.get(5)), ComparisonOperator.GREATER_THAN, new NumericConstant("1"))); ConstraintsGroup constOr = new ConstraintsGroup(); - constOr.add(new Comparison(new ADQLColumn("typeObj"), ComparisonOperator.EQUAL, new StringConstant("Star"))); - constOr.add("OR", new Comparison(new ADQLColumn("typeObj"), 
ComparisonOperator.LIKE, new StringConstant("Galaxy*"))); + // AND (typeObj == 'Star' + constOr.add(new Comparison(columns.get(6), ComparisonOperator.EQUAL, new StringConstant("Star"))); + // OR typeObj LIKE 'Galaxy*') + constOr.add("OR", new Comparison(columns.get(7), ComparisonOperator.LIKE, new StringConstant("Galaxy*"))); where.add("AND", constOr); // ORDER BY: ClauseADQL orderBy = query.getOrderBy(); orderBy.add(new ADQLOrder(1, true)); + } - System.out.println("*** QUERY ***\n" + query.toADQL()); + @Test + public void testADQLQuery(){ + assertEquals("SELECT (O.nameObj || ' (' || O.typeObj || ')') AS Nom objet , O.ra , O.dec\nFROM truc.ObsCore AS O\nWHERE ra/dec > 1 AND (typeObj = 'Star' OR typeObj LIKE 'Galaxy*')\nORDER BY 1 DESC", query.toADQL()); + } + @Test + public void testSearch(){ ISearchHandler sHandler = new SearchColumnHandler(false); Iterator results = query.search(sHandler); - // IReplaceHandler sHandler = new SimpleReplaceHandler(false, false) { - // - // @Override - // protected boolean match(ADQLObject obj) { - // return (obj instanceof ADQLColumn) && (((ADQLColumn)obj).getColumnName().equalsIgnoreCase("typeObj")); - // } - // - // @Override - // public ADQLObject getReplacer(ADQLObject objToReplace) throws UnsupportedOperationException { - // return new ADQLColumn("NewTypeObj"); - // } - // - // }; - // sHandler.searchAndReplace(query); - // System.out.println("INFO: "+sHandler.getNbReplacement()+"/"+sHandler.getNbMatch()+" replaced objects !"); - // Iterator results = sHandler.iterator(); - System.out.println("\n*** SEARCH ALL COLUMNS ***"); - while(results.hasNext()) - System.out.println("\t- " + results.next().toADQL()); - - System.out.println("\n*** QUERY ***\n" + query.toADQL()); + assertEquals(columns.size(), sHandler.getNbMatch()); + for(ADQLColumn expectedCol : columns) + assertEquals(expectedCol, results.next()); + } + + @Test + public void testReplace(){ + IReplaceHandler sHandler = new SimpleReplaceHandler(false, false){ + @Override 
+ protected boolean match(ADQLObject obj){ + return (obj instanceof ADQLColumn) && (((ADQLColumn)obj).getColumnName().equalsIgnoreCase("typeObj")); + } + + @Override + public ADQLObject getReplacer(ADQLObject objToReplace) throws UnsupportedOperationException{ + return new ADQLColumn("NewTypeObj"); + } + }; + sHandler.searchAndReplace(query); + assertEquals(typeObjColumns.size(), sHandler.getNbMatch()); + assertEquals(sHandler.getNbMatch(), sHandler.getNbReplacement()); + Iterator results = sHandler.iterator(); + for(ADQLColumn expectedCol : typeObjColumns) + assertEquals(expectedCol, results.next()); + assertEquals("SELECT (O.nameObj || ' (' || NewTypeObj || ')') AS Nom objet , O.ra , O.dec\nFROM truc.ObsCore AS O\nWHERE ra/dec > 1 AND (NewTypeObj = 'Star' OR NewTypeObj LIKE 'Galaxy*')\nORDER BY 1 DESC", query.toADQL()); } } diff --git a/test/adql/TestGetPositionInAllADQLObject.java b/test/adql/TestGetPositionInAllADQLObject.java index 19744e9b7aa31411ab1d7e5596a9755c6ebdd5a3..5fd4fc514745ddfaddedfe32b41f74e7d2f43ed4 100644 --- a/test/adql/TestGetPositionInAllADQLObject.java +++ b/test/adql/TestGetPositionInAllADQLObject.java @@ -11,13 +11,13 @@ public class TestGetPositionInAllADQLObject { public static void main(String[] args) throws Throwable{ ADQLParser parser = new ADQLParser(); - ADQLQuery query = parser.parseQuery("SELECT truc, bidule.machin FROM foo JOIN bidule USING(id) WHERE truc > 12.5 AND bidule.machin < 5"); + ADQLQuery query = parser.parseQuery("SELECT truc, bidule.machin FROM foo JOIN bidule USING(id) WHERE truc > 12.5 AND bidule.machin < 5 GROUP BY chose HAVING try > 0 ORDER BY chouetteAlors"); System.out.println("\nOBJECT WITH NO DEFINED POSITION:"); Iterator results = query.search(new SimpleSearchHandler(true){ @Override protected boolean match(ADQLObject obj){ - return obj.getPosition() == null; + return /*(obj instanceof ADQLList && ((ADQLList)obj).size() > 0) &&*/obj.getPosition() == null; } }); while(results.hasNext()) diff --git 
a/test/adql/TestIN.java b/test/adql/TestIN.java index 43ad6cac396f2260a8ff0af7e44803ce67aa2936..d3ad2f2241ff9544aa287aa333d21053623ce294 100644 --- a/test/adql/TestIN.java +++ b/test/adql/TestIN.java @@ -1,32 +1,52 @@ package adql; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + import java.util.Iterator; +import org.junit.BeforeClass; +import org.junit.Test; + import adql.query.ADQLList; import adql.query.ADQLObject; import adql.query.ADQLOrder; import adql.query.ADQLQuery; import adql.query.ClauseSelect; - import adql.query.constraint.In; - import adql.query.from.ADQLTable; - import adql.query.operand.ADQLColumn; import adql.query.operand.ADQLOperand; import adql.query.operand.StringConstant; - import adql.search.IReplaceHandler; import adql.search.SimpleReplaceHandler; - +import adql.translator.ADQLTranslator; import adql.translator.PostgreSQLTranslator; public class TestIN { - public static void main(String[] args) throws Exception{ - In myIn = new In(new ADQLColumn("typeObj"), new ADQLOperand[]{new StringConstant("galaxy"),new StringConstant("star"),new StringConstant("planet"),new StringConstant("nebula")}, true); - System.out.println(myIn.getName() + ": " + myIn.toADQL()); + private static ADQLTranslator translator = null; + + @BeforeClass + public static void setUpBeforeClass(){ + translator = new PostgreSQLTranslator(); + } + @Test + public void testIN(){ + // Test with a simple list of values (here, string constants): + In myIn = new In(new ADQLColumn("typeObj"), new ADQLOperand[]{new StringConstant("galaxy"),new StringConstant("star"),new StringConstant("planet"),new StringConstant("nebula")}, true); + // check the ADQL: + assertEquals("typeObj NOT IN ('galaxy' , 'star' , 'planet' , 'nebula')", myIn.toADQL()); + // check the SQL translation: + try{ + assertEquals(myIn.toADQL(), translator.translate(myIn)); + }catch(Exception ex){ + ex.printStackTrace(); + fail("This test should have succeeded because the IN statement 
is correct and theoretically well supported by the POSTGRESQL translator!"); + } + + // Test with a sub-query: ADQLQuery subQuery = new ADQLQuery(); ClauseSelect select = subQuery.getSelect(); @@ -40,10 +60,17 @@ public class TestIN { orderBy.add(new ADQLOrder(1)); myIn.setSubQuery(subQuery); - System.out.println("\n*** " + myIn.getName().toUpperCase() + " ***\n" + myIn.toADQL()); - PostgreSQLTranslator translator = new PostgreSQLTranslator(); - System.out.println("\n*** SQL TRANSLATION ***\n" + translator.translate(myIn)); - + // check the ADQL: + assertEquals("typeObj NOT IN (SELECT DISTINCT TOP 10 typeObj\nFROM Objects\nORDER BY 1 ASC)", myIn.toADQL()); + // check the SQL translation: + try{ + assertEquals("typeObj NOT IN (SELECT DISTINCT typeObj AS \"typeObj\"\nFROM Objects\nORDER BY 1 ASC\nLimit 10)", translator.translate(myIn)); + }catch(Exception ex){ + ex.printStackTrace(); + fail("This test should have succeeded because the IN statement is correct and theoretically well supported by the POSTGRESQL translator!"); + } + + // Test after replacement inside this IN statement: IReplaceHandler sHandler = new SimpleReplaceHandler(true){ @Override @@ -57,13 +84,12 @@ public class TestIN { } }; sHandler.searchAndReplace(myIn); - System.out.println("INFO: " + sHandler.getNbReplacement() + "/" + sHandler.getNbMatch() + " replaced objects !"); + assertEquals(2, sHandler.getNbMatch()); + assertEquals(sHandler.getNbMatch(), sHandler.getNbReplacement()); Iterator results = sHandler.iterator(); - System.out.println("\n*** SEARCH RESULTS ***"); while(results.hasNext()) - System.out.println("\t- " + results.next()); - - System.out.println("\n*** AFTER REPLACEMENT ***\n" + myIn.toADQL()); + assertEquals("typeObj", results.next().toADQL()); + assertEquals("type NOT IN (SELECT DISTINCT TOP 10 type\nFROM Objects\nORDER BY 1 ASC)", myIn.toADQL()); } } diff --git a/test/adql/TestIdentifierField.java b/test/adql/TestIdentifierField.java new file mode 100644 index 
0000000000000000000000000000000000000000..c4c5fc2dc99d3d79927e0f2a6e488753a0f1004b --- /dev/null +++ b/test/adql/TestIdentifierField.java @@ -0,0 +1,25 @@ +package adql; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import org.junit.Test; + +import adql.query.IdentifierField; + +public class TestIdentifierField { + + @Test + public void testIsCaseSensitive(){ + byte b = 0x00; + assertFalse(IdentifierField.SCHEMA.isCaseSensitive(b)); + b = IdentifierField.SCHEMA.setCaseSensitive(b, true); + assertTrue(IdentifierField.SCHEMA.isCaseSensitive(b)); + } + + /*@Test + public void testSetCaseSensitive(){ + fail("Not yet implemented"); + }*/ + +} diff --git a/test/adql/db/TestDBChecker.java b/test/adql/db/TestDBChecker.java new file mode 100644 index 0000000000000000000000000000000000000000..3f9924a8616ad959deebbbbab8f7df1dac96b3f8 --- /dev/null +++ b/test/adql/db/TestDBChecker.java @@ -0,0 +1,740 @@ +package adql.db; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; + +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import adql.db.DBType.DBDatatype; +import adql.db.FunctionDef.FunctionParam; +import adql.db.exception.UnresolvedIdentifiersException; +import adql.parser.ADQLParser; +import adql.parser.ParseException; +import adql.query.ADQLObject; +import adql.query.ADQLQuery; +import adql.query.operand.ADQLColumn; +import adql.query.operand.ADQLOperand; +import adql.query.operand.StringConstant; +import adql.query.operand.function.DefaultUDF; +import adql.query.operand.function.UserDefinedFunction; +import 
adql.search.SimpleSearchHandler; +import adql.translator.ADQLTranslator; +import adql.translator.TranslationException; + +public class TestDBChecker { + + private static List tables; + + @BeforeClass + public static void setUpBeforeClass() throws Exception{ + tables = new ArrayList(); + + DefaultDBTable fooTable = new DefaultDBTable("foo"); + DBColumn col = new DefaultDBColumn("colS", new DBType(DBDatatype.VARCHAR), fooTable); + fooTable.addColumn(col); + col = new DefaultDBColumn("colI", new DBType(DBDatatype.INTEGER), fooTable); + fooTable.addColumn(col); + col = new DefaultDBColumn("colG", new DBType(DBDatatype.POINT), fooTable); + fooTable.addColumn(col); + + tables.add(fooTable); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception{} + + @Before + public void setUp() throws Exception{} + + @After + public void tearDown() throws Exception{} + + @Test + public void testNumericOrStringValueExpressionPrimary(){ + ADQLParser parser = new ADQLParser(); + try{ + assertNotNull(parser.parseQuery("SELECT 'toto' FROM foo;")); + assertNotNull(parser.parseQuery("SELECT ('toto') FROM foo;")); + assertNotNull(parser.parseQuery("SELECT (('toto')) FROM foo;")); + assertNotNull(parser.parseQuery("SELECT 'toto' || 'blabla' FROM foo;")); + assertNotNull(parser.parseQuery("SELECT ('toto' || 'blabla') FROM foo;")); + assertNotNull(parser.parseQuery("SELECT (('toto' || 'blabla')) FROM foo;")); + assertNotNull(parser.parseQuery("SELECT (('toto') || (('blabla'))) FROM foo;")); + assertNotNull(parser.parseQuery("SELECT 3 FROM foo;")); + assertNotNull(parser.parseQuery("SELECT ((2+3)*5) FROM foo;")); + assertNotNull(parser.parseQuery("SELECT ABS(-123) FROM foo;")); + assertNotNull(parser.parseQuery("SELECT ABS(2*-1+5) FROM foo;")); + assertNotNull(parser.parseQuery("SELECT ABS(COUNT(*)) FROM foo;")); + assertNotNull(parser.parseQuery("SELECT toto FROM foo;")); + assertNotNull(parser.parseQuery("SELECT toto * 3 FROM foo;")); + 
assertNotNull(parser.parseQuery("SELECT toto || 'blabla' FROM foo;")); + }catch(ParseException pe){ + pe.printStackTrace(); + fail(); + } + try{ + parser.parseQuery("SELECT ABS('toto') FROM foo;"); + fail(); + }catch(ParseException pe){} + try{ + parser.parseQuery("SELECT ABS(('toto' || 'blabla')) FROM foo;"); + fail(); + }catch(ParseException pe){} + try{ + parser.parseQuery("SELECT 'toto' || 1 FROM foo;"); + fail(); + }catch(ParseException pe){} + try{ + parser.parseQuery("SELECT 1 || 'toto' FROM foo;"); + fail(); + }catch(ParseException pe){} + try{ + parser.parseQuery("SELECT 'toto' * 3 FROM foo;"); + fail(); + }catch(ParseException pe){} + } + + @Test + public void testUDFManagement(){ + // UNKNOWN FUNCTIONS ARE NOT ALLOWED: + ADQLParser parser = new ADQLParser(new DBChecker(tables, new ArrayList(0))); + + // Test with a simple ADQL query without unknown or user defined function: + try{ + assertNotNull(parser.parseQuery("SELECT * FROM foo;")); + }catch(ParseException e){ + e.printStackTrace(); + fail("A simple and basic query should not be a problem for the parser!"); + } + + // Test with an ADQL query containing one not declared UDF: + try{ + parser.parseQuery("SELECT toto() FROM foo;"); + fail("This query contains a UDF while it's not allowed: this test should have failed!"); + }catch(ParseException e){ + assertTrue(e instanceof UnresolvedIdentifiersException); + UnresolvedIdentifiersException ex = (UnresolvedIdentifiersException)e; + assertEquals(1, ex.getNbErrors()); + assertEquals("Unresolved function: \"toto()\"! 
No UDF has been defined or found with the signature: toto().", ex.getErrors().next().getMessage()); + } + + // DECLARE THE UDFs: + FunctionDef[] udfs = new FunctionDef[]{new FunctionDef("toto", new DBType(DBDatatype.VARCHAR)),new FunctionDef("tata", new DBType(DBDatatype.INTEGER))}; + parser = new ADQLParser(new DBChecker(tables, Arrays.asList(udfs))); + + // Test again: + try{ + assertNotNull(parser.parseQuery("SELECT toto() FROM foo;")); + assertNotNull(parser.parseQuery("SELECT tata() FROM foo;")); + }catch(ParseException e){ + e.printStackTrace(); + fail("This query contains a DECLARED UDF: this test should have succeeded!"); + } + + // Test but with at least one parameter: + try{ + parser.parseQuery("SELECT toto('blabla') FROM foo;"); + fail("This query contains an unknown UDF signature (the fct toto is declared with no parameter): this test should have failed!"); + }catch(ParseException e){ + assertTrue(e instanceof UnresolvedIdentifiersException); + UnresolvedIdentifiersException ex = (UnresolvedIdentifiersException)e; + assertEquals(1, ex.getNbErrors()); + assertEquals("Unresolved function: \"toto('blabla')\"! 
No UDF has been defined or found with the signature: toto(STRING).", ex.getErrors().next().getMessage()); + } + + // Test with a UDF whose the class is specified ; the corresponding object in the ADQL tree must be replace by an instance of this class: + udfs = new FunctionDef[]{new FunctionDef("toto", new DBType(DBDatatype.VARCHAR), new FunctionParam[]{new FunctionParam("txt", new DBType(DBDatatype.VARCHAR))})}; + udfs[0].setUDFClass(UDFToto.class); + parser = new ADQLParser(new DBChecker(tables, Arrays.asList(udfs))); + try{ + ADQLQuery query = parser.parseQuery("SELECT toto('blabla') FROM foo;"); + assertNotNull(query); + Iterator it = query.search(new SimpleSearchHandler(){ + @Override + protected boolean match(ADQLObject obj){ + return (obj instanceof UserDefinedFunction) && ((UserDefinedFunction)obj).getName().equals("toto"); + } + }); + assertTrue(it.hasNext()); + assertEquals(UDFToto.class.getName(), it.next().getClass().getName()); + assertFalse(it.hasNext()); + }catch(Exception e){ + e.printStackTrace(); + fail("This query contains a DECLARED UDF with a valid UserDefinedFunction class: this test should have succeeded!"); + } + + // Test with a wrong parameter type: + try{ + parser.parseQuery("SELECT toto(123) FROM foo;"); + fail("This query contains an unknown UDF signature (the fct toto is declared with one parameter of type STRING...here it is a numeric): this test should have failed!"); + }catch(Exception e){ + assertTrue(e instanceof UnresolvedIdentifiersException); + UnresolvedIdentifiersException ex = (UnresolvedIdentifiersException)e; + assertEquals(1, ex.getNbErrors()); + assertEquals("Unresolved function: \"toto(123)\"! 
No UDF has been defined or found with the signature: toto(NUMERIC).", ex.getErrors().next().getMessage()); + } + + // Test with UDF class constructor throwing an exception: + udfs = new FunctionDef[]{new FunctionDef("toto", new DBType(DBDatatype.VARCHAR), new FunctionParam[]{new FunctionParam("txt", new DBType(DBDatatype.VARCHAR))})}; + udfs[0].setUDFClass(WrongUDFToto.class); + parser = new ADQLParser(new DBChecker(tables, Arrays.asList(udfs))); + try{ + parser.parseQuery("SELECT toto('blabla') FROM foo;"); + fail("The set UDF class constructor has throw an error: this test should have failed!"); + }catch(Exception e){ + assertTrue(e instanceof UnresolvedIdentifiersException); + UnresolvedIdentifiersException ex = (UnresolvedIdentifiersException)e; + assertEquals(1, ex.getNbErrors()); + assertEquals("Impossible to represent the function \"toto\": the following error occured while creating this representation: \"[Exception] Systematic error!\"", ex.getErrors().next().getMessage()); + } + } + + @Test + public void testGeometry(){ + // DECLARE A SIMPLE PARSER where all geometries are allowed by default: + ADQLParser parser = new ADQLParser(new DBChecker(tables)); + + // Test with several geometries while all are allowed: + try{ + assertNotNull(parser.parseQuery("SELECT * FROM foo WHERE CONTAINS(POINT('', 12.3, 45.6), CIRCLE('', 1.2, 2.3, 5)) = 1;")); + }catch(ParseException pe){ + pe.printStackTrace(); + fail("This query contains several geometries, and all are theoretically allowed: this test should have succeeded!"); + } + + // Test with several geometries while only the allowed ones: + try{ + parser = new ADQLParser(new DBChecker(tables, new ArrayList(0), Arrays.asList(new String[]{"CONTAINS","POINT","CIRCLE"}), null)); + assertNotNull(parser.parseQuery("SELECT * FROM foo WHERE CONTAINS(POINT('', 12.3, 45.6), CIRCLE('', 1.2, 2.3, 5)) = 1;")); + }catch(ParseException pe){ + pe.printStackTrace(); + fail("This query contains several geometries, and all are 
theoretically allowed: this test should have succeeded!"); + } + try{ + parser.parseQuery("SELECT * FROM foo WHERE INTERSECTS(POINT('', 12.3, 45.6), CIRCLE('', 1.2, 2.3, 5)) = 1;"); + fail("This query contains a not-allowed geometry function (INTERSECTS): this test should have failed!"); + }catch(ParseException pe){ + assertTrue(pe instanceof UnresolvedIdentifiersException); + UnresolvedIdentifiersException ex = (UnresolvedIdentifiersException)pe; + assertEquals(1, ex.getNbErrors()); + assertEquals("The geometrical function \"INTERSECTS\" is not available in this implementation!", ex.getErrors().next().getMessage()); + } + + // Test by adding REGION: + try{ + parser = new ADQLParser(new DBChecker(tables, new ArrayList(0), Arrays.asList(new String[]{"CONTAINS","POINT","CIRCLE","REGION"}), null)); + assertNotNull(parser.parseQuery("SELECT * FROM foo WHERE CONTAINS(REGION('Position 12.3 45.6'), REGION('circle 1.2 2.3 5')) = 1;")); + }catch(ParseException pe){ + pe.printStackTrace(); + fail("This query contains several geometries, and all are theoretically allowed: this test should have succeeded!"); + } + try{ + parser.parseQuery("SELECT * FROM foo WHERE CONTAINS(REGION('Position 12.3 45.6'), REGION('BOX 1.2 2.3 5 9')) = 1;"); + fail("This query contains a not-allowed geometry function (BOX): this test should have failed!"); + }catch(ParseException pe){ + assertTrue(pe instanceof UnresolvedIdentifiersException); + UnresolvedIdentifiersException ex = (UnresolvedIdentifiersException)pe; + assertEquals(1, ex.getNbErrors()); + assertEquals("The geometrical function \"BOX\" is not available in this implementation!", ex.getErrors().next().getMessage()); + } + + // Test with several geometries while none geometry is allowed: + try{ + parser = new ADQLParser(new DBChecker(tables, new ArrayList(0), new ArrayList(0), null)); + parser.parseQuery("SELECT * FROM foo WHERE CONTAINS(POINT('', 12.3, 45.6), CIRCLE('', 1.2, 2.3, 5)) = 1;"); + fail("This query contains geometries while 
they are all forbidden: this test should have failed!"); + }catch(ParseException pe){ + assertTrue(pe instanceof UnresolvedIdentifiersException); + UnresolvedIdentifiersException ex = (UnresolvedIdentifiersException)pe; + assertEquals(3, ex.getNbErrors()); + Iterator itErrors = ex.getErrors(); + assertEquals("The geometrical function \"CONTAINS\" is not available in this implementation!", itErrors.next().getMessage()); + assertEquals("The geometrical function \"POINT\" is not available in this implementation!", itErrors.next().getMessage()); + assertEquals("The geometrical function \"CIRCLE\" is not available in this implementation!", itErrors.next().getMessage()); + } + } + + @Test + public void testCoordSys(){ + // DECLARE A SIMPLE PARSER where all coordinate systems are allowed by default: + ADQLParser parser = new ADQLParser(new DBChecker(tables)); + + // Test with several coordinate systems while all are allowed: + try{ + assertNotNull(parser.parseQuery("SELECT * FROM foo WHERE CONTAINS(POINT('', 12.3, 45.6), CIRCLE('', 1.2, 2.3, 5)) = 1;")); + assertNotNull(parser.parseQuery("SELECT * FROM foo WHERE CONTAINS(POINT('icrs', 12.3, 45.6), CIRCLE('cartesian2', 1.2, 2.3, 5)) = 1;")); + assertNotNull(parser.parseQuery("SELECT * FROM foo WHERE CONTAINS(POINT('lsr', 12.3, 45.6), CIRCLE('galactic heliocenter', 1.2, 2.3, 5)) = 1;")); + assertNotNull(parser.parseQuery("SELECT * FROM foo WHERE CONTAINS(POINT('unknownframe', 12.3, 45.6), CIRCLE('galactic unknownrefpos spherical2', 1.2, 2.3, 5)) = 1;")); + assertNotNull(parser.parseQuery("SELECT * FROM foo WHERE CONTAINS(REGION('position icrs lsr 12.3 45.6'), REGION('circle fk5 1.2 2.3 5')) = 1;")); + assertNotNull(parser.parseQuery("SELECT Region('not(position 1 2)') FROM foo;")); + }catch(ParseException pe){ + pe.printStackTrace(); + fail("This query contains several valid coordinate systems, and all are theoretically allowed: this test should have succeeded!"); + } + + // Concatenation as coordinate systems not checked: 
+ try{ + assertNotNull(parser.parseQuery("SELECT * FROM foo WHERE CONTAINS(POINT('From ' || 'here', 12.3, 45.6), CIRCLE('', 1.2, 2.3, 5)) = 1;")); + }catch(ParseException pe){ + pe.printStackTrace(); + fail("This query contains a concatenation as coordinate systems (but only string constants are checked): this test should have succeeded!"); + } + + // Test with several coordinate systems while only some allowed: + try{ + parser = new ADQLParser(new DBChecker(tables, new ArrayList(0), null, Arrays.asList(new String[]{"icrs * *","fk4 geocenter *","galactic * spherical2"}))); + assertNotNull(parser.parseQuery("SELECT * FROM foo WHERE CONTAINS(POINT('', 12.3, 45.6), CIRCLE('', 1.2, 2.3, 5)) = 1;")); + assertNotNull(parser.parseQuery("SELECT * FROM foo WHERE CONTAINS(POINT('icrs', 12.3, 45.6), CIRCLE('cartesian3', 1.2, 2.3, 5)) = 1;")); + assertNotNull(parser.parseQuery("SELECT POINT('fk4', 12.3, 45.6) FROM foo;")); + assertNotNull(parser.parseQuery("SELECT * FROM foo WHERE CONTAINS(POINT('fk4 geocenter', 12.3, 45.6), CIRCLE('cartesian2', 1.2, 2.3, 5)) = 1;")); + assertNotNull(parser.parseQuery("SELECT * FROM foo WHERE CONTAINS(POINT('galactic', 12.3, 45.6), CIRCLE('galactic spherical2', 1.2, 2.3, 5)) = 1;")); + assertNotNull(parser.parseQuery("SELECT * FROM foo WHERE CONTAINS(POINT('galactic geocenter', 12.3, 45.6), CIRCLE('galactic lsr spherical2', 1.2, 2.3, 5)) = 1;")); + assertNotNull(parser.parseQuery("SELECT * FROM foo WHERE CONTAINS(REGION('position galactic lsr 12.3 45.6'), REGION('circle icrs 1.2 2.3 5')) = 1;")); + assertNotNull(parser.parseQuery("SELECT Region('not(position 1 2)') FROM foo;")); + }catch(ParseException pe){ + pe.printStackTrace(); + fail("This query contains several valid coordinate systems, and all are theoretically allowed: this test should have succeeded!"); + } + try{ + parser.parseQuery("SELECT POINT('fk5 geocenter', 12.3, 45.6) FROM foo;"); + fail("This query contains a not-allowed coordinate system ('fk5' is not allowed): this test 
should have failed!"); + }catch(ParseException pe){ + assertTrue(pe instanceof UnresolvedIdentifiersException); + UnresolvedIdentifiersException ex = (UnresolvedIdentifiersException)pe; + assertEquals(1, ex.getNbErrors()); + assertEquals("Coordinate system \"fk5 geocenter\" (= \"FK5 GEOCENTER SPHERICAL2\") not allowed in this implementation.", ex.getErrors().next().getMessage()); + } + try{ + parser.parseQuery("SELECT Region('not(position fk5 heliocenter 1 2)') FROM foo;"); + fail("This query contains a not-allowed coordinate system ('fk5' is not allowed): this test should have failed!"); + }catch(ParseException pe){ + assertTrue(pe instanceof UnresolvedIdentifiersException); + UnresolvedIdentifiersException ex = (UnresolvedIdentifiersException)pe; + assertEquals(1, ex.getNbErrors()); + assertEquals("Coordinate system \"FK5 HELIOCENTER\" (= \"FK5 HELIOCENTER SPHERICAL2\") not allowed in this implementation.", ex.getErrors().next().getMessage()); + } + + // Test with a coordinate system while none is allowed: + try{ + parser = new ADQLParser(new DBChecker(tables, new ArrayList(0), null, new ArrayList(0))); + assertNotNull(parser.parseQuery("SELECT * FROM foo WHERE CONTAINS(POINT('', 12.3, 45.6), CIRCLE('', 1.2, 2.3, 5)) = 1;")); + assertNotNull(parser.parseQuery("SELECT * FROM foo WHERE CONTAINS(REGION('position 12.3 45.6'), REGION('circle 1.2 2.3 5')) = 1;")); + assertNotNull(parser.parseQuery("SELECT Region('not(position 1 2)') FROM foo;")); + }catch(ParseException pe){ + pe.printStackTrace(); + fail("This query specifies none coordinate system: this test should have succeeded!"); + } + try{ + parser.parseQuery("SELECT * FROM foo WHERE CONTAINS(POINT('ICRS SPHERICAL2', 12.3, 45.6), CIRCLE('icrs', 1.2, 2.3, 5)) = 1;"); + fail("This query specifies coordinate systems while they are all forbidden: this test should have failed!"); + }catch(ParseException pe){ + assertTrue(pe instanceof UnresolvedIdentifiersException); + UnresolvedIdentifiersException ex = 
(UnresolvedIdentifiersException)pe; + assertEquals(2, ex.getNbErrors()); + Iterator itErrors = ex.getErrors(); + assertEquals("Coordinate system \"ICRS SPHERICAL2\" (= \"ICRS UNKNOWNREFPOS SPHERICAL2\") not allowed in this implementation.", itErrors.next().getMessage()); + assertEquals("Coordinate system \"icrs\" (= \"ICRS UNKNOWNREFPOS SPHERICAL2\") not allowed in this implementation.", itErrors.next().getMessage()); + } + try{ + parser.parseQuery("SELECT Region('not(position fk4 1 2)') FROM foo;"); + fail("This query specifies coordinate systems while they are all forbidden: this test should have failed!"); + }catch(ParseException pe){ + assertTrue(pe instanceof UnresolvedIdentifiersException); + UnresolvedIdentifiersException ex = (UnresolvedIdentifiersException)pe; + assertEquals(1, ex.getNbErrors()); + assertEquals("Coordinate system \"FK4\" (= \"FK4 UNKNOWNREFPOS SPHERICAL2\") not allowed in this implementation.", ex.getErrors().next().getMessage()); + } + } + + @Test + public void testTypesChecking(){ + // DECLARE A SIMPLE PARSER: + ADQLParser parser = new ADQLParser(new DBChecker(tables)); + + // Test the type of columns generated by the parser: + try{ + ADQLQuery query = parser.parseQuery("SELECT colS, colI, colG FROM foo;"); + ADQLOperand colS = query.getSelect().get(0).getOperand(); + ADQLOperand colI = query.getSelect().get(1).getOperand(); + ADQLOperand colG = query.getSelect().get(2).getOperand(); + // test string column: + assertTrue(colS instanceof ADQLColumn); + assertTrue(colS.isString()); + assertFalse(colS.isNumeric()); + assertFalse(colS.isGeometry()); + // test integer column: + assertTrue(colI instanceof ADQLColumn); + assertFalse(colI.isString()); + assertTrue(colI.isNumeric()); + assertFalse(colI.isGeometry()); + // test geometry column: + assertTrue(colG instanceof ADQLColumn); + assertFalse(colG.isString()); + assertFalse(colG.isNumeric()); + assertTrue(colG.isGeometry()); + }catch(ParseException e1){ + if (e1 instanceof 
UnresolvedIdentifiersException) + ((UnresolvedIdentifiersException)e1).getErrors().next().printStackTrace(); + else + e1.printStackTrace(); + fail("This query contains known columns: this test should have succeeded!"); + } + + // Test the expected type - NUMERIC - generated by the parser: + try{ + assertNotNull(parser.parseQuery("SELECT colI * 3 FROM foo;")); + }catch(ParseException e){ + e.printStackTrace(); + fail("This query contains a product between 2 numerics: this test should have succeeded!"); + } + try{ + parser.parseQuery("SELECT colS * 3 FROM foo;"); + fail("This query contains a product between a string and an integer: this test should have failed!"); + }catch(ParseException e){ + assertTrue(e instanceof UnresolvedIdentifiersException); + UnresolvedIdentifiersException ex = (UnresolvedIdentifiersException)e; + assertEquals(1, ex.getNbErrors()); + assertEquals("Type mismatch! A numeric value was expected instead of \"colS\".", ex.getErrors().next().getMessage()); + } + try{ + parser.parseQuery("SELECT colG * 3 FROM foo;"); + fail("This query contains a product between a geometry and an integer: this test should have failed!"); + }catch(ParseException e){ + assertTrue(e instanceof UnresolvedIdentifiersException); + UnresolvedIdentifiersException ex = (UnresolvedIdentifiersException)e; + assertEquals(1, ex.getNbErrors()); + assertEquals("Type mismatch! 
A numeric value was expected instead of \"colG\".", ex.getErrors().next().getMessage()); + } + + // Test the expected type - STRING - generated by the parser: + try{ + assertNotNull(parser.parseQuery("SELECT colS || 'blabla' FROM foo;")); + }catch(ParseException e){ + e.printStackTrace(); + fail("This query contains a concatenation between 2 strings: this test should have succeeded!"); + } + try{ + parser.parseQuery("SELECT colI || 'blabla' FROM foo;"); + fail("This query contains a concatenation between an integer and a string: this test should have failed!"); + }catch(ParseException e){ + assertTrue(e instanceof UnresolvedIdentifiersException); + UnresolvedIdentifiersException ex = (UnresolvedIdentifiersException)e; + assertEquals(1, ex.getNbErrors()); + assertEquals("Type mismatch! A string value was expected instead of \"colI\".", ex.getErrors().next().getMessage()); + } + try{ + parser.parseQuery("SELECT colG || 'blabla' FROM foo;"); + fail("This query contains a concatenation between a geometry and a string: this test should have failed!"); + }catch(ParseException e){ + assertTrue(e instanceof UnresolvedIdentifiersException); + UnresolvedIdentifiersException ex = (UnresolvedIdentifiersException)e; + assertEquals(1, ex.getNbErrors()); + assertEquals("Type mismatch! 
A string value was expected instead of \"colG\".", ex.getErrors().next().getMessage()); + } + + // Test the expected type - GEOMETRY - generated by the parser: + try{ + assertNotNull(parser.parseQuery("SELECT CONTAINS(colG, CIRCLE('', 1, 2, 5)) FROM foo;")); + }catch(ParseException e){ + e.printStackTrace(); + fail("This query contains a geometrical predicate between 2 geometries: this test should have succeeded!"); + } + try{ + parser.parseQuery("SELECT CONTAINS(colI, CIRCLE('', 1, 2, 5)) FROM foo;"); + fail("This query contains a geometrical predicate between an integer and a geometry: this test should have failed!"); + }catch(ParseException e){ + assertTrue(e instanceof UnresolvedIdentifiersException); + UnresolvedIdentifiersException ex = (UnresolvedIdentifiersException)e; + assertEquals(1, ex.getNbErrors()); + assertEquals("Type mismatch! A geometry was expected instead of \"colI\".", ex.getErrors().next().getMessage()); + } + try{ + parser.parseQuery("SELECT CONTAINS(colS, CIRCLE('', 1, 2, 5)) FROM foo;"); + fail("This query contains a geometrical predicate between a string and a geometry: this test should have failed!"); + }catch(ParseException e){ + assertTrue(e instanceof UnresolvedIdentifiersException); + UnresolvedIdentifiersException ex = (UnresolvedIdentifiersException)e; + assertEquals(1, ex.getNbErrors()); + assertEquals("Type mismatch! 
A geometry was expected instead of \"colS\".", ex.getErrors().next().getMessage()); + } + + // DECLARE SOME UDFs: + FunctionDef[] udfs = new FunctionDef[]{new FunctionDef("toto", new DBType(DBDatatype.VARCHAR)),new FunctionDef("tata", new DBType(DBDatatype.INTEGER)),new FunctionDef("titi", new DBType(DBDatatype.REGION))}; + parser = new ADQLParser(new DBChecker(tables, Arrays.asList(udfs))); + + // Test the return type of the function TOTO generated by the parser: + try{ + ADQLQuery query = parser.parseQuery("SELECT toto() FROM foo;"); + ADQLOperand fct = query.getSelect().get(0).getOperand(); + assertTrue(fct instanceof DefaultUDF); + assertNotNull(((DefaultUDF)fct).getDefinition()); + assertTrue(fct.isString()); + assertFalse(fct.isNumeric()); + assertFalse(fct.isGeometry()); + }catch(ParseException e1){ + e1.printStackTrace(); + fail("This query contains a DECLARED UDF: this test should have succeeded!"); + } + + // Test the return type checking inside a whole query: + try{ + assertNotNull(parser.parseQuery("SELECT toto() || 'Blabla ' AS \"SuperText\" FROM foo;")); + }catch(ParseException e1){ + e1.printStackTrace(); + fail("This query contains a DECLARED UDF concatenated to a String: this test should have succeeded!"); + } + try{ + parser.parseQuery("SELECT toto()*3 AS \"SuperError\" FROM foo;"); + fail("This query contains a DECLARED UDF BUT used as numeric...which is here not possible: this test should have failed!"); + }catch(ParseException e1){ + assertTrue(e1 instanceof UnresolvedIdentifiersException); + UnresolvedIdentifiersException ex = (UnresolvedIdentifiersException)e1; + assertEquals(1, ex.getNbErrors()); + assertEquals("Type mismatch! 
A numeric value was expected instead of \"toto()\".", ex.getErrors().next().getMessage()); + } + + // Test the return type of the function TATA generated by the parser: + try{ + ADQLQuery query = parser.parseQuery("SELECT tata() FROM foo;"); + ADQLOperand fct = query.getSelect().get(0).getOperand(); + assertTrue(fct instanceof DefaultUDF); + assertNotNull(((DefaultUDF)fct).getDefinition()); + assertFalse(fct.isString()); + assertTrue(fct.isNumeric()); + assertFalse(fct.isGeometry()); + }catch(ParseException e1){ + e1.printStackTrace(); + fail("This query contains a DECLARED UDF: this test should have succeeded!"); + } + + // Test the return type checking inside a whole query: + try{ + assertNotNull(parser.parseQuery("SELECT tata()*3 AS \"aNumeric\" FROM foo;")); + }catch(ParseException e1){ + e1.printStackTrace(); + fail("This query contains a DECLARED UDF multiplicated by 3: this test should have succeeded!"); + } + try{ + parser.parseQuery("SELECT 'Blabla ' || tata() AS \"SuperError\" FROM foo;"); + fail("This query contains a DECLARED UDF BUT used as string...which is here not possible: this test should have failed!"); + }catch(ParseException e1){ + assertTrue(e1 instanceof UnresolvedIdentifiersException); + UnresolvedIdentifiersException ex = (UnresolvedIdentifiersException)e1; + assertEquals(1, ex.getNbErrors()); + assertEquals("Type mismatch! A string value was expected instead of \"tata()\".", ex.getErrors().next().getMessage()); + } + try{ + parser.parseQuery("SELECT tata() || 'Blabla ' AS \"SuperError\" FROM foo;"); + fail("This query contains a DECLARED UDF BUT used as string...which is here not possible: this test should have failed!"); + }catch(ParseException e1){ + assertTrue(e1 instanceof UnresolvedIdentifiersException); + UnresolvedIdentifiersException ex = (UnresolvedIdentifiersException)e1; + assertEquals(1, ex.getNbErrors()); + assertEquals("Type mismatch! 
A string value was expected instead of \"tata()\".", ex.getErrors().next().getMessage()); + } + + // Test the return type of the function TITI generated by the parser: + try{ + ADQLQuery query = parser.parseQuery("SELECT titi() FROM foo;"); + ADQLOperand fct = query.getSelect().get(0).getOperand(); + assertTrue(fct instanceof DefaultUDF); + assertNotNull(((DefaultUDF)fct).getDefinition()); + assertFalse(fct.isString()); + assertFalse(fct.isNumeric()); + assertTrue(fct.isGeometry()); + }catch(ParseException e1){ + e1.printStackTrace(); + fail("This query contains a DECLARED UDF: this test should have succeeded!"); + } + + // Test the return type checking inside a whole query: + try{ + parser.parseQuery("SELECT CONTAINS(colG, titi()) ' AS \"Super\" FROM foo;"); + fail("Geometrical UDFs are not allowed for the moment in the ADQL language: this test should have failed!"); + }catch(ParseException e1){ + assertTrue(e1 instanceof ParseException); + assertEquals(" Encountered \"(\". Was expecting one of: \")\" \".\" \".\" \")\" ", e1.getMessage()); + } + try{ + parser.parseQuery("SELECT titi()*3 AS \"SuperError\" FROM foo;"); + fail("This query contains a DECLARED UDF BUT used as numeric...which is here not possible: this test should have failed!"); + }catch(ParseException e1){ + assertTrue(e1 instanceof UnresolvedIdentifiersException); + UnresolvedIdentifiersException ex = (UnresolvedIdentifiersException)e1; + assertEquals(1, ex.getNbErrors()); + assertEquals("Type mismatch! 
A numeric value was expected instead of \"titi()\".", ex.getErrors().next().getMessage()); + } + + // CLEAR ALL UDFs AND ALLOW UNKNOWN FUNCTION: + parser = new ADQLParser(new DBChecker(tables, null)); + + // Test again: + try{ + assertNotNull(parser.parseQuery("SELECT toto() FROM foo;")); + }catch(ParseException e){ + e.printStackTrace(); + fail("The parser allow ANY unknown function: this test should have succeeded!"); + } + + // Test the return type of the function generated by the parser: + try{ + ADQLQuery query = parser.parseQuery("SELECT toto() FROM foo;"); + ADQLOperand fct = query.getSelect().get(0).getOperand(); + assertTrue(fct instanceof DefaultUDF); + assertNull(((DefaultUDF)fct).getDefinition()); + assertTrue(fct.isString()); + assertTrue(fct.isNumeric()); + }catch(ParseException e1){ + e1.printStackTrace(); + fail("The parser allow ANY unknown function: this test should have succeeded!"); + } + + // DECLARE THE UDF (while unknown functions are allowed): + parser = new ADQLParser(new DBChecker(tables, Arrays.asList(new FunctionDef[]{new FunctionDef("toto", new DBType(DBDatatype.VARCHAR))}))); + + // Test the return type of the function generated by the parser: + try{ + ADQLQuery query = parser.parseQuery("SELECT toto() FROM foo;"); + ADQLOperand fct = query.getSelect().get(0).getOperand(); + assertTrue(fct instanceof DefaultUDF); + assertNotNull(((DefaultUDF)fct).getDefinition()); + assertTrue(fct.isString()); + assertFalse(fct.isNumeric()); + }catch(ParseException e1){ + e1.printStackTrace(); + fail("The parser allow ANY unknown function: this test should have succeeded!"); + } + + // DECLARE UDFs WITH SAME NAMES BUT DIFFERENT TYPE OF ARGUMENT: + udfs = new FunctionDef[]{new FunctionDef("toto", new DBType(DBDatatype.VARCHAR), new FunctionParam[]{new FunctionParam("attr", new DBType(DBDatatype.VARCHAR))}),new FunctionDef("toto", new DBType(DBDatatype.INTEGER), new FunctionParam[]{new FunctionParam("attr", new DBType(DBDatatype.INTEGER))}),new 
FunctionDef("toto", new DBType(DBDatatype.INTEGER), new FunctionParam[]{new FunctionParam("attr", new DBType(DBDatatype.POINT))})}; + parser = new ADQLParser(new DBChecker(tables, Arrays.asList(udfs))); + + // Test the return type in function of the parameter: + try{ + assertNotNull(parser.parseQuery("SELECT toto('blabla') AS toto1, toto(123) AS toto2, toto(POINT('', 1, 2)) AS toto3 FROM foo;")); + }catch(ParseException e1){ + e1.printStackTrace(); + fail("This query contains two DECLARED UDFs used here: this test should have succeeded!"); + } + try{ + parser.parseQuery("SELECT toto('blabla') * 123 AS \"SuperError\" FROM foo;"); + fail("This query contains a DECLARED UDF BUT used as numeric...which is here not possible: this test should have failed!"); + }catch(ParseException e){ + assertTrue(e instanceof UnresolvedIdentifiersException); + UnresolvedIdentifiersException ex = (UnresolvedIdentifiersException)e; + assertEquals(1, ex.getNbErrors()); + assertEquals("Type mismatch! A numeric value was expected instead of \"toto('blabla')\".", ex.getErrors().next().getMessage()); + } + try{ + parser.parseQuery("SELECT toto(123) || 'blabla' AS \"SuperError\" FROM foo;"); + fail("This query contains a DECLARED UDF BUT used as string...which is here not possible: this test should have failed!"); + }catch(ParseException e){ + assertTrue(e instanceof UnresolvedIdentifiersException); + UnresolvedIdentifiersException ex = (UnresolvedIdentifiersException)e; + assertEquals(1, ex.getNbErrors()); + assertEquals("Type mismatch! 
A string value was expected instead of \"toto(123)\".", ex.getErrors().next().getMessage()); + } + try{ + parser.parseQuery("SELECT toto(POINT('', 1, 2)) || 'blabla' AS \"SuperError\" FROM foo;"); + fail("This query contains a DECLARED UDF BUT used as string...which is here not possible: this test should have failed!"); + }catch(ParseException e){ + assertTrue(e instanceof UnresolvedIdentifiersException); + UnresolvedIdentifiersException ex = (UnresolvedIdentifiersException)e; + assertEquals(1, ex.getNbErrors()); + assertEquals("Type mismatch! A string value was expected instead of \"toto(POINT('', 1, 2))\".", ex.getErrors().next().getMessage()); + } + } + + private static class WrongUDFToto extends UDFToto { + public WrongUDFToto(final ADQLOperand[] params) throws Exception{ + super(params); + throw new Exception("Systematic error!"); + } + } + + public static class UDFToto extends UserDefinedFunction { + protected StringConstant fakeParam; + + public UDFToto(final ADQLOperand[] params) throws Exception{ + if (params == null || params.length == 0) + throw new Exception("Missing parameter for the user defined function \"toto\"!"); + else if (params.length > 1) + throw new Exception("Too many parameters for the function \"toto\"! Only one is required."); + else if (!(params[0] instanceof StringConstant)) + throw new Exception("Wrong parameter type! 
The parameter of the UDF \"toto\" must be a string constant."); + fakeParam = (StringConstant)params[0]; + } + + @Override + public final boolean isNumeric(){ + return false; + } + + @Override + public final boolean isString(){ + return true; + } + + @Override + public final boolean isGeometry(){ + return false; + } + + @Override + public ADQLObject getCopy() throws Exception{ + ADQLOperand[] params = new ADQLOperand[]{(StringConstant)fakeParam.getCopy()}; + return new UDFToto(params); + } + + @Override + public final String getName(){ + return "toto"; + } + + @Override + public final ADQLOperand[] getParameters(){ + return new ADQLOperand[]{fakeParam}; + } + + @Override + public final int getNbParameters(){ + return 1; + } + + @Override + public final ADQLOperand getParameter(int index) throws ArrayIndexOutOfBoundsException{ + if (index != 0) + throw new ArrayIndexOutOfBoundsException("Incorrect parameter index: " + index + "! The function \"toto\" has only one parameter."); + return fakeParam; + } + + @Override + public ADQLOperand setParameter(int index, ADQLOperand replacer) throws ArrayIndexOutOfBoundsException, NullPointerException, Exception{ + if (index != 0) + throw new ArrayIndexOutOfBoundsException("Incorrect parameter index: " + index + "! The function \"toto\" has only one parameter."); + else if (!(replacer instanceof StringConstant)) + throw new Exception("Wrong parameter type! The parameter of the UDF \"toto\" must be a string constant."); + return (fakeParam = (StringConstant)replacer); + } + + @Override + public String translate(final ADQLTranslator caller) throws TranslationException{ + /* Note: Since this function is totally fake, this function will be replaced in SQL by its parameter (the string). 
*/ + return caller.translate(fakeParam); + } + } + +} diff --git a/test/adql/db/TestFunctionDef.java b/test/adql/db/TestFunctionDef.java new file mode 100644 index 0000000000000000000000000000000000000000..c3d738a07aa0185136ab5199b98c915ec51ae1ed --- /dev/null +++ b/test/adql/db/TestFunctionDef.java @@ -0,0 +1,312 @@ +package adql.db; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import org.junit.Test; + +import adql.db.DBType.DBDatatype; +import adql.db.FunctionDef.FunctionParam; +import adql.parser.ParseException; +import adql.query.operand.ADQLOperand; +import adql.query.operand.NumericConstant; +import adql.query.operand.StringConstant; +import adql.query.operand.function.ADQLFunction; +import adql.query.operand.function.DefaultUDF; +import adql.query.operand.function.geometry.PointFunction; + +public class TestFunctionDef { + + @Test + public void testIsString(){ + for(DBDatatype type : DBDatatype.values()){ + switch(type){ + case CHAR: + case VARCHAR: + case TIMESTAMP: + case CLOB: + assertTrue(new FunctionDef("foo", new DBType(type)).isString); + break; + default: + assertFalse(new FunctionDef("foo", new DBType(type)).isString); + } + } + } + + @Test + public void testIsGeometry(){ + for(DBDatatype type : DBDatatype.values()){ + switch(type){ + case POINT: + case REGION: + assertTrue(new FunctionDef("foo", new DBType(type)).isGeometry); + break; + default: + assertFalse(new FunctionDef("foo", new DBType(type)).isGeometry); + } + } + } + + @Test + public void testIsNumeric(){ + for(DBDatatype type : DBDatatype.values()){ + switch(type){ + case CHAR: + case VARCHAR: + case TIMESTAMP: + case POINT: + case REGION: + case CLOB: + assertFalse(new FunctionDef("foo", new DBType(type)).isNumeric); + break; + default: + assertTrue(new FunctionDef("foo", new DBType(type)).isNumeric); + } + } + } + + @Test + public void testToString(){ + 
assertEquals("fct1()", new FunctionDef("fct1").toString()); + assertEquals("fct1() -> VARCHAR", new FunctionDef("fct1", new DBType(DBDatatype.VARCHAR)).toString()); + assertEquals("fct1(foo DOUBLE) -> VARCHAR", new FunctionDef("fct1", new DBType(DBDatatype.VARCHAR), new FunctionParam[]{new FunctionParam("foo", new DBType(DBDatatype.DOUBLE))}).toString()); + assertEquals("fct1(foo DOUBLE)", new FunctionDef("fct1", new FunctionParam[]{new FunctionParam("foo", new DBType(DBDatatype.DOUBLE))}).toString()); + assertEquals("fct1(foo DOUBLE, pt POINT) -> VARCHAR", new FunctionDef("fct1", new DBType(DBDatatype.VARCHAR), new FunctionParam[]{new FunctionParam("foo", new DBType(DBDatatype.DOUBLE)),new FunctionParam("pt", new DBType(DBDatatype.POINT))}).toString()); + assertEquals("fct1(foo DOUBLE, pt POINT)", new FunctionDef("fct1", null, new FunctionParam[]{new FunctionParam("foo", new DBType(DBDatatype.DOUBLE)),new FunctionParam("pt", new DBType(DBDatatype.POINT))}).toString()); + } + + @Test + public void testParse(){ + final String WRONG_FULL_SYNTAX = "Wrong function definition syntax! Expected syntax: \"(?) ?\", where =\"[a-zA-Z]+[a-zA-Z0-9_]*\", =\" -> \", =\"( (, )*)\", should be one of the types described in the UPLOAD section of the TAP documentation. Examples of good syntax: \"foo()\", \"foo() -> VARCHAR\", \"foo(param INTEGER)\", \"foo(param1 INTEGER, param2 DOUBLE) -> DOUBLE\""; + final String WRONG_PARAM_SYNTAX = "Wrong parameters syntax! Expected syntax: \"( (, )*)\", where =\"[a-zA-Z]+[a-zA-Z0-9_]*\", should be one of the types described in the UPLOAD section of the TAP documentation. 
Examples of good syntax: \"()\", \"(param INTEGER)\", \"(param1 INTEGER, param2 DOUBLE)\""; + + // NULL test: + try{ + FunctionDef.parse(null); + fail("A NULL string is not valide!"); + }catch(Exception ex){ + assertTrue(ex instanceof NullPointerException); + assertEquals("Missing string definition to build a FunctionDef!", ex.getMessage()); + } + + // EMPTY STRING test: + try{ + FunctionDef.parse(""); + fail("An empty string is not valide!"); + }catch(Exception ex){ + assertTrue(ex instanceof ParseException); + assertEquals(WRONG_FULL_SYNTAX, ex.getMessage()); + } + + // CORRECT string definitions: + try{ + assertEquals("foo()", FunctionDef.parse("foo()").toString()); + assertEquals("foo() -> VARCHAR", FunctionDef.parse("foo() -> string").toString()); + assertEquals("foo() -> VARCHAR", FunctionDef.parse("foo()->string").toString()); + assertEquals("foo(toto VARCHAR) -> SMALLINT", FunctionDef.parse("foo(toto varchar) -> boolean").toString()); + assertEquals("foo(param1 DOUBLE, param2 INTEGER) -> DOUBLE", FunctionDef.parse(" foo ( param1 numeric, param2 int ) -> DOUBLE ").toString()); + assertEquals("foo_ALTernative2first(p POINT, d TIMESTAMP) -> TIMESTAMP", FunctionDef.parse("foo_ALTernative2first (p POINT,d date) -> time").toString()); + assertEquals("blabla_123(toto INTEGER, bla SMALLINT, truc CLOB, bidule CHAR, smurph POINT, date TIMESTAMP) -> SMALLINT", FunctionDef.parse("blabla_123(toto int4, bla bool, truc text, bidule character, smurph point, date timestamp) -> BOOLEAN").toString()); + }catch(Exception ex){ + ex.printStackTrace(System.err); + fail("All this string definitions are correct."); + } + + // TYPE PARAMETER test: + try{ + for(DBDatatype t : DBDatatype.values()){ + switch(t){ + case CHAR: + case VARCHAR: + case BINARY: + case VARBINARY: + assertEquals("foo() -> " + t.toString() + "(10)", FunctionDef.parse("foo() -> " + t.toString() + "(10)").toString()); + break; + default: + assertEquals("foo() -> " + t.toString(), FunctionDef.parse("foo() -> " + 
t.toString() + "(10)").toString()); + } + } + }catch(Exception ex){ + ex.printStackTrace(System.err); + fail("Wrong type parsing!"); + } + + // WRONG string definitions: + try{ + FunctionDef.parse("123()"); + fail("No number is allowed as first character of a function name!"); + }catch(Exception ex){ + assertTrue(ex instanceof ParseException); + assertEquals(WRONG_FULL_SYNTAX, ex.getMessage()); + } + try{ + FunctionDef.parse("1foo()"); + fail("No number is allowed as first character of a function name!"); + }catch(Exception ex){ + assertTrue(ex instanceof ParseException); + assertEquals(WRONG_FULL_SYNTAX, ex.getMessage()); + } + try{ + FunctionDef.parse("foo,truc()"); + fail("No other character than [a-zA-Z0-9_] is allowed after a first character [a-zA-Z] in a function name!"); + }catch(Exception ex){ + assertTrue(ex instanceof ParseException); + assertEquals(WRONG_FULL_SYNTAX, ex.getMessage()); + } + try{ + FunctionDef.parse("foo"); + fail("A function definition must contain at list parenthesis even if there is no parameter."); + }catch(Exception ex){ + assertTrue(ex instanceof ParseException); + assertEquals(WRONG_FULL_SYNTAX, ex.getMessage()); + } + try{ + FunctionDef.parse("foo(param)"); + fail("A parameter must always have a type!"); + }catch(Exception ex){ + assertTrue(ex instanceof ParseException); + assertEquals("Wrong syntax for the 1-th parameter: \"param\"! Expected syntax: \"( (, )*)\", where =\"[a-zA-Z]+[a-zA-Z0-9_]*\", should be one of the types described in the UPLOAD section of the TAP documentation. 
Examples of good syntax: \"()\", \"(param INTEGER)\", \"(param1 INTEGER, param2 DOUBLE)\"", ex.getMessage()); + } + try{ + FunctionDef.parse("foo()->aType"); + fail("Wrong (return) type!"); + }catch(Exception ex){ + assertTrue(ex instanceof ParseException); + assertEquals("Unknown return type: \"aType\"!", ex.getMessage()); + } + try{ + FunctionDef.parse("foo()->aType(10)"); + fail("Wrong (return) type!"); + }catch(Exception ex){ + assertTrue(ex instanceof ParseException); + assertEquals("Unknown return type: \"aType(10)\"!", ex.getMessage()); + } + try{ + FunctionDef.parse("foo() -> "); + fail("The return type is missing!"); + }catch(Exception ex){ + assertTrue(ex instanceof ParseException); + assertEquals(WRONG_FULL_SYNTAX, ex.getMessage()); + } + try{ + FunctionDef.parse("foo(,)"); + fail("Missing parameter definition!"); + }catch(Exception ex){ + assertTrue(ex instanceof ParseException); + assertEquals(WRONG_PARAM_SYNTAX, ex.getMessage()); + } + try{ + FunctionDef.parse("foo(param1 int,)"); + fail("Missing parameter definition!"); + }catch(Exception ex){ + assertTrue(ex instanceof ParseException); + assertEquals(WRONG_PARAM_SYNTAX, ex.getMessage()); + } + try{ + FunctionDef.parse("foo(param1 aType)"); + fail("Wrong parameter type!"); + }catch(Exception ex){ + assertTrue(ex instanceof ParseException); + assertEquals("Unknown type for the parameter \"param1\": \"aType\"!", ex.getMessage()); + } + try{ + FunctionDef.parse("foo(param1 aType(10))"); + fail("Wrong parameter type!"); + }catch(Exception ex){ + assertTrue(ex instanceof ParseException); + assertEquals("Unknown type for the parameter \"param1\": \"aType(10)\"!", ex.getMessage()); + } + } + + @Test + public void testCompareToFunctionDef(){ + // DEFINITION 1 :: fct1() -> VARCHAR + FunctionDef def1 = new FunctionDef("fct1", new DBType(DBDatatype.VARCHAR)); + + // TEST :: Identity test (def1 with def1): [EQUAL] + assertEquals(0, def1.compareTo(def1)); + + // TEST :: With a function having a different name and 
also no parameter: [GREATER] + assertEquals(1, def1.compareTo(new FunctionDef("fct0", new DBType(DBDatatype.VARCHAR)))); + + // TEST :: With a function having the same name, but a different return type: [EQUAL} + assertEquals(0, def1.compareTo(new FunctionDef("fct1", new DBType(DBDatatype.INTEGER)))); + + // TEST :: With a function having the same name, but 2 parameters: [LESS (4 characters: ø against 1010)] + assertEquals(-6, def1.compareTo(new FunctionDef("fct1", new DBType(DBDatatype.INTEGER), new FunctionParam[]{new FunctionParam("foo", new DBType(DBDatatype.INTEGER)),new FunctionParam("foo", new DBType(DBDatatype.INTEGER))}))); + + // DEFINITION 1 :: fct1(foo1 CHAR(12), foo2 DOUBLE) -> VARCHAR + def1 = new FunctionDef("fct1", new DBType(DBDatatype.VARCHAR), new FunctionParam[]{new FunctionParam("foo1", new DBType(DBDatatype.CHAR, 12)),new FunctionParam("foo2", new DBType(DBDatatype.DOUBLE))}); + + // TEST :: Identity test (def1 with def1): [EQUAL] + assertEquals(0, def1.compareTo(def1)); + + // DEFINITION 2 :: fct1(foo1 CHAR(12), foo2 VARCHAR) -> VARCHAR + FunctionDef def2 = new FunctionDef("fct1", new DBType(DBDatatype.VARCHAR), new FunctionParam[]{new FunctionParam("foo1", new DBType(DBDatatype.CHAR, 12)),new FunctionParam("foo2", new DBType(DBDatatype.VARCHAR))}); + + // TEST :: Identity test (def2 with def2): [EQUAL] + assertEquals(0, def2.compareTo(def2)); + + // TEST :: Same name, but different type for the last parameter only: [GREATER (because Numeric = 10 > String = 01)] + assertEquals(1, def1.compareTo(def2)); + + // DEFINITION 2 :: fct2(foo1 CHAR(12), foo2 DOUBLE) -> VARCHAR + def2 = new FunctionDef("fct2", new DBType(DBDatatype.VARCHAR), new FunctionParam[]{new FunctionParam("foo1", new DBType(DBDatatype.CHAR, 12)),new FunctionParam("foo2", new DBType(DBDatatype.DOUBLE))}); + + // TEST :: Identity test (def2 with def2): [EQUAL] + assertEquals(0, def2.compareTo(def2)); + + // TEST :: Different name but same parameters: [LESS] + assertEquals(-1, 
def1.compareTo(def2)); + + // DEFINITION 2 :: fct1(foo1 CHAR(12), foo2 POINT) -> VARCHAR + def2 = new FunctionDef("fct1", new DBType(DBDatatype.VARCHAR), new FunctionParam[]{new FunctionParam("foo1", new DBType(DBDatatype.CHAR, 12)),new FunctionParam("foo2", new DBType(DBDatatype.POINT))}); + + // TEST :: Identity test (def2 with def2): [EQUAL] + assertEquals(0, def2.compareTo(def2)); + + // TEST :: Same name, but different type for the last parameter only: [GREATER] + assertEquals(1, def1.compareTo(def2)); + } + + @Test + public void testCompareToADQLFunction(){ + // DEFINITION :: fct1() -> VARCHAR + FunctionDef def = new FunctionDef("fct1", new DBType(DBDatatype.VARCHAR)); + + // TEST :: NULL: + try{ + def.compareTo((ADQLFunction)null); + fail("Missing ADQL function for comparison with FunctionDef!"); + }catch(Exception e){ + assertTrue(e instanceof NullPointerException); + assertEquals("Missing ADQL function with which comparing this function definition!", e.getMessage()); + } + + // TEST :: "fct1()": [EQUAL] + assertEquals(0, def.compareTo(new DefaultUDF("fct1", null))); + + // TEST :: "fct0()": [GREATER] + assertEquals(1, def.compareTo(new DefaultUDF("fct0", null))); + + // TEST :: "fct1(12.3, 3.14)": [LESS (of 2 params)] + assertEquals(-2, def.compareTo(new DefaultUDF("fct1", new ADQLOperand[]{new NumericConstant(12.3),new NumericConstant(3.14)}))); + + // DEFINITION :: fct1(foo1 CHAR(12), foo2 DOUBLE) -> VARCHAR + def = new FunctionDef("fct1", new DBType(DBDatatype.VARCHAR), new FunctionParam[]{new FunctionParam("foo1", new DBType(DBDatatype.CHAR, 12)),new FunctionParam("foo2", new DBType(DBDatatype.DOUBLE))}); + + // TEST :: "fct1('blabla', 'blabla2')": [GREATER (because the second param is numeric and Numeric = 10 > String = 01)] + assertEquals(1, def.compareTo(new DefaultUDF("fct1", new ADQLOperand[]{new StringConstant("blabla"),new StringConstant("blabla2")}))); + + // TEST :: "fct1('blabla', POINT('COORDSYS', 1.2, 3.4))": [GREATER (same reason ; POINT 
is considered as a String)] + try{ + assertEquals(1, def.compareTo(new DefaultUDF("fct1", new ADQLOperand[]{new StringConstant("blabla"),new PointFunction(new StringConstant("COORDSYS"), new NumericConstant(1.2), new NumericConstant(3.4))}))); + }catch(Exception e){ + e.printStackTrace(); + fail(); + } + } + +} diff --git a/test/adql/db/TestSTCS.java b/test/adql/db/TestSTCS.java new file mode 100644 index 0000000000000000000000000000000000000000..8ff76460c076c73b611c4e8cf1e564b55aa6c91b --- /dev/null +++ b/test/adql/db/TestSTCS.java @@ -0,0 +1,536 @@ +package adql.db; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.io.StringBufferInputStream; + +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import adql.db.STCS.CoordSys; +import adql.db.STCS.Flavor; +import adql.db.STCS.Frame; +import adql.db.STCS.RefPos; +import adql.db.STCS.Region; +import adql.db.STCS.RegionType; +import adql.parser.ADQLParser; +import adql.parser.ParseException; +import adql.query.operand.ADQLColumn; +import adql.query.operand.ADQLOperand; +import adql.query.operand.NegativeOperand; +import adql.query.operand.NumericConstant; +import adql.query.operand.Operation; +import adql.query.operand.OperationType; +import adql.query.operand.StringConstant; +import adql.query.operand.function.geometry.BoxFunction; +import adql.query.operand.function.geometry.CircleFunction; +import adql.query.operand.function.geometry.ContainsFunction; +import adql.query.operand.function.geometry.GeometryFunction; +import adql.query.operand.function.geometry.GeometryFunction.GeometryValue; +import adql.query.operand.function.geometry.PointFunction; +import adql.query.operand.function.geometry.PolygonFunction; +import 
adql.query.operand.function.geometry.RegionFunction; + +@SuppressWarnings("deprecation") +public class TestSTCS { + + @BeforeClass + public static void setUpBeforeClass() throws Exception{} + + @AfterClass + public static void tearDownAfterClass() throws Exception{} + + @Before + public void setUp() throws Exception{} + + @After + public void tearDown() throws Exception{} + + @Test + public void buildRegion(){ + // Special values: + try{ + new Region((GeometryFunction)null); + fail(); + }catch(Exception e){ + assertTrue(e instanceof NullPointerException); + assertEquals("Missing geometry to convert into STCS.Region!", e.getMessage()); + } + + try{ + new Region((Region)null); + fail(); + }catch(Exception e){ + assertTrue(e instanceof NullPointerException); + assertEquals("Missing region to NOT select!", e.getMessage()); + } + + try{ + new Region(new ContainsFunction(new GeometryValue(new RegionFunction(new StringConstant("position 1 2"))), new GeometryValue(new RegionFunction(new StringConstant("circle 0 1 4"))))); + fail(); + }catch(Exception e){ + assertTrue(e instanceof IllegalArgumentException); + assertEquals("Unknown region type! 
Only geometrical function PointFunction, CircleFunction, BoxFunction, PolygonFunction and RegionFunction are allowed.", e.getMessage()); + } + + // Allowed values (1 test for each type of region): + try{ + Region r = new Region(new PointFunction(new StringConstant(""), new NumericConstant(1.2), new NegativeOperand(new NumericConstant(2.3)))); + assertEquals(RegionType.POSITION, r.type); + assertEquals("", r.coordSys.toSTCS()); + assertEquals(1, r.coordinates.length); + assertEquals(2, r.coordinates[0].length); + assertEquals(1.2, r.coordinates[0][0], 0); + assertEquals(-2.3, r.coordinates[0][1], 0); + assertEquals(Double.NaN, r.radius, 0); + assertEquals(Double.NaN, r.width, 0); + assertEquals(Double.NaN, r.height, 0); + assertNull(r.regions); + assertEquals("POSITION 1.2 -2.3", r.toSTCS()); + + r = new Region(new CircleFunction(new StringConstant("ICRS"), new NumericConstant(1.2), new NegativeOperand(new NumericConstant(2.3)), new NumericConstant(5))); + assertEquals(RegionType.CIRCLE, r.type); + assertEquals("ICRS", r.coordSys.toSTCS()); + assertEquals(1, r.coordinates.length); + assertEquals(2, r.coordinates[0].length); + assertEquals(1.2, r.coordinates[0][0], 0); + assertEquals(-2.3, r.coordinates[0][1], 0); + assertEquals(5, r.radius, 0); + assertEquals(Double.NaN, r.width, 0); + assertEquals(Double.NaN, r.height, 0); + assertNull(r.regions); + assertEquals("CIRCLE ICRS 1.2 -2.3 5.0", r.toSTCS()); + + r = new Region(new BoxFunction(new StringConstant("ICRS heliocenter"), new NumericConstant(1.2), new NegativeOperand(new NumericConstant(2.3)), new NumericConstant(5), new NumericConstant(4.6))); + assertEquals(RegionType.BOX, r.type); + assertEquals("ICRS HELIOCENTER", r.coordSys.toSTCS()); + assertEquals(1, r.coordinates.length); + assertEquals(2, r.coordinates[0].length); + assertEquals(1.2, r.coordinates[0][0], 0); + assertEquals(-2.3, r.coordinates[0][1], 0); + assertEquals(Double.NaN, r.radius, 0); + assertEquals(5, r.width, 0); + assertEquals(4.6, 
r.height, 0); + assertNull(r.regions); + assertEquals("BOX ICRS HELIOCENTER 1.2 -2.3 5.0 4.6", r.toSTCS()); + + r = new Region(new PolygonFunction(new StringConstant("cartesian2"), new ADQLOperand[]{new NumericConstant(1.2),new NegativeOperand(new NumericConstant(2.3)),new NumericConstant(5),new NumericConstant(4.6),new NegativeOperand(new NumericConstant(.89)),new NumericConstant(1)})); + assertEquals(RegionType.POLYGON, r.type); + assertEquals("CARTESIAN2", r.coordSys.toSTCS()); + assertEquals(3, r.coordinates.length); + assertEquals(2, r.coordinates[0].length); + assertEquals(1.2, r.coordinates[0][0], 0); + assertEquals(-2.3, r.coordinates[0][1], 0); + assertEquals(5, r.coordinates[1][0], 0); + assertEquals(4.6, r.coordinates[1][1], 0); + assertEquals(-0.89, r.coordinates[2][0], 0); + assertEquals(1, r.coordinates[2][1], 0); + assertEquals(Double.NaN, r.radius, 0); + assertEquals(Double.NaN, r.width, 0); + assertEquals(Double.NaN, r.height, 0); + assertNull(r.regions); + assertEquals("POLYGON CARTESIAN2 1.2 -2.3 5.0 4.6 -0.89 1.0", r.toSTCS()); + + r = new Region(new RegionFunction(new StringConstant("position ICrs 1.2 -2.3"))); + assertEquals(RegionType.POSITION, r.type); + assertEquals("ICRS", r.coordSys.toSTCS()); + assertEquals(1, r.coordinates.length); + assertEquals(2, r.coordinates[0].length); + assertEquals(1.2, r.coordinates[0][0], 0); + assertEquals(-2.3, r.coordinates[0][1], 0); + assertEquals(Double.NaN, r.radius, 0); + assertEquals(Double.NaN, r.width, 0); + assertEquals(Double.NaN, r.height, 0); + assertNull(r.regions); + assertEquals("POSITION ICRS 1.2 -2.3", r.toSTCS()); + + r = new Region(new RegionFunction(new StringConstant("Union ICRS (Polygon 1 4 2 4 2 5 1 5 Polygon 3 4 4 4 4 5 3 5)"))); + assertEquals(RegionType.UNION, r.type); + assertEquals("ICRS", r.coordSys.toSTCS()); + assertNull(r.coordinates); + assertEquals(Double.NaN, r.radius, 0); + assertEquals(Double.NaN, r.width, 0); + assertEquals(Double.NaN, r.height, 0); + assertEquals(2, 
r.regions.length); + assertEquals("UNION ICRS (POLYGON 1.0 4.0 2.0 4.0 2.0 5.0 1.0 5.0 POLYGON 3.0 4.0 4.0 4.0 4.0 5.0 3.0 5.0)", r.toString()); + // inner region 1 + Region innerR = r.regions[0]; + assertEquals(RegionType.POLYGON, innerR.type); + assertEquals("", innerR.coordSys.toSTCS()); + assertEquals(4, innerR.coordinates.length); + assertEquals(2, innerR.coordinates[0].length); + assertEquals(1, innerR.coordinates[0][0], 0); + assertEquals(4, innerR.coordinates[0][1], 0); + assertEquals(2, innerR.coordinates[1][0], 0); + assertEquals(4, innerR.coordinates[1][1], 0); + assertEquals(2, innerR.coordinates[2][0], 0); + assertEquals(5, innerR.coordinates[2][1], 0); + assertEquals(1, innerR.coordinates[3][0], 0); + assertEquals(5, innerR.coordinates[3][1], 0); + assertEquals(Double.NaN, innerR.radius, 0); + assertEquals(Double.NaN, innerR.width, 0); + assertEquals(Double.NaN, innerR.height, 0); + assertNull(innerR.regions); + assertEquals("POLYGON 1.0 4.0 2.0 4.0 2.0 5.0 1.0 5.0", innerR.toSTCS()); + // inner region 2 + innerR = r.regions[1]; + assertEquals(RegionType.POLYGON, innerR.type); + assertEquals("", innerR.coordSys.toSTCS()); + assertEquals(4, innerR.coordinates.length); + assertEquals(2, innerR.coordinates[0].length); + assertEquals(3, innerR.coordinates[0][0], 0); + assertEquals(4, innerR.coordinates[0][1], 0); + assertEquals(4, innerR.coordinates[1][0], 0); + assertEquals(4, innerR.coordinates[1][1], 0); + assertEquals(4, innerR.coordinates[2][0], 0); + assertEquals(5, innerR.coordinates[2][1], 0); + assertEquals(3, innerR.coordinates[3][0], 0); + assertEquals(5, innerR.coordinates[3][1], 0); + assertEquals(Double.NaN, innerR.radius, 0); + assertEquals(Double.NaN, innerR.width, 0); + assertEquals(Double.NaN, innerR.height, 0); + assertNull(innerR.regions); + assertEquals("POLYGON 3.0 4.0 4.0 4.0 4.0 5.0 3.0 5.0", innerR.toSTCS()); + + r = new Region(new RegionFunction(new StringConstant("NOT(CIRCLE ICRS 1.2 -2.3 5)"))); + assertEquals(RegionType.NOT, 
r.type); + assertNull(r.coordSys); + assertNull(r.coordinates); + assertEquals(Double.NaN, r.radius, 0); + assertEquals(Double.NaN, r.width, 0); + assertEquals(Double.NaN, r.height, 0); + assertEquals(1, r.regions.length); + assertEquals("NOT(CIRCLE ICRS 1.2 -2.3 5.0)", r.toSTCS()); + // inner region + innerR = r.regions[0]; + assertEquals(RegionType.CIRCLE, innerR.type); + assertEquals("ICRS", innerR.coordSys.toSTCS()); + assertEquals(1, innerR.coordinates.length); + assertEquals(2, innerR.coordinates[0].length); + assertEquals(1.2, innerR.coordinates[0][0], 0); + assertEquals(-2.3, innerR.coordinates[0][1], 0); + assertEquals(5, innerR.radius, 0); + assertEquals(Double.NaN, innerR.width, 0); + assertEquals(Double.NaN, innerR.height, 0); + assertNull(innerR.regions); + assertEquals("CIRCLE ICRS 1.2 -2.3 5.0", innerR.toSTCS()); + }catch(Exception e){ + e.printStackTrace(System.err); + fail(); + } + + // Test with incorrect syntaxes: + try{ + new Region(new PointFunction(new StringConstant(""), new StringConstant("1.2"), new NegativeOperand(new NumericConstant(2.3)))); + fail("The first coordinate is a StringConstant rather than a NumericConstant!"); + }catch(Exception e){ + assertTrue(e instanceof ParseException); + assertEquals("Can not convert into STC-S a non numeric argument (including ADQLColumn and Operation)!", e.getMessage()); + } + try{ + new Region(new PointFunction(new NumericConstant(.65), new NumericConstant(1.2), new NegativeOperand(new NumericConstant(2.3)))); + fail("The coordinate system is a NumericConstant rather than a StringConstant!"); + }catch(Exception e){ + assertTrue(e instanceof ParseException); + assertEquals("A coordinate system must be a string literal: \"0.65\" is not a string operand!", e.getMessage()); + } + try{ + new Region(new PointFunction(new StringConstant(""), null, new NegativeOperand(new NumericConstant(2.3)))); + fail("The first coordinate is missing!"); + }catch(Exception e){ + assertTrue(e instanceof 
NullPointerException); + assertEquals("The POINT function must have non-null coordinates!", e.getMessage()); + } + try{ + new Region(new RegionFunction(new StringConstant(""))); + fail("Missing STC-S expression!"); + }catch(Exception e){ + assertTrue(e instanceof ParseException); + assertEquals("Missing STC-S expression to parse!", e.getMessage()); + } + try{ + new Region(new RegionFunction(new StringConstant("MyRegion HERE 1.2"))); + fail("Totally incorrect region type!"); + }catch(Exception e){ + assertTrue(e instanceof ParseException); + assertEquals("Unknown STC region type: \"MYREGION\"!", e.getMessage()); + } + try{ + new Region(new RegionFunction((new ADQLParser(new StringBufferInputStream("'POSITION ' || coordinateSys || ' ' || ra || ' ' || dec"))).StringExpression())); + fail("String concatenation can not be managed!"); + }catch(Exception e){ + assertTrue(e instanceof ParseException); + assertEquals("Can not convert into STC-S a non string argument (including ADQLColumn and Concatenation)!", e.getMessage()); + } + try{ + new Region(new PointFunction(new ADQLColumn("coordSys"), new NumericConstant(1), new NumericConstant(2))); + fail("Columns can not be managed!"); + }catch(Exception e){ + assertTrue(e instanceof ParseException); + assertEquals("Can not convert into STC-S a non string argument (including ADQLColumn and Concatenation)!", e.getMessage()); + } + try{ + new Region(new PointFunction(new StringConstant("ICRS"), new Operation(new NumericConstant(2), OperationType.MULT, new NumericConstant(5)), new NumericConstant(2))); + fail("Operations can not be managed!"); + }catch(Exception e){ + assertTrue(e instanceof ParseException); + assertEquals("Can not convert into STC-S a non numeric argument (including ADQLColumn and Operation)!", e.getMessage()); + } + } + + @Test + public void parseCoordSys(){ + // GOOD SYNTAXES: + try{ + CoordSys p; + + // Default coordinate system (should be then interpreted as local coordinate system): + for(String s : new 
String[]{null,""," "}){ + p = STCS.parseCoordSys(s); + assertEquals(Frame.UNKNOWNFRAME, p.frame); + assertEquals(RefPos.UNKNOWNREFPOS, p.refpos); + assertEquals(Flavor.SPHERICAL2, p.flavor); + assertTrue(p.isDefault()); + } + + // Just a frame: + p = STCS.parseCoordSys("ICRS"); + assertEquals(Frame.ICRS, p.frame); + assertEquals(RefPos.UNKNOWNREFPOS, p.refpos); + assertEquals(Flavor.SPHERICAL2, p.flavor); + assertFalse(p.isDefault()); + + // Just a reference position: + p = STCS.parseCoordSys("LSR"); + assertEquals(Frame.UNKNOWNFRAME, p.frame); + assertEquals(RefPos.LSR, p.refpos); + assertEquals(Flavor.SPHERICAL2, p.flavor); + assertFalse(p.isDefault()); + + // Just a flavor: + p = STCS.parseCoordSys("CARTESIAN2"); + assertEquals(Frame.UNKNOWNFRAME, p.frame); + assertEquals(RefPos.UNKNOWNREFPOS, p.refpos); + assertEquals(Flavor.CARTESIAN2, p.flavor); + assertFalse(p.isDefault()); + + // Frame + RefPos: + p = STCS.parseCoordSys("ICRS LSR"); + assertEquals(Frame.ICRS, p.frame); + assertEquals(RefPos.LSR, p.refpos); + assertEquals(Flavor.SPHERICAL2, p.flavor); + assertFalse(p.isDefault()); + + // Frame + Flavor: + p = STCS.parseCoordSys("ICRS SPHERICAL2"); + assertEquals(Frame.ICRS, p.frame); + assertEquals(RefPos.UNKNOWNREFPOS, p.refpos); + assertEquals(Flavor.SPHERICAL2, p.flavor); + assertFalse(p.isDefault()); + + // RefPos + Flavor: + p = STCS.parseCoordSys("HELIOCENTER SPHERICAL2"); + assertEquals(Frame.UNKNOWNFRAME, p.frame); + assertEquals(RefPos.HELIOCENTER, p.refpos); + assertEquals(Flavor.SPHERICAL2, p.flavor); + assertFalse(p.isDefault()); + + // Frame + RefPos + Flavor + p = STCS.parseCoordSys("ICRS GEOCENTER SPHERICAL2"); + assertEquals(Frame.ICRS, p.frame); + assertEquals(RefPos.GEOCENTER, p.refpos); + assertEquals(Flavor.SPHERICAL2, p.flavor); + assertFalse(p.isDefault()); + + // Lets try in a different case: + p = STCS.parseCoordSys("icrs Geocenter SpheriCAL2"); + assertEquals(Frame.ICRS, p.frame); + assertEquals(RefPos.GEOCENTER, p.refpos); + 
assertEquals(Flavor.SPHERICAL2, p.flavor); + assertFalse(p.isDefault()); + }catch(Exception e){ + e.printStackTrace(System.err); + fail(); + } + + // WRONG SYNTAXES: + try{ + STCS.parseCoordSys("HOME"); + fail(); + }catch(Exception e){ + assertTrue(e instanceof ParseException); + assertEquals("Incorrect syntax: \"HOME\" was unexpected! Expected syntax: \"[(ECLIPTIC|FK4|FK5|GALACTIC|ICRS|UNKNOWNFRAME)] [(BARYCENTER|GEOCENTER|HELIOCENTER|LSR|TOPOCENTER|RELOCATABLE|UNKNOWNREFPOS)] [(CARTESIAN2|CARTESIAN3|SPHERICAL2)]\" ; an empty string is also allowed and will be interpreted as the coordinate system locally used.", e.getMessage()); + } + + // With wrong reference position: + try{ + STCS.parseCoordSys("ICRS HOME SPHERICAL2"); + fail(); + }catch(Exception e){ + assertTrue(e instanceof ParseException); + assertEquals("Incorrect syntax: \"HOME SPHERICAL2\" was unexpected! Expected syntax: \"[(ECLIPTIC|FK4|FK5|GALACTIC|ICRS|UNKNOWNFRAME)] [(BARYCENTER|GEOCENTER|HELIOCENTER|LSR|TOPOCENTER|RELOCATABLE|UNKNOWNREFPOS)] [(CARTESIAN2|CARTESIAN3|SPHERICAL2)]\" ; an empty string is also allowed and will be interpreted as the coordinate system locally used.", e.getMessage()); + } + + // With a cartesian flavor: + try{ + STCS.parseCoordSys("ICRS CARTESIAN2"); + fail(); + }catch(Exception e){ + assertTrue(e instanceof ParseException); + assertEquals("a coordinate system expressed with a cartesian flavor MUST have an UNKNOWNFRAME and UNKNOWNREFPOS!", e.getMessage()); + } + try{ + STCS.parseCoordSys("LSR CARTESIAN3"); + fail(); + }catch(Exception e){ + assertTrue(e instanceof ParseException); + assertEquals("a coordinate system expressed with a cartesian flavor MUST have an UNKNOWNFRAME and UNKNOWNREFPOS!", e.getMessage()); + } + try{ + CoordSys p = STCS.parseCoordSys("CARTESIAN2"); + assertEquals(Frame.UNKNOWNFRAME, p.frame); + assertEquals(RefPos.UNKNOWNREFPOS, p.refpos); + assertEquals(Flavor.CARTESIAN2, p.flavor); + + p = STCS.parseCoordSys("CARTESIAN3"); + 
assertEquals(Frame.UNKNOWNFRAME, p.frame); + assertEquals(RefPos.UNKNOWNREFPOS, p.refpos); + assertEquals(Flavor.CARTESIAN3, p.flavor); + }catch(Exception e){ + e.printStackTrace(System.err); + fail(); + } + + // Without spaces: + try{ + STCS.parseCoordSys("icrsGeocentercarteSIAN2"); + fail(); + }catch(Exception e){ + assertTrue(e instanceof ParseException); + assertEquals("Incorrect syntax: \"icrsGeocentercarteSIAN2\" was unexpected! Expected syntax: \"[(ECLIPTIC|FK4|FK5|GALACTIC|ICRS|UNKNOWNFRAME)] [(BARYCENTER|GEOCENTER|HELIOCENTER|LSR|TOPOCENTER|RELOCATABLE|UNKNOWNREFPOS)] [(CARTESIAN2|CARTESIAN3|SPHERICAL2)]\" ; an empty string is also allowed and will be interpreted as the coordinate system locally used.", e.getMessage()); + } + } + + @Test + public void serializeCoordSys(){ + try{ + assertEquals("", STCS.toSTCS((CoordSys)null)); + + assertEquals("", STCS.toSTCS(new CoordSys())); + + assertEquals("", STCS.toSTCS(new CoordSys(null, null, null))); + assertEquals("", STCS.toSTCS(new CoordSys(Frame.DEFAULT, RefPos.DEFAULT, Flavor.DEFAULT))); + assertEquals("", STCS.toSTCS(new CoordSys(Frame.UNKNOWNFRAME, RefPos.UNKNOWNREFPOS, Flavor.SPHERICAL2))); + + assertEquals("", STCS.toSTCS(new CoordSys(null))); + assertEquals("", STCS.toSTCS(new CoordSys(""))); + assertEquals("", STCS.toSTCS(new CoordSys(" \n\r"))); + + assertEquals("ICRS", STCS.toSTCS(new CoordSys(Frame.ICRS, null, null))); + assertEquals("ICRS", STCS.toSTCS(new CoordSys(Frame.ICRS, RefPos.DEFAULT, Flavor.DEFAULT))); + assertEquals("ICRS", STCS.toSTCS(new CoordSys(Frame.ICRS, RefPos.UNKNOWNREFPOS, Flavor.SPHERICAL2))); + + assertEquals("GEOCENTER", STCS.toSTCS(new CoordSys(null, RefPos.GEOCENTER, null))); + assertEquals("GEOCENTER", STCS.toSTCS(new CoordSys(Frame.DEFAULT, RefPos.GEOCENTER, Flavor.DEFAULT))); + assertEquals("GEOCENTER", STCS.toSTCS(new CoordSys(Frame.UNKNOWNFRAME, RefPos.GEOCENTER, Flavor.SPHERICAL2))); + + assertEquals("CARTESIAN3", STCS.toSTCS(new CoordSys(null, null, 
Flavor.CARTESIAN3))); + assertEquals("CARTESIAN3", STCS.toSTCS(new CoordSys(Frame.DEFAULT, RefPos.UNKNOWNREFPOS, Flavor.CARTESIAN3))); + assertEquals("CARTESIAN3", STCS.toSTCS(new CoordSys(Frame.UNKNOWNFRAME, RefPos.UNKNOWNREFPOS, Flavor.CARTESIAN3))); + + assertEquals("ICRS GEOCENTER", STCS.toSTCS(new CoordSys(Frame.ICRS, RefPos.GEOCENTER, null))); + assertEquals("ICRS GEOCENTER", STCS.toSTCS(new CoordSys(Frame.ICRS, RefPos.GEOCENTER, Flavor.DEFAULT))); + + assertEquals("UNKNOWNFRAME UNKNOWNREFPOS SPHERICAL2", new CoordSys().toFullSTCS()); + assertEquals("UNKNOWNFRAME UNKNOWNREFPOS SPHERICAL2", new CoordSys("").toFullSTCS()); + assertEquals("UNKNOWNFRAME UNKNOWNREFPOS SPHERICAL2", new CoordSys(null).toFullSTCS()); + assertEquals("UNKNOWNFRAME UNKNOWNREFPOS SPHERICAL2", new CoordSys(" \n\t").toFullSTCS()); + assertEquals("UNKNOWNFRAME UNKNOWNREFPOS SPHERICAL2", new CoordSys(null, null, null).toFullSTCS()); + assertEquals("UNKNOWNFRAME UNKNOWNREFPOS SPHERICAL2", new CoordSys(Frame.DEFAULT, RefPos.DEFAULT, Flavor.DEFAULT).toFullSTCS()); + assertEquals("ICRS UNKNOWNREFPOS SPHERICAL2", new CoordSys(Frame.ICRS, null, null).toFullSTCS()); + assertEquals("ICRS UNKNOWNREFPOS SPHERICAL2", new CoordSys(Frame.ICRS, RefPos.DEFAULT, Flavor.DEFAULT).toFullSTCS()); + assertEquals("UNKNOWNFRAME GEOCENTER SPHERICAL2", new CoordSys(Frame.UNKNOWNFRAME, RefPos.GEOCENTER, Flavor.DEFAULT).toFullSTCS()); + assertEquals("UNKNOWNFRAME UNKNOWNREFPOS CARTESIAN3", new CoordSys(Frame.DEFAULT, RefPos.DEFAULT, Flavor.CARTESIAN3).toFullSTCS()); + assertEquals("ICRS GEOCENTER SPHERICAL2", new CoordSys(Frame.ICRS, RefPos.GEOCENTER, Flavor.DEFAULT).toFullSTCS()); + }catch(ParseException pe){ + pe.printStackTrace(System.err); + fail(); + } + } + + @Test + public void parseRegion(){ + // TESTS WITH NO STC-S: + try{ + STCS.parseRegion(null); + fail(); + }catch(Exception e){ + assertTrue(e instanceof ParseException); + assertEquals("Missing STC-S expression to parse!", e.getMessage()); + } + try{ + 
STCS.parseRegion(""); + fail(); + }catch(Exception e){ + assertTrue(e instanceof ParseException); + assertEquals("Missing STC-S expression to parse!", e.getMessage()); + } + try{ + STCS.parseRegion(" \n\r"); + fail(); + }catch(Exception e){ + assertTrue(e instanceof ParseException); + assertEquals("Missing STC-S expression to parse!", e.getMessage()); + } + + // TESTS WITH A VALID EXPRESSION, EACH OF A DIFFERENT REGION TYPE: + String[] expressions = new String[]{" Position GALACTIC 10 20","Circle ICRS GEOCENTER 10 20 0.5 ","BOX cartesian2 3 3 2 2","Polygon 1 4 2 4 2 5 1 5","Union ICRS (Polygon 1 4 2 4 2 5 1 5 Polygon 3 4 4 4 4 5 3 5)","INTERSECTION ICRS (Polygon 1 4 2 4 2 5 1 5 Polygon 3 4 4 4 4 5 3 5)","NOT(Circle ICRS GEOCENTER 10 20 0.5)"}; + try{ + for(String e : expressions) + STCS.parseRegion(e); + }catch(Exception ex){ + ex.printStackTrace(System.err); + fail(); + } + + // TEST WITH A MISSING PARAMETER: + expressions = new String[]{" Position GALACTIC 10 ","BOX cartesian2 3 3 2","NOT()"}; + for(String e : expressions){ + try{ + STCS.parseRegion(e); + fail(); + }catch(Exception ex){ + assertTrue(ex instanceof ParseException); + assertTrue(ex.getMessage().startsWith("Unexpected End Of Expression! Expected syntax: \"")); + } + } + + // TEST WITH A WRONG COORDINATE SYSTEM (since it is optional in all these expressions, it will be considered as a coordinate...which is of course, not the case): + try{ + STCS.parseRegion("Circle HERE 10 20 0.5 "); + fail(); + }catch(Exception ex){ + assertTrue(ex instanceof ParseException); + assertTrue(ex.getMessage().startsWith("Incorrect syntax: a coordinates pair (2 numerics separated by one or more spaces) was expected! 
Expected syntax: \"CIRCLE \", where coordPair=\" \", radius=\"\" and coordSys=\"[(ECLIPTIC|FK4|FK5|GALACTIC|ICRS|UNKNOWNFRAME)] [(BARYCENTER|GEOCENTER|HELIOCENTER|LSR|TOPOCENTER|RELOCATABLE|UNKNOWNREFPOS)] [(CARTESIAN2|CARTESIAN3|SPHERICAL2)]\" ; an empty string is also allowed and will be interpreted as the coordinate system locally used.")); + } + + // TEST WITH EITHER A WRONG NUMERIC (L in lower case instead of 1) OR A MISSING OPENING PARENTHESIS: + expressions = new String[]{"Polygon 1 4 2 4 2 5 l 5","Union ICRS Polygon 1 4 2 4 2 5 1 5 Polygon 3 4 4 4 4 5 3 5)"}; + for(String e : expressions){ + try{ + STCS.parseRegion(e); + fail(); + }catch(Exception ex){ + assertTrue(ex instanceof ParseException); + assertTrue(ex.getMessage().startsWith("Incorrect syntax: ")); + } + } + } +} diff --git a/test/adql/parser/TestADQLParser.java b/test/adql/parser/TestADQLParser.java new file mode 100644 index 0000000000000000000000000000000000000000..10837d51c98c84a26d562efc5d48ccd053c588b8 --- /dev/null +++ b/test/adql/parser/TestADQLParser.java @@ -0,0 +1,43 @@ +package adql.parser; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.fail; + +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import adql.query.ADQLQuery; +import adql.query.operand.StringConstant; + +public class TestADQLParser { + + @BeforeClass + public static void setUpBeforeClass() throws Exception{} + + @AfterClass + public static void tearDownAfterClass() throws Exception{} + + @Before + public void setUp() throws Exception{} + + @After + public void tearDown() throws Exception{} + + @Test + public void test(){ + ADQLParser parser = new ADQLParser(); + try{ + ADQLQuery query = parser.parseQuery("SELECT 'truc''machin' 'bidule' -- why not a comment now ^^\n'FIN' FROM foo;"); + assertNotNull(query); + assertEquals("truc'machinbiduleFIN", 
((StringConstant)(query.getSelect().get(0).getOperand())).getValue()); + assertEquals("'truc''machinbiduleFIN'", query.getSelect().get(0).getOperand().toADQL()); + }catch(Exception ex){ + fail("String litteral concatenation is perfectly legal according to the ADQL standard."); + } + } + +} diff --git a/test/adql/query/from/TestCrossJoin.java b/test/adql/query/from/TestCrossJoin.java new file mode 100644 index 0000000000000000000000000000000000000000..ce440bcb13f3dc44ef1032182a4c5c50c9fd236a --- /dev/null +++ b/test/adql/query/from/TestCrossJoin.java @@ -0,0 +1,95 @@ +package adql.query.from; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.fail; + +import java.util.List; + +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.Test; + +import adql.db.DBColumn; +import adql.db.DBType; +import adql.db.DBType.DBDatatype; +import adql.db.DefaultDBColumn; +import adql.db.DefaultDBTable; +import adql.db.SearchColumnList; +import adql.query.IdentifierField; + +public class TestCrossJoin { + + private ADQLTable tableA, tableB; + + @AfterClass + public static void tearDownAfterClass() throws Exception{} + + @Before + public void setUp() throws Exception{ + /* SET THE TABLES AND COLUMNS NEEDED FOR THE TEST */ + // Describe the available table: + DefaultDBTable metaTableA = new DefaultDBTable("A"); + metaTableA.setADQLSchemaName("public"); + DefaultDBTable metaTableB = new DefaultDBTable("B"); + metaTableB.setADQLSchemaName("public"); + + // Describe its columns: + metaTableA.addColumn(new DefaultDBColumn("id", new DBType(DBDatatype.VARCHAR), metaTableA)); + metaTableA.addColumn(new DefaultDBColumn("txta", new DBType(DBDatatype.VARCHAR), metaTableA)); + metaTableB.addColumn(new DefaultDBColumn("id", new DBType(DBDatatype.VARCHAR), metaTableB)); + metaTableB.addColumn(new DefaultDBColumn("txtb", new DBType(DBDatatype.VARCHAR), metaTableB)); + + // Build the ADQL tables: + tableA = 
new ADQLTable("A"); + tableA.setDBLink(metaTableA); + tableB = new ADQLTable("B"); + tableB.setDBLink(metaTableB); + } + + @Test + public void testGetDBColumns(){ + try{ + ADQLJoin join = new CrossJoin(tableA, tableB); + SearchColumnList joinColumns = join.getDBColumns(); + assertEquals(4, joinColumns.size()); + + // check column A.id and B.id + List lstFound = joinColumns.search(null, null, null, "id", IdentifierField.getFullCaseSensitive(true)); + assertEquals(2, lstFound.size()); + // A.id + assertNotNull(lstFound.get(0).getTable()); + assertEquals("A", lstFound.get(0).getTable().getADQLName()); + assertEquals("public", lstFound.get(0).getTable().getADQLSchemaName()); + assertEquals(1, joinColumns.search(null, "public", "A", "id", IdentifierField.getFullCaseSensitive(true)).size()); + // B.id + assertNotNull(lstFound.get(1).getTable()); + assertEquals("B", lstFound.get(1).getTable().getADQLName()); + assertEquals("public", lstFound.get(1).getTable().getADQLSchemaName()); + assertEquals(1, joinColumns.search(null, "public", "B", "id", IdentifierField.getFullCaseSensitive(true)).size()); + assertEquals(0, joinColumns.search(null, "public", "C", "id", IdentifierField.getFullCaseSensitive(true)).size()); + + // check column A.txta + lstFound = joinColumns.search(null, null, null, "txta", IdentifierField.getFullCaseSensitive(true)); + assertEquals(1, lstFound.size()); + assertNotNull(lstFound.get(0).getTable()); + assertEquals("A", lstFound.get(0).getTable().getADQLName()); + assertEquals("public", lstFound.get(0).getTable().getADQLSchemaName()); + assertEquals(1, joinColumns.search(null, "public", "A", "txta", IdentifierField.getFullCaseSensitive(true)).size()); + assertEquals(0, joinColumns.search(null, "public", "B", "txta", IdentifierField.getFullCaseSensitive(true)).size()); + + // check column B.txtb + lstFound = joinColumns.search(null, null, null, "txtb", IdentifierField.getFullCaseSensitive(true)); + assertEquals(1, lstFound.size()); + 
assertNotNull(lstFound.get(0).getTable()); + assertEquals("B", lstFound.get(0).getTable().getADQLName()); + assertEquals("public", lstFound.get(0).getTable().getADQLSchemaName()); + assertEquals(1, joinColumns.search(null, "public", "B", "txtb", IdentifierField.getFullCaseSensitive(true)).size()); + assertEquals(0, joinColumns.search(null, "public", "A", "txtb", IdentifierField.getFullCaseSensitive(true)).size()); + + }catch(Exception ex){ + ex.printStackTrace(); + fail("This test should have succeeded!"); + } + } +} diff --git a/test/adql/query/from/TestInnerJoin.java b/test/adql/query/from/TestInnerJoin.java new file mode 100644 index 0000000000000000000000000000000000000000..57c4f87edaf197d4a3df74b1925dd4c542735347 --- /dev/null +++ b/test/adql/query/from/TestInnerJoin.java @@ -0,0 +1,158 @@ +package adql.query.from; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.fail; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.Before; +import org.junit.Test; + +import adql.db.DBColumn; +import adql.db.DBCommonColumn; +import adql.db.DBType; +import adql.db.DBType.DBDatatype; +import adql.db.DefaultDBColumn; +import adql.db.DefaultDBTable; +import adql.db.SearchColumnList; +import adql.query.IdentifierField; +import adql.query.operand.ADQLColumn; + +public class TestInnerJoin { + + private ADQLTable tableA, tableB, tableC; + + @Before + public void setUp() throws Exception{ + /* SET THE TABLES AND COLUMNS NEEDED FOR THE TEST */ + // Describe the available table: + DefaultDBTable metaTableA = new DefaultDBTable("A"); + metaTableA.setADQLSchemaName("public"); + DefaultDBTable metaTableB = new DefaultDBTable("B"); + metaTableB.setADQLSchemaName("public"); + DefaultDBTable metaTableC = new DefaultDBTable("C"); + metaTableC.setADQLSchemaName("public"); + + // Describe its columns: + metaTableA.addColumn(new DefaultDBColumn("id", new DBType(DBDatatype.VARCHAR), metaTableA)); 
+ metaTableA.addColumn(new DefaultDBColumn("txta", new DBType(DBDatatype.VARCHAR), metaTableA)); + metaTableB.addColumn(new DefaultDBColumn("id", new DBType(DBDatatype.VARCHAR), metaTableB)); + metaTableB.addColumn(new DefaultDBColumn("txtb", new DBType(DBDatatype.VARCHAR), metaTableB)); + metaTableC.addColumn(new DefaultDBColumn("Id", new DBType(DBDatatype.VARCHAR), metaTableC)); + metaTableC.addColumn(new DefaultDBColumn("txta", new DBType(DBDatatype.VARCHAR), metaTableC)); + metaTableC.addColumn(new DefaultDBColumn("txtc", new DBType(DBDatatype.VARCHAR), metaTableC)); + + // Build the ADQL tables: + tableA = new ADQLTable("A"); + tableA.setDBLink(metaTableA); + tableB = new ADQLTable("B"); + tableB.setDBLink(metaTableB); + tableC = new ADQLTable("C"); + tableC.setDBLink(metaTableC); + } + + @Test + public void testGetDBColumns(){ + // Test NATURAL JOIN 1: + try{ + ADQLJoin join = new InnerJoin(tableA, tableB); + SearchColumnList joinColumns = join.getDBColumns(); + assertEquals(3, joinColumns.size()); + List lstFound = joinColumns.search(null, null, null, "id", IdentifierField.getFullCaseSensitive(true)); + assertEquals(1, lstFound.size()); + assertEquals(DBCommonColumn.class, lstFound.get(0).getClass()); + assertEquals(1, joinColumns.search(null, "public", "A", "id", IdentifierField.getFullCaseSensitive(true)).size()); + assertEquals(1, joinColumns.search(null, "public", "B", "id", IdentifierField.getFullCaseSensitive(true)).size()); + assertEquals(0, joinColumns.search(null, "public", "C", "id", IdentifierField.getFullCaseSensitive(true)).size()); + lstFound = joinColumns.search(null, "public", "A", "txta", IdentifierField.getFullCaseSensitive(true)); + assertEquals(1, lstFound.size()); + lstFound = joinColumns.search(null, "public", "B", "txtb", IdentifierField.getFullCaseSensitive(true)); + assertEquals(1, lstFound.size()); + }catch(Exception ex){ + ex.printStackTrace(); + fail("This test should have succeeded!"); + } + + // Test NATURAL JOIN 2: + try{ + 
ADQLJoin join = new InnerJoin(tableA, tableC); + SearchColumnList joinColumns = join.getDBColumns(); + assertEquals(3, joinColumns.size()); + + // check id (column common to table A and C only): + List lstFound = joinColumns.search(null, null, null, "id", IdentifierField.getFullCaseSensitive(true)); + assertEquals(1, lstFound.size()); + assertEquals(DBCommonColumn.class, lstFound.get(0).getClass()); + assertEquals(1, joinColumns.search(null, "public", "A", "id", IdentifierField.getFullCaseSensitive(true)).size()); + assertEquals(1, joinColumns.search(null, "public", "C", "id", IdentifierField.getFullCaseSensitive(true)).size()); + assertEquals(0, joinColumns.search(null, "public", "B", "id", IdentifierField.getFullCaseSensitive(true)).size()); + + // check txta (column common to table A and C only): + lstFound = joinColumns.search(null, null, null, "txta", IdentifierField.getFullCaseSensitive(true)); + assertEquals(1, lstFound.size()); + assertEquals(DBCommonColumn.class, lstFound.get(0).getClass()); + assertEquals(1, joinColumns.search(null, "public", "A", "txta", IdentifierField.getFullCaseSensitive(true)).size()); + assertEquals(1, joinColumns.search(null, "public", "C", "txta", IdentifierField.getFullCaseSensitive(true)).size()); + assertEquals(0, joinColumns.search(null, "public", "B", "id", IdentifierField.getFullCaseSensitive(true)).size()); + + // check txtc (only for table C) + lstFound = joinColumns.search(null, null, null, "txtc", IdentifierField.getFullCaseSensitive(true)); + assertEquals(1, lstFound.size()); + assertNotNull(lstFound.get(0).getTable()); + assertEquals("C", lstFound.get(0).getTable().getADQLName()); + assertEquals("public", lstFound.get(0).getTable().getADQLSchemaName()); + + }catch(Exception ex){ + ex.printStackTrace(); + fail("This test should have succeeded!"); + } + + // Test with a USING("id"): + try{ + List usingList = new ArrayList(1); + usingList.add(new ADQLColumn("id")); + ADQLJoin join = new InnerJoin(tableA, tableC, 
usingList); + SearchColumnList joinColumns = join.getDBColumns(); + assertEquals(4, joinColumns.size()); + + // check id (column common to table A and C only): + List lstFound = joinColumns.search(null, null, null, "id", IdentifierField.getFullCaseSensitive(true)); + assertEquals(1, lstFound.size()); + assertEquals(DBCommonColumn.class, lstFound.get(0).getClass()); + assertEquals(1, joinColumns.search(null, "public", "A", "id", IdentifierField.getFullCaseSensitive(true)).size()); + assertEquals(1, joinColumns.search(null, "public", "C", "id", IdentifierField.getFullCaseSensitive(true)).size()); + assertEquals(0, joinColumns.search(null, "public", "B", "id", IdentifierField.getFullCaseSensitive(true)).size()); + + // check A.txta and C.txta: + lstFound = joinColumns.search(null, null, null, "txta", IdentifierField.getFullCaseSensitive(true)); + assertEquals(2, lstFound.size()); + // A.txta + assertNotNull(lstFound.get(0).getTable()); + assertEquals("A", lstFound.get(0).getTable().getADQLName()); + assertEquals("public", lstFound.get(0).getTable().getADQLSchemaName()); + assertEquals(1, joinColumns.search(null, "public", "A", "txta", IdentifierField.getFullCaseSensitive(true)).size()); + // C.txta + assertNotNull(lstFound.get(1).getTable()); + assertEquals("C", lstFound.get(1).getTable().getADQLName()); + assertEquals("public", lstFound.get(1).getTable().getADQLSchemaName()); + assertEquals(1, joinColumns.search(null, "public", "C", "txta", IdentifierField.getFullCaseSensitive(true)).size()); + assertEquals(0, joinColumns.search(null, "public", "B", "txta", IdentifierField.getFullCaseSensitive(true)).size()); + + // check txtc (only for table C): + lstFound = joinColumns.search(null, null, null, "txtc", IdentifierField.getFullCaseSensitive(true)); + assertEquals(1, lstFound.size()); + assertNotNull(lstFound.get(0).getTable()); + assertEquals("C", lstFound.get(0).getTable().getADQLName()); + assertEquals("public", lstFound.get(0).getTable().getADQLSchemaName()); + 
assertEquals(1, joinColumns.search(null, "public", "C", "txtc", IdentifierField.getFullCaseSensitive(true)).size()); + assertEquals(0, joinColumns.search(null, "public", "A", "txtc", IdentifierField.getFullCaseSensitive(true)).size()); + + }catch(Exception ex){ + ex.printStackTrace(); + fail("This test should have succeeded!"); + } + } + +} diff --git a/test/adql/translator/TestPgSphereTranslator.java b/test/adql/translator/TestPgSphereTranslator.java new file mode 100644 index 0000000000000000000000000000000000000000..2f34471fc7be78303a7945fee526c63fb425e71f --- /dev/null +++ b/test/adql/translator/TestPgSphereTranslator.java @@ -0,0 +1,332 @@ +package adql.translator; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.sql.Types; + +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.postgresql.util.PGobject; + +import adql.db.DBType; +import adql.db.DBType.DBDatatype; +import adql.db.STCS.Region; +import adql.parser.ParseException; + +public class TestPgSphereTranslator { + + @BeforeClass + public static void setUpBeforeClass() throws Exception{} + + @AfterClass + public static void tearDownAfterClass() throws Exception{} + + @Before + public void setUp() throws Exception{} + + @After + public void tearDown() throws Exception{} + + @Test + public void testConvertTypeFromDB(){ + PgSphereTranslator translator = new PgSphereTranslator(); + + // POINT + DBType type = translator.convertTypeFromDB(Types.OTHER, "spoint", "spoint", null); + assertNotNull(type); + assertEquals(DBDatatype.POINT, type.type); + assertEquals(DBType.NO_LENGTH, type.length); + + // CIRCLE + type = translator.convertTypeFromDB(Types.OTHER, "scircle", "scircle", null); + assertNotNull(type); + 
assertEquals(DBDatatype.REGION, type.type); + assertEquals(DBType.NO_LENGTH, type.length); + + // BOX + type = translator.convertTypeFromDB(Types.OTHER, "sbox", "sbox", null); + assertNotNull(type); + assertEquals(DBDatatype.REGION, type.type); + assertEquals(DBType.NO_LENGTH, type.length); + + // POLYGON + type = translator.convertTypeFromDB(Types.OTHER, "spoly", "spoly", null); + assertNotNull(type); + assertEquals(DBDatatype.REGION, type.type); + assertEquals(DBType.NO_LENGTH, type.length); + } + + @Test + public void testConvertTypeToDB(){ + PgSphereTranslator translator = new PgSphereTranslator(); + + // NULL + assertEquals("VARCHAR", translator.convertTypeToDB(null)); + + // POINT + assertEquals("spoint", translator.convertTypeToDB(new DBType(DBDatatype.POINT))); + + // REGION (any other region is transformed into a polygon) + assertEquals("spoly", translator.convertTypeToDB(new DBType(DBDatatype.REGION))); + } + + @Test + public void testTranslateGeometryFromDB(){ + PgSphereTranslator translator = new PgSphereTranslator(); + PGobject pgo = new PGobject(); + + // NULL + try{ + assertNull(translator.translateGeometryFromDB(null)); + }catch(Throwable t){ + t.printStackTrace(); + fail(t.getMessage()); + } + + // SPOINT + try{ + pgo.setType("spoint"); + pgo.setValue("(0.1 , 0.2)"); + Region r = translator.translateGeometryFromDB(pgo); + assertEquals(5.72957, r.coordinates[0][0], 1e-5); + assertEquals(11.45915, r.coordinates[0][1], 1e-5); + + pgo.setValue("(5.72957d , 11.45915d)"); + r = translator.translateGeometryFromDB(pgo); + assertEquals(5.72957, r.coordinates[0][0], 1e-5); + assertEquals(11.45915, r.coordinates[0][1], 1e-5); + + pgo.setValue("( 5d 43m 46.480625s , +11d 27m 32.961249s)"); + r = translator.translateGeometryFromDB(pgo); + assertEquals(5.72957, r.coordinates[0][0], 1e-5); + assertEquals(11.45915, r.coordinates[0][1], 1e-5); + + pgo.setValue("( 0h 22m 55.098708s , +11d 27m 32.961249s)"); + r = translator.translateGeometryFromDB(pgo); + 
assertEquals(5.72957, r.coordinates[0][0], 1e-5); + assertEquals(11.45915, r.coordinates[0][1], 1e-5); + }catch(Throwable t){ + t.printStackTrace(); + fail(t.getMessage()); + } + + // SCIRCLE + try{ + pgo.setType("scircle"); + pgo.setValue("<(0.1,-0.2),1>"); + Region r = translator.translateGeometryFromDB(pgo); + assertEquals(5.72957, r.coordinates[0][0], 1e-5); + assertEquals(-11.45915, r.coordinates[0][1], 1e-5); + assertEquals(57.29577, r.radius, 1e-5); + + pgo.setValue("<(5.72957d , -11.45915d) , 57.29577d>"); + r = translator.translateGeometryFromDB(pgo); + assertEquals(5.72957, r.coordinates[0][0], 1e-5); + assertEquals(-11.45915, r.coordinates[0][1], 1e-5); + assertEquals(57.29577, r.radius, 1e-5); + + pgo.setValue("<( 5d 43m 46.452s , -11d 27m 32.94s) , 57d 17m 44.772s>"); + r = translator.translateGeometryFromDB(pgo); + assertEquals(5.72957, r.coordinates[0][0], 1e-5); + assertEquals(-11.45915, r.coordinates[0][1], 1e-5); + assertEquals(57.29577, r.radius, 1e-5); + + pgo.setValue("<( 0h 22m 55.0968s , -11d 27m 32.94s) , 57d 17m 44.772s>"); + r = translator.translateGeometryFromDB(pgo); + assertEquals(5.72957, r.coordinates[0][0], 1e-5); + assertEquals(-11.45915, r.coordinates[0][1], 1e-5); + assertEquals(57.29577, r.radius, 1e-5); + }catch(Throwable t){ + t.printStackTrace(); + fail(t.getMessage()); + } + + // SBOX + try{ + pgo.setType("sbox"); + pgo.setValue("((0.1,0.2),(0.5,0.5))"); + Region r = translator.translateGeometryFromDB(pgo); + assertEquals(17.18873, r.coordinates[0][0], 1e-5); + assertEquals(20.05352, r.coordinates[0][1], 1e-5); + assertEquals(22.91831, r.width, 1e-5); + assertEquals(17.18873, r.height, 1e-5); + + pgo.setValue("((5.72957795130823d , 11.4591559026165d), (28.6478897565412d , 28.6478897565412d))"); + r = translator.translateGeometryFromDB(pgo); + assertEquals(17.18873, r.coordinates[0][0], 1e-5); + assertEquals(20.05352, r.coordinates[0][1], 1e-5); + assertEquals(22.91831, r.width, 1e-5); + assertEquals(17.18873, r.height, 1e-5); 
+ + pgo.setValue("(( 5d 43m 46.480625s , +11d 27m 32.961249s), ( 28d 38m 52.403124s , +28d 38m 52.403124s))"); + r = translator.translateGeometryFromDB(pgo); + assertEquals(17.18873, r.coordinates[0][0], 1e-5); + assertEquals(20.05352, r.coordinates[0][1], 1e-5); + assertEquals(22.91831, r.width, 1e-5); + assertEquals(17.18873, r.height, 1e-5); + + pgo.setValue("(( 0h 22m 55.098708s , +11d 27m 32.961249s), ( 1h 54m 35.493542s , +28d 38m 52.403124s))"); + r = translator.translateGeometryFromDB(pgo); + assertEquals(17.18873, r.coordinates[0][0], 1e-5); + assertEquals(20.05352, r.coordinates[0][1], 1e-5); + assertEquals(22.91831, r.width, 1e-5); + assertEquals(17.18873, r.height, 1e-5); + }catch(Throwable t){ + t.printStackTrace(); + fail(t.getMessage()); + } + + // SPOLY + try{ + pgo.setType("spoly"); + pgo.setValue("{(0.789761486527434 , 0.00436332312998582),(0.789761486527434 , 0.00872664625997165),(0.785398163397448 , 0.00872664625997165),(0.785398163397448 , 0.00436332312998582),(0.781034840267463 , 0.00436332312998582),(0.781034840267463 , 0),(0.785398163397448 , 0)}"); + Region r = translator.translateGeometryFromDB(pgo); + assertEquals(45.25, r.coordinates[0][0], 1e-2); + assertEquals(0.25, r.coordinates[0][1], 1e-2); + assertEquals(45.25, r.coordinates[1][0], 1e-2); + assertEquals(0.5, r.coordinates[1][1], 1e-2); + assertEquals(45, r.coordinates[2][0], 1e-2); + assertEquals(0.5, r.coordinates[2][1], 1e-2); + assertEquals(45, r.coordinates[3][0], 1e-2); + assertEquals(0.25, r.coordinates[3][1], 1e-2); + assertEquals(44.75, r.coordinates[4][0], 1e-2); + assertEquals(0.25, r.coordinates[4][1], 1e-2); + assertEquals(44.75, r.coordinates[5][0], 1e-2); + assertEquals(0, r.coordinates[5][1], 1e-2); + assertEquals(45, r.coordinates[6][0], 1e-2); + assertEquals(0, r.coordinates[6][1], 1e-2); + + pgo.setValue("{(45.25d , 0.25d), (45.25d , 0.5d), (45d , 0.5d), (45d , 0.25d), (44.75d , 0.25d), (44.75d , 0d), (45d , 0d)}"); + r = translator.translateGeometryFromDB(pgo); + 
assertEquals(45.25, r.coordinates[0][0], 1e-2); + assertEquals(0.25, r.coordinates[0][1], 1e-2); + assertEquals(45.25, r.coordinates[1][0], 1e-2); + assertEquals(0.5, r.coordinates[1][1], 1e-2); + assertEquals(45, r.coordinates[2][0], 1e-2); + assertEquals(0.5, r.coordinates[2][1], 1e-2); + assertEquals(45, r.coordinates[3][0], 1e-2); + assertEquals(0.25, r.coordinates[3][1], 1e-2); + assertEquals(44.75, r.coordinates[4][0], 1e-2); + assertEquals(0.25, r.coordinates[4][1], 1e-2); + assertEquals(44.75, r.coordinates[5][0], 1e-2); + assertEquals(0, r.coordinates[5][1], 1e-2); + assertEquals(45, r.coordinates[6][0], 1e-2); + assertEquals(0, r.coordinates[6][1], 1e-2); + + pgo.setValue("{( 45d 15m 0s , + 0d 15m 0s),( 45d 15m 0s , + 0d 30m 0s),( 45d 0m 0s , + 0d 30m 0s),( 45d 0m 0s , + 0d 15m 0s),( 44d 45m 0s , + 0d 15m 0s),( 44d 45m 0s , + 0d 0m 0s),( 45d 0m 0s , + 0d 0m 0s)}"); + r = translator.translateGeometryFromDB(pgo); + assertEquals(45.25, r.coordinates[0][0], 1e-2); + assertEquals(0.25, r.coordinates[0][1], 1e-2); + assertEquals(45.25, r.coordinates[1][0], 1e-2); + assertEquals(0.5, r.coordinates[1][1], 1e-2); + assertEquals(45, r.coordinates[2][0], 1e-2); + assertEquals(0.5, r.coordinates[2][1], 1e-2); + assertEquals(45, r.coordinates[3][0], 1e-2); + assertEquals(0.25, r.coordinates[3][1], 1e-2); + assertEquals(44.75, r.coordinates[4][0], 1e-2); + assertEquals(0.25, r.coordinates[4][1], 1e-2); + assertEquals(44.75, r.coordinates[5][0], 1e-2); + assertEquals(0, r.coordinates[5][1], 1e-2); + assertEquals(45, r.coordinates[6][0], 1e-2); + assertEquals(0, r.coordinates[6][1], 1e-2); + + pgo.setValue("{( 3h 1m 0s , + 0d 15m 0s),( 3h 1m 0s , + 0d 30m 0s),( 3h 0m 0s , + 0d 30m 0s),( 3h 0m 0s , + 0d 15m 0s),( 2h 59m 0s , + 0d 15m 0s),( 2h 59m 0s , + 0d 0m 0s),( 3h 0m 0s , + 0d 0m 0s)}"); + r = translator.translateGeometryFromDB(pgo); + assertEquals(45.25, r.coordinates[0][0], 1e-2); + assertEquals(0.25, r.coordinates[0][1], 1e-2); + assertEquals(45.25, 
r.coordinates[1][0], 1e-2); + assertEquals(0.5, r.coordinates[1][1], 1e-2); + assertEquals(45, r.coordinates[2][0], 1e-2); + assertEquals(0.5, r.coordinates[2][1], 1e-2); + assertEquals(45, r.coordinates[3][0], 1e-2); + assertEquals(0.25, r.coordinates[3][1], 1e-2); + assertEquals(44.75, r.coordinates[4][0], 1e-2); + assertEquals(0.25, r.coordinates[4][1], 1e-2); + assertEquals(44.75, r.coordinates[5][0], 1e-2); + assertEquals(0, r.coordinates[5][1], 1e-2); + assertEquals(45, r.coordinates[6][0], 1e-2); + assertEquals(0, r.coordinates[6][1], 1e-2); + }catch(Throwable t){ + t.printStackTrace(); + fail(t.getMessage()); + } + + // OTHER + try{ + translator.translateGeometryFromDB(new Double(12.3)); + fail("The translation of a Double as a geometry is not supported!"); + }catch(Throwable t){ + assertTrue(t instanceof ParseException); + assertEquals("Incompatible type! The column value \"12.3\" was supposed to be a geometrical object.", t.getMessage()); + } + try{ + pgo.setType("sline"); + pgo.setValue("( -90d, -20d, 200d, XYZ ), 30d "); + translator.translateGeometryFromDB(pgo); + fail("The translation of a sline is not supported!"); + }catch(Throwable t){ + assertTrue(t instanceof ParseException); + assertEquals("Unsupported PgSphere type: \"sline\"! 
Impossible to convert the column value \"( -90d, -20d, 200d, XYZ ), 30d \" into a Region.", t.getMessage()); + } + } + + @Test + public void testTranslateGeometryToDB(){ + PgSphereTranslator translator = new PgSphereTranslator(); + + try{ + // NULL + assertNull(translator.translateGeometryToDB(null)); + + // POSITION + Region r = new Region(null, new double[]{45,0}); + PGobject pgo = (PGobject)translator.translateGeometryToDB(r); + assertNotNull(pgo); + assertEquals("spoint", pgo.getType()); + assertEquals("(45.0d,0.0d)", pgo.getValue()); + + // CIRCLE + r = new Region(null, new double[]{45,0}, 1.2); + pgo = (PGobject)translator.translateGeometryToDB(r); + assertNotNull(pgo); + assertEquals("spoly", pgo.getType()); + assertEquals("{(46.2d,0.0d),(46.176942336483876d,0.2341083864193539d),(46.108655439013546d,0.4592201188381077d),(45.99776353476305d,0.6666842796235226d),(45.848528137423855d,0.8485281374238569d),(45.666684279623524d,0.9977635347630542d),(45.45922011883811d,1.1086554390135441d),(45.23410838641935d,1.1769423364838765d),(45.0d,1.2d),(44.76589161358065d,1.1769423364838765d),(44.54077988116189d,1.1086554390135441d),(44.333315720376476d,0.9977635347630543d),(44.151471862576145d,0.848528137423857d),(44.00223646523695d,0.6666842796235226d),(43.891344560986454d,0.4592201188381073d),(43.823057663516124d,0.23410838641935325d),(43.8d,-9.188564877424678E-16d),(43.823057663516124d,-0.23410838641935505d),(43.891344560986454d,-0.45922011883810904d),(44.00223646523695d,-0.6666842796235241d),(44.151471862576145d,-0.8485281374238584d),(44.333315720376476d,-0.9977635347630555d),(44.540779881161896d,-1.108655439013545d),(44.76589161358065d,-1.176942336483877d),(45.0d,-1.2d),(45.23410838641936d,-1.1769423364838758d),(45.45922011883811d,-1.1086554390135428d),(45.666684279623524d,-0.9977635347630521d),(45.84852813742386d,-0.8485281374238541d),(45.99776353476306d,-0.6666842796235192d),(46.108655439013546d,-0.45922011883810354d),(46.176942336483876d,-0.23410838641934922d)}", 
pgo.getValue()); + + // BOX + r = new Region(null, new double[]{45,0}, 1.2, 5); + pgo = (PGobject)translator.translateGeometryToDB(r); + assertNotNull(pgo); + assertEquals("spoly", pgo.getType()); + assertEquals("{(44.4d,-2.5d),(44.4d,2.5d),(45.6d,2.5d),(45.6d,-2.5d)}", pgo.getValue()); + + // POLYGON + r = new Region(null, new double[][]{new double[]{45.25,0.25},new double[]{45.25,0.5},new double[]{45,0.5},new double[]{45,0.25},new double[]{44.75,0.25},new double[]{44.75,0},new double[]{45,0}}); + pgo = (PGobject)translator.translateGeometryToDB(r); + assertNotNull(pgo); + assertEquals("spoly", pgo.getType()); + assertEquals("{(45.25d,0.25d),(45.25d,0.5d),(45.0d,0.5d),(45.0d,0.25d),(44.75d,0.25d),(44.75d,0.0d),(45.0d,0.0d)}", pgo.getValue()); + + // OTHER + try{ + r = new Region(new Region(null, new double[]{45,0})); + translator.translateGeometryToDB(r); + fail("The translation of a STC Not region is not supported!"); + }catch(Throwable ex){ + assertTrue(ex instanceof ParseException); + assertEquals("Unsupported geometrical region: \"" + r.type + "\"!", ex.getMessage()); + } + + }catch(ParseException t){ + t.printStackTrace(); + fail(t.getMessage()); + } + } + +} diff --git a/test/tap/config/AllTAPConfigTests.java b/test/tap/config/AllTAPConfigTests.java new file mode 100644 index 0000000000000000000000000000000000000000..c752a887f6f0a88877e813e8a685619bc7cca507 --- /dev/null +++ b/test/tap/config/AllTAPConfigTests.java @@ -0,0 +1,29 @@ +package tap.config; + +import java.util.Properties; + +import org.junit.runner.RunWith; +import org.junit.runners.Suite; +import org.junit.runners.Suite.SuiteClasses; + +import tap.parameters.TestMaxRecController; + +@RunWith(Suite.class) +@SuiteClasses({TestTAPConfiguration.class,TestConfigurableServiceConnection.class,TestConfigurableTAPFactory.class,TestMaxRecController.class}) +public class AllTAPConfigTests { + + public final static Properties getValidProperties(){ + Properties validProp = new Properties(); + 
validProp.setProperty("database_access", "jdbc"); + validProp.setProperty("jdbc_url", "jdbc:postgresql:gmantele"); + validProp.setProperty("jdbc_driver", "org.postgresql.Driver"); + validProp.setProperty("db_username", "gmantele"); + validProp.setProperty("db_password", "pwd"); + validProp.setProperty("sql_translator", "postgres"); + validProp.setProperty("metadata", "db"); + validProp.setProperty("file_manager", "local"); + validProp.setProperty("file_root_path", "bin/ext/test/tap"); + return validProp; + } + +} diff --git a/test/tap/config/TestConfigurableServiceConnection.java b/test/tap/config/TestConfigurableServiceConnection.java new file mode 100644 index 0000000000000000000000000000000000000000..f2058cff233a76ef9f6991a2a569fabd02f435a6 --- /dev/null +++ b/test/tap/config/TestConfigurableServiceConnection.java @@ -0,0 +1,1200 @@ +package tap.config; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import static tap.config.TAPConfiguration.DEFAULT_ASYNC_FETCH_SIZE; +import static tap.config.TAPConfiguration.DEFAULT_MAX_ASYNC_JOBS; +import static tap.config.TAPConfiguration.DEFAULT_SYNC_FETCH_SIZE; +import static tap.config.TAPConfiguration.KEY_ASYNC_FETCH_SIZE; +import static tap.config.TAPConfiguration.KEY_COORD_SYS; +import static tap.config.TAPConfiguration.KEY_DEFAULT_OUTPUT_LIMIT; +import static tap.config.TAPConfiguration.KEY_FILE_MANAGER; +import static tap.config.TAPConfiguration.KEY_GEOMETRIES; +import static tap.config.TAPConfiguration.KEY_LOG_ROTATION; +import static tap.config.TAPConfiguration.KEY_MAX_ASYNC_JOBS; +import static tap.config.TAPConfiguration.KEY_MAX_OUTPUT_LIMIT; +import static tap.config.TAPConfiguration.KEY_METADATA; +import static tap.config.TAPConfiguration.KEY_METADATA_FILE; +import static 
tap.config.TAPConfiguration.KEY_MIN_LOG_LEVEL; +import static tap.config.TAPConfiguration.KEY_OUTPUT_FORMATS; +import static tap.config.TAPConfiguration.KEY_SYNC_FETCH_SIZE; +import static tap.config.TAPConfiguration.KEY_TAP_FACTORY; +import static tap.config.TAPConfiguration.KEY_UDFS; +import static tap.config.TAPConfiguration.KEY_USER_IDENTIFIER; +import static tap.config.TAPConfiguration.VALUE_ANY; +import static tap.config.TAPConfiguration.VALUE_CSV; +import static tap.config.TAPConfiguration.VALUE_DB; +import static tap.config.TAPConfiguration.VALUE_FITS; +import static tap.config.TAPConfiguration.VALUE_JSON; +import static tap.config.TAPConfiguration.VALUE_LOCAL; +import static tap.config.TAPConfiguration.VALUE_NONE; +import static tap.config.TAPConfiguration.VALUE_SV; +import static tap.config.TAPConfiguration.VALUE_TEXT; +import static tap.config.TAPConfiguration.VALUE_TSV; +import static tap.config.TAPConfiguration.VALUE_VOTABLE; +import static tap.config.TAPConfiguration.VALUE_XML; + +import java.io.File; +import java.io.PrintWriter; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.Map; +import java.util.Properties; + +import javax.servlet.http.HttpServletRequest; + +import org.junit.BeforeClass; +import org.junit.Test; + +import tap.AbstractTAPFactory; +import tap.ServiceConnection; +import tap.ServiceConnection.LimitUnit; +import tap.TAPException; +import tap.db.DBConnection; +import tap.db.DBException; +import tap.db.JDBCConnection; +import tap.formatter.OutputFormat; +import tap.formatter.VOTableFormat; +import uk.ac.starlink.votable.DataFormat; +import uk.ac.starlink.votable.VOTableVersion; +import uws.UWSException; +import uws.job.user.DefaultJobOwner; +import uws.job.user.JobOwner; +import uws.service.UWSUrl; +import uws.service.UserIdentifier; +import uws.service.file.LocalUWSFileManager; +import uws.service.log.DefaultUWSLog; +import uws.service.log.UWSLog.LogLevel; +import adql.db.FunctionDef; +import 
	/**
	 * Builds, once for all the tests of this class, every {@link Properties}
	 * fixture they need: one valid configuration (coming from
	 * {@link AllTAPConfigTests#getValidProperties()}) and many clones of it,
	 * each altered on a single configuration key in order to test one specific
	 * success or failure case of {@link ConfigurableServiceConnection}.
	 */
	@BeforeClass
	public static void setUp() throws Exception{
		// LOAD ALL PROPERTIES FILES NEEDED FOR ALL THE TESTS:
		validProp = AllTAPConfigTests.getValidProperties();

		// --- File manager variants ---
		noFmProp = (Properties)validProp.clone();
		noFmProp.setProperty(KEY_FILE_MANAGER, "");

		fmClassNameProp = (Properties)validProp.clone();
		fmClassNameProp.setProperty(KEY_FILE_MANAGER, "{tap.config.TestConfigurableServiceConnection$FileManagerTest}");

		incorrectFmProp = (Properties)validProp.clone();
		incorrectFmProp.setProperty(KEY_FILE_MANAGER, "foo");

		// --- Logging variants (level + rotation frequency) ---
		correctLogProp = (Properties)validProp.clone();
		// surrounding blanks and mixed case are intentional: both must be tolerated
		correctLogProp.setProperty(KEY_LOG_ROTATION, " M 5 6 03 ");
		correctLogProp.setProperty(KEY_MIN_LOG_LEVEL, " WARNing ");

		incorrectLogLevelProp = (Properties)validProp.clone();
		incorrectLogLevelProp.setProperty(KEY_MIN_LOG_LEVEL, "foo");

		incorrectLogRotationProp = (Properties)validProp.clone();
		incorrectLogRotationProp.setProperty(KEY_LOG_ROTATION, "foo");

		// --- Metadata source variants (XML document / DB / manual class) ---
		xmlMetaProp = (Properties)validProp.clone();
		xmlMetaProp.setProperty(KEY_METADATA, VALUE_XML);
		xmlMetaProp.setProperty(KEY_METADATA_FILE, XML_FILE);

		wrongManualMetaProp = (Properties)validProp.clone();
		wrongManualMetaProp.setProperty(KEY_METADATA, "{tap.metadata.TAPMetadata}");

		missingMetaProp = (Properties)validProp.clone();
		missingMetaProp.remove(KEY_METADATA);

		wrongMetaProp = (Properties)validProp.clone();
		wrongMetaProp.setProperty(KEY_METADATA, "foo");

		wrongMetaFileProp = (Properties)validProp.clone();
		wrongMetaFileProp.setProperty(KEY_METADATA, VALUE_XML);
		wrongMetaFileProp.setProperty(KEY_METADATA_FILE, "foo");

		missingMetaFileProp = (Properties)validProp.clone();
		missingMetaFileProp.setProperty(KEY_METADATA, VALUE_XML);
		missingMetaFileProp.remove(KEY_METADATA_FILE);

		// --- Output format variants ---
		// mixes aliases, empty items, separated-value formats with custom separator/MIME/alias and a custom class:
		validFormatsProp = (Properties)validProp.clone();
		validFormatsProp.setProperty(KEY_OUTPUT_FORMATS, VALUE_FITS + "," + VALUE_TEXT + "," + VALUE_JSON + "," + VALUE_CSV + " , " + VALUE_TSV + ",, , " + VALUE_SV + "([])" + ", " + VALUE_SV + "(|):text/psv:psv" + ", " + VALUE_SV + "($)::test" + ", \t " + VALUE_SV + "(@):text/arobase:" + ", {tap.formatter.HTMLFormat}");

		// all the supported ways to declare a VOTable format (serialization and/or version):
		validVOTableFormatsProp = (Properties)validProp.clone();
		validVOTableFormatsProp.setProperty(KEY_OUTPUT_FORMATS, "votable, votable()::, vot(), vot::, votable:, votable(Td, 1.0), vot(TableData), votable(,1.2), vot(Fits):application/fits:supervot");

		badSVFormat1Prop = (Properties)validProp.clone();
		badSVFormat1Prop.setProperty(KEY_OUTPUT_FORMATS, VALUE_SV);

		badSVFormat2Prop = (Properties)validProp.clone();
		badSVFormat2Prop.setProperty(KEY_OUTPUT_FORMATS, VALUE_SV + "()");

		badVotFormat1Prop = (Properties)validProp.clone();
		badVotFormat1Prop.setProperty(KEY_OUTPUT_FORMATS, "votable(foo)");

		badVotFormat2Prop = (Properties)validProp.clone();
		badVotFormat2Prop.setProperty(KEY_OUTPUT_FORMATS, "vot(,foo)");

		badVotFormat3Prop = (Properties)validProp.clone();
		badVotFormat3Prop.setProperty(KEY_OUTPUT_FORMATS, "text, vot(TD");

		badVotFormat4Prop = (Properties)validProp.clone();
		badVotFormat4Prop.setProperty(KEY_OUTPUT_FORMATS, "vot(TD, text");

		badVotFormat5Prop = (Properties)validProp.clone();
		badVotFormat5Prop.setProperty(KEY_OUTPUT_FORMATS, "vot(TD, 1.0, foo)");

		badVotFormat6Prop = (Properties)validProp.clone();
		badVotFormat6Prop.setProperty(KEY_OUTPUT_FORMATS, "vot:application/xml:votable:foo");

		unknownFormatProp = (Properties)validProp.clone();
		unknownFormatProp.setProperty(KEY_OUTPUT_FORMATS, "foo");

		// --- Async jobs limit variants ---
		maxAsyncProp = (Properties)validProp.clone();
		maxAsyncProp.setProperty(KEY_MAX_ASYNC_JOBS, "10");

		negativeMaxAsyncProp = (Properties)validProp.clone();
		negativeMaxAsyncProp.setProperty(KEY_MAX_ASYNC_JOBS, "-2");

		notIntMaxAsyncProp = (Properties)validProp.clone();
		notIntMaxAsyncProp.setProperty(KEY_MAX_ASYNC_JOBS, "foo");

		// --- Output limit variants (with and without the 'R' rows unit) ---
		defaultOutputLimitProp = (Properties)validProp.clone();
		defaultOutputLimitProp.setProperty(KEY_DEFAULT_OUTPUT_LIMIT, "100");

		maxOutputLimitProp = (Properties)validProp.clone();
		maxOutputLimitProp.setProperty(KEY_MAX_OUTPUT_LIMIT, "1000R");

		bothOutputLimitGoodProp = (Properties)validProp.clone();
		bothOutputLimitGoodProp.setProperty(KEY_DEFAULT_OUTPUT_LIMIT, "100R");
		bothOutputLimitGoodProp.setProperty(KEY_MAX_OUTPUT_LIMIT, "1000");

		// "bad" because the default limit is greater than the max one:
		bothOutputLimitBadProp = (Properties)validProp.clone();
		bothOutputLimitBadProp.setProperty(KEY_DEFAULT_OUTPUT_LIMIT, "1000");
		bothOutputLimitBadProp.setProperty(KEY_MAX_OUTPUT_LIMIT, "100");

		// --- Fetch size variants (sync & async) ---
		syncFetchSizeProp = (Properties)validProp.clone();
		syncFetchSizeProp.setProperty(KEY_SYNC_FETCH_SIZE, "50");

		notIntSyncFetchSizeProp = (Properties)validProp.clone();
		notIntSyncFetchSizeProp.setProperty(KEY_SYNC_FETCH_SIZE, "foo");

		negativeSyncFetchSizeProp = (Properties)validProp.clone();
		negativeSyncFetchSizeProp.setProperty(KEY_SYNC_FETCH_SIZE, "-3");

		asyncFetchSizeProp = (Properties)validProp.clone();
		asyncFetchSizeProp.setProperty(KEY_ASYNC_FETCH_SIZE, "50");

		notIntAsyncFetchSizeProp = (Properties)validProp.clone();
		notIntAsyncFetchSizeProp.setProperty(KEY_ASYNC_FETCH_SIZE, "foo");

		negativeAsyncFetchSizeProp = (Properties)validProp.clone();
		negativeAsyncFetchSizeProp.setProperty(KEY_ASYNC_FETCH_SIZE, "-3");

		// --- User identifier variants ---
		userIdentProp = (Properties)validProp.clone();
		userIdentProp.setProperty(KEY_USER_IDENTIFIER, "{tap.config.TestConfigurableServiceConnection$UserIdentifierTest}");

		notClassPathUserIdentProp = (Properties)validProp.clone();
		notClassPathUserIdentProp.setProperty(KEY_USER_IDENTIFIER, "foo");

		// --- Allowed coordinate systems variants ---
		coordSysProp = (Properties)validProp.clone();
		coordSysProp.setProperty(KEY_COORD_SYS, "icrs * *, ICrs * (Spherical2| CARTEsian2)");

		noneCoordSysProp = (Properties)validProp.clone();
		noneCoordSysProp.setProperty(KEY_COORD_SYS, VALUE_NONE);

		anyCoordSysProp = (Properties)validProp.clone();
		anyCoordSysProp.setProperty(KEY_COORD_SYS, VALUE_ANY);

		// "none" listed among real coordinate systems (should be rejected or ignored):
		noneInsideCoordSysProp = (Properties)validProp.clone();
		noneInsideCoordSysProp.setProperty(KEY_COORD_SYS, " ICRS * *, none, FK4 (GEOCENTER|heliocenter) *");

		unknownCoordSysProp = (Properties)validProp.clone();
		unknownCoordSysProp.setProperty(KEY_COORD_SYS, "ICRS foo *");

		// --- Allowed geometrical functions variants ---
		geometriesProp = (Properties)validProp.clone();
		geometriesProp.setProperty(KEY_GEOMETRIES, "point, CIRCle , cONTAins,intersECTS");

		noneGeomProp = (Properties)validProp.clone();
		noneGeomProp.setProperty(KEY_GEOMETRIES, VALUE_NONE);

		anyGeomProp = (Properties)validProp.clone();
		anyGeomProp.setProperty(KEY_GEOMETRIES, VALUE_ANY);

		// "none" listed among real geometries (should be rejected or ignored):
		noneInsideGeomProp = (Properties)validProp.clone();
		noneInsideGeomProp.setProperty(KEY_GEOMETRIES, "POINT, Box, none, circle");

		unknownGeomProp = (Properties)validProp.clone();
		unknownGeomProp.setProperty(KEY_GEOMETRIES, "POINT, Contains, foo, circle,Polygon");

		// --- UDF declaration variants ---
		anyUdfsProp = (Properties)validProp.clone();
		anyUdfsProp.setProperty(KEY_UDFS, VALUE_ANY);

		noneUdfsProp = (Properties)validProp.clone();
		noneUdfsProp.setProperty(KEY_UDFS, VALUE_NONE);

		udfsProp = (Properties)validProp.clone();
		udfsProp.setProperty(KEY_UDFS, "[toto(a string)] , [ titi(b REAL) -> double ]");

		udfsWithClassNameProp = (Properties)validProp.clone();
		udfsWithClassNameProp.setProperty(KEY_UDFS, "[toto(a string)->VARCHAR, {adql.db.TestDBChecker$UDFToto}]");

		// NONE/ANY mixed with an explicit UDF list (invalid combination):
		udfsListWithNONEorANYProp = (Properties)validProp.clone();
		udfsListWithNONEorANYProp.setProperty(KEY_UDFS, "[toto(a string)->VARCHAR],ANY");

		// a UDF item with 3 parts instead of the expected 1 or 2:
		udfsWithWrongParamLengthProp = (Properties)validProp.clone();
		udfsWithWrongParamLengthProp.setProperty(KEY_UDFS, "[toto(a string)->VARCHAR, {adql.db.TestDBChecker$UDFToto}, foo]");

		udfsWithMissingBracketsProp = (Properties)validProp.clone();
		udfsWithMissingBracketsProp.setProperty(KEY_UDFS, "toto(a string)->VARCHAR");

		// a class name alone, without the mandatory function signature:
		udfsWithMissingDefProp1 = (Properties)validProp.clone();
		udfsWithMissingDefProp1.setProperty(KEY_UDFS, "[{adql.db.TestDBChecker$UDFToto}]");

		udfsWithMissingDefProp2 = (Properties)validProp.clone();
		udfsWithMissingDefProp2.setProperty(KEY_UDFS, "[,{adql.db.TestDBChecker$UDFToto}]");

		emptyUdfItemProp1 = (Properties)validProp.clone();
		emptyUdfItemProp1.setProperty(KEY_UDFS, "[ ]");

		emptyUdfItemProp2 = (Properties)validProp.clone();
		emptyUdfItemProp2.setProperty(KEY_UDFS, "[ , ]");

		udfWithMissingEndBracketProp = (Properties)validProp.clone();
		udfWithMissingEndBracketProp.setProperty(KEY_UDFS, "[toto(a string)->VARCHAR");

		// --- Custom TAPFactory variants ---
		customFactoryProp = (Properties)validProp.clone();
		customFactoryProp.setProperty(KEY_TAP_FACTORY, "{tap.config.TestConfigurableServiceConnection$CustomTAPFactory}");

		// "bad" because the class does not provide the expected constructor:
		badCustomFactoryProp = (Properties)validProp.clone();
		badCustomFactoryProp.setProperty(KEY_TAP_FACTORY, "{tap.config.TestConfigurableServiceConnection$BadCustomTAPFactory}");
	}
+ * + * @see ConfigurableServiceConnection#DefaultServiceConnection(Properties) + */ + @Test + public void testDefaultServiceConnectionProperties(){ + // Valid Configuration File: + PrintWriter writer = null; + int nbSchemas = -1, nbTables = -1; + try{ + // build the ServiceConnection: + ServiceConnection connection = new ConfigurableServiceConnection(validProp); + + // tests: + assertNotNull(connection.getLogger()); + assertEquals(LogLevel.DEBUG, ((DefaultUWSLog)connection.getLogger()).getMinLogLevel()); + assertNotNull(connection.getFileManager()); + assertEquals("daily at 00:00", ((LocalUWSFileManager)connection.getFileManager()).getLogRotationFreq()); + assertNotNull(connection.getFactory()); + assertNotNull(connection.getTAPMetadata()); + assertTrue(connection.getTAPMetadata().getNbSchemas() >= 1); + assertTrue(connection.getTAPMetadata().getNbTables() >= 5); + assertFalse(connection.isAvailable()); + assertEquals(DEFAULT_MAX_ASYNC_JOBS, connection.getNbMaxAsyncJobs()); + assertTrue(connection.getRetentionPeriod()[0] <= connection.getRetentionPeriod()[1]); + assertTrue(connection.getExecutionDuration()[0] <= connection.getExecutionDuration()[1]); + assertNull(connection.getUserIdentifier()); + assertNull(connection.getGeometries()); + assertEquals(0, connection.getUDFs().size()); + assertNotNull(connection.getFetchSize()); + assertEquals(2, connection.getFetchSize().length); + assertEquals(DEFAULT_ASYNC_FETCH_SIZE, connection.getFetchSize()[0]); + assertEquals(DEFAULT_SYNC_FETCH_SIZE, connection.getFetchSize()[1]); + + // finally, save metadata in an XML file for the other tests: + writer = new PrintWriter(new File(XML_FILE)); + connection.getTAPMetadata().write(writer); + nbSchemas = connection.getTAPMetadata().getNbSchemas(); + nbTables = connection.getTAPMetadata().getNbTables(); + + }catch(Exception e){ + fail("This MUST have succeeded because the property file is valid! 
\nCaught exception: " + getPertinentMessage(e)); + }finally{ + if (writer != null) + writer.close(); + } + + // Valid XML metadata: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(xmlMetaProp); + assertNotNull(connection.getLogger()); + assertEquals(LogLevel.DEBUG, ((DefaultUWSLog)connection.getLogger()).getMinLogLevel()); + assertNotNull(connection.getFileManager()); + assertEquals("daily at 00:00", ((LocalUWSFileManager)connection.getFileManager()).getLogRotationFreq()); + assertNotNull(connection.getFactory()); + assertNotNull(connection.getTAPMetadata()); + assertEquals(nbSchemas, connection.getTAPMetadata().getNbSchemas()); + assertEquals(nbTables, connection.getTAPMetadata().getNbTables()); + assertFalse(connection.isAvailable()); + assertEquals(DEFAULT_MAX_ASYNC_JOBS, connection.getNbMaxAsyncJobs()); + assertTrue(connection.getRetentionPeriod()[0] <= connection.getRetentionPeriod()[1]); + assertTrue(connection.getExecutionDuration()[0] <= connection.getExecutionDuration()[1]); + assertNull(connection.getUserIdentifier()); + assertNull(connection.getGeometries()); + assertEquals(0, connection.getUDFs().size()); + assertNotNull(connection.getFetchSize()); + assertEquals(2, connection.getFetchSize().length); + assertEquals(DEFAULT_ASYNC_FETCH_SIZE, connection.getFetchSize()[0]); + assertEquals(DEFAULT_SYNC_FETCH_SIZE, connection.getFetchSize()[1]); + }catch(Exception e){ + fail("This MUST have succeeded because the property file is valid! \nCaught exception: " + getPertinentMessage(e)); + } + + // Missing metadata property: + try{ + new ConfigurableServiceConnection(missingMetaProp); + fail("This MUST have failed because the property 'metadata' is missing!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("The property \"" + KEY_METADATA + "\" is missing! It is required to create a TAP Service. 
Three possible values: " + VALUE_XML + " (to get metadata from a TableSet XML document), " + VALUE_DB + " (to fetch metadata from the database schema TAP_SCHEMA) or the name (between {}) of a class extending TAPMetadata.", e.getMessage()); + } + + // Missing metadata_file property: + try{ + new ConfigurableServiceConnection(missingMetaFileProp); + fail("This MUST have failed because the property 'metadata_file' is missing!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("The property \"" + KEY_METADATA_FILE + "\" is missing! According to the property \"" + KEY_METADATA + "\", metadata must be fetched from an XML document. The local file path of it MUST be provided using the property \"" + KEY_METADATA_FILE + "\".", e.getMessage()); + } + + // Wrong metadata property: + try{ + new ConfigurableServiceConnection(wrongMetaProp); + fail("This MUST have failed because the property 'metadata' has a wrong value!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Unsupported value for the property \"" + KEY_METADATA + "\": \"foo\"! Only two values are allowed: " + VALUE_XML + " (to get metadata from a TableSet XML document) or " + VALUE_DB + " (to fetch metadata from the database schema TAP_SCHEMA).", e.getMessage()); + } + + // Wrong MANUAL metadata: + try{ + new ConfigurableServiceConnection(wrongManualMetaProp); + fail("This MUST have failed because the class specified in the property 'metadata' does not extend TAPMetadata but is TAPMetadata!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Wrong class for the property \"" + KEY_METADATA + "\": \"tap.metadata.TAPMetadata\"! 
The class provided in this property MUST EXTEND tap.metadata.TAPMetadata.", e.getMessage()); + } + + // Wrong metadata_file property: + try{ + new ConfigurableServiceConnection(wrongMetaFileProp); + fail("This MUST have failed because the property 'metadata_file' has a wrong value!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("A grave error occurred while reading/parsing the TableSet XML document: \"foo\"!", e.getMessage()); + } + + // No File Manager: + try{ + new ConfigurableServiceConnection(noFmProp); + fail("This MUST have failed because no File Manager is specified!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("The property \"" + KEY_FILE_MANAGER + "\" is missing! It is required to create a TAP Service. Two possible values: " + VALUE_LOCAL + " or a class name between {...}.", e.getMessage()); + } + + // File Manager = Class Name: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(fmClassNameProp); + assertNotNull(connection.getLogger()); + assertEquals(LogLevel.DEBUG, ((DefaultUWSLog)connection.getLogger()).getMinLogLevel()); + assertNotNull(connection.getFileManager()); + assertEquals("daily at 00:00", ((LocalUWSFileManager)connection.getFileManager()).getLogRotationFreq()); + assertNotNull(connection.getFactory()); + assertNotNull(connection.getTAPMetadata()); + assertFalse(connection.isAvailable()); + + /* Retention periods and execution durations are different in this configuration file from the valid one (validProp). + * Max period and max duration are set in this file as less than respectively the default period and the default duration. + * In such situation, the default period/duration is set to the maximum one, in order to ensure that the maximum value is + * still greater or equals than the default one. So the max and default values must be equal there. 
+ */ + assertTrue(connection.getRetentionPeriod()[0] == connection.getRetentionPeriod()[1]); + assertTrue(connection.getExecutionDuration()[0] == connection.getExecutionDuration()[1]); + }catch(Exception e){ + fail("This MUST have succeeded because the provided file manager is a class name valid! \nCaught exception: " + getPertinentMessage(e)); + } + + // Incorrect File Manager Value: + try{ + new ConfigurableServiceConnection(incorrectFmProp); + fail("This MUST have failed because an incorrect File Manager value has been provided!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Class name expected for the property \"file_manager\" instead of: \"foo\"! The specified class must extend/implement uws.service.file.UWSFileManager.", e.getMessage()); + } + + // Custom log level and log rotation: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(correctLogProp); + assertNotNull(connection.getLogger()); + assertEquals(LogLevel.WARNING, ((DefaultUWSLog)connection.getLogger()).getMinLogLevel()); + assertNotNull(connection.getFileManager()); + assertEquals("monthly on the 5th at 06:03", ((LocalUWSFileManager)connection.getFileManager()).getLogRotationFreq()); + }catch(Exception e){ + fail("This MUST have succeeded because the provided log level and log rotation are valid! \nCaught exception: " + getPertinentMessage(e)); + } + + // Incorrect log level: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(incorrectLogLevelProp); + assertNotNull(connection.getLogger()); + assertEquals(LogLevel.DEBUG, ((DefaultUWSLog)connection.getLogger()).getMinLogLevel()); + }catch(Exception e){ + fail("This MUST have succeeded because even if the provided log level is incorrect the default behavior is to not throw exception and set the default value! 
\nCaught exception: " + getPertinentMessage(e)); + } + + // Incorrect log rotation: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(incorrectLogRotationProp); + assertNotNull(connection.getFileManager()); + assertEquals("daily at 00:00", ((LocalUWSFileManager)connection.getFileManager()).getLogRotationFreq()); + }catch(Exception e){ + fail("This MUST have succeeded because even if the provided log rotation is incorrect the default behavior is to not throw exception and set the default value! \nCaught exception: " + getPertinentMessage(e)); + } + + // Valid output formats list: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(validFormatsProp); + assertNotNull(connection.getOutputFormat(VALUE_VOTABLE)); + assertNotNull(connection.getOutputFormat(VALUE_JSON)); + assertNotNull(connection.getOutputFormat(VALUE_CSV)); + assertNotNull(connection.getOutputFormat(VALUE_TSV)); + assertNotNull(connection.getOutputFormat("psv")); + assertNotNull(connection.getOutputFormat("text/psv")); + assertNotNull(connection.getOutputFormat("text")); + assertNotNull(connection.getOutputFormat("text/plain")); + assertNotNull(connection.getOutputFormat("test")); + assertNotNull(connection.getOutputFormat("text/arobase")); + }catch(Exception e){ + fail("This MUST have succeeded because the property file is valid! 
\nCaught exception: " + getPertinentMessage(e)); + } + + // Valid VOTable output formats list: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(validVOTableFormatsProp); + Iterator it = connection.getOutputFormats(); + OutputFormat f = it.next(); /* votable */ + assertEquals(VOTableFormat.class, f.getClass()); + assertEquals("application/x-votable+xml", f.getMimeType()); + assertEquals(VALUE_VOTABLE, f.getShortMimeType()); + assertEquals(DataFormat.BINARY, ((VOTableFormat)f).getVotSerialization()); + assertEquals(VOTableVersion.V13, ((VOTableFormat)f).getVotVersion()); + f = it.next(); /* votable():: */ + assertEquals(VOTableFormat.class, f.getClass()); + assertEquals("application/x-votable+xml", f.getMimeType()); + assertEquals(VALUE_VOTABLE, f.getShortMimeType()); + assertEquals(DataFormat.BINARY, ((VOTableFormat)f).getVotSerialization()); + assertEquals(VOTableVersion.V13, ((VOTableFormat)f).getVotVersion()); + f = it.next(); /* vot() */ + assertEquals(VOTableFormat.class, f.getClass()); + assertEquals("application/x-votable+xml", f.getMimeType()); + assertEquals(VALUE_VOTABLE, f.getShortMimeType()); + assertEquals(DataFormat.BINARY, ((VOTableFormat)f).getVotSerialization()); + assertEquals(VOTableVersion.V13, ((VOTableFormat)f).getVotVersion()); + f = it.next(); /* vot:: */ + assertEquals(VOTableFormat.class, f.getClass()); + assertEquals("application/x-votable+xml", f.getMimeType()); + assertEquals(VALUE_VOTABLE, f.getShortMimeType()); + assertEquals(DataFormat.BINARY, ((VOTableFormat)f).getVotSerialization()); + assertEquals(VOTableVersion.V13, ((VOTableFormat)f).getVotVersion()); + f = it.next(); /* votable: */ + assertEquals(VOTableFormat.class, f.getClass()); + assertEquals("application/x-votable+xml", f.getMimeType()); + assertEquals(VALUE_VOTABLE, f.getShortMimeType()); + assertEquals(DataFormat.BINARY, ((VOTableFormat)f).getVotSerialization()); + assertEquals(VOTableVersion.V13, ((VOTableFormat)f).getVotVersion()); + f = 
it.next(); /* votable(Td, 1.0) */ + assertEquals(VOTableFormat.class, f.getClass()); + assertEquals("application/x-votable+xml;serialization=TABLEDATA", f.getMimeType()); + assertEquals("votable/td", f.getShortMimeType()); + assertEquals(DataFormat.TABLEDATA, ((VOTableFormat)f).getVotSerialization()); + assertEquals(VOTableVersion.V10, ((VOTableFormat)f).getVotVersion()); + f = it.next(); /* votable(TableData) */ + assertEquals(VOTableFormat.class, f.getClass()); + assertEquals("application/x-votable+xml;serialization=TABLEDATA", f.getMimeType()); + assertEquals("votable/td", f.getShortMimeType()); + assertEquals(DataFormat.TABLEDATA, ((VOTableFormat)f).getVotSerialization()); + assertEquals(VOTableVersion.V13, ((VOTableFormat)f).getVotVersion()); + f = it.next(); /* votable(, 1.2) */ + assertEquals(VOTableFormat.class, f.getClass()); + assertEquals("application/x-votable+xml", f.getMimeType()); + assertEquals(VALUE_VOTABLE, f.getShortMimeType()); + assertEquals(DataFormat.BINARY, ((VOTableFormat)f).getVotSerialization()); + assertEquals(VOTableVersion.V12, ((VOTableFormat)f).getVotVersion()); + f = it.next(); /* vot(fits):application/fits,supervot */ + assertEquals(VOTableFormat.class, f.getClass()); + assertEquals("application/fits", f.getMimeType()); + assertEquals("supervot", f.getShortMimeType()); + assertEquals(DataFormat.FITS, ((VOTableFormat)f).getVotSerialization()); + assertEquals(VOTableVersion.V13, ((VOTableFormat)f).getVotVersion()); + assertFalse(it.hasNext()); + }catch(Exception e){ + fail("This MUST have succeeded because the property file is valid! \nCaught exception: " + getPertinentMessage(e)); + } + + // Bad SV(...) 
format 1 = "sv": + try{ + new ConfigurableServiceConnection(badSVFormat1Prop); + fail("This MUST have failed because an incorrect SV output format value has been provided!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Missing separator char/string for the SV output format: \"sv\"!", e.getMessage()); + } + + // Bad SV(...) format 2 = "sv()": + try{ + new ConfigurableServiceConnection(badSVFormat2Prop); + fail("This MUST have failed because an incorrect SV output format value has been provided!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Missing separator char/string for the SV output format: \"sv()\"!", e.getMessage()); + } + + // Bad VOTable(...) format 1 = "votable(foo)": + try{ + new ConfigurableServiceConnection(badVotFormat1Prop); + fail("This MUST have failed because an incorrect VOTable output format value has been provided!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Unsupported VOTable serialization: \"foo\"! Accepted values: 'binary' (or 'b'), 'binary2' (or 'b2'), 'tabledata' (or 'td') and 'fits'.", e.getMessage()); + } + + // Bad VOTable(...) format 2 = "votable(,foo)": + try{ + new ConfigurableServiceConnection(badVotFormat2Prop); + fail("This MUST have failed because an incorrect VOTable output format value has been provided!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Unsupported VOTable version: \"foo\"! Accepted values: '1.0' (or 'v1.0'), '1.1' (or 'v1.1'), '1.2' (or 'v1.2') and '1.3' (or 'v1.3').", e.getMessage()); + } + + // Bad VOTable(...) 
format 3 = "text, vot(TD": + try{ + new ConfigurableServiceConnection(badVotFormat3Prop); + fail("This MUST have failed because an incorrect VOTable output format value has been provided!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Wrong output format specification syntax in: \"vot(TD\"! A VOTable parameters list must end with ')'.", e.getMessage()); + } + + // Bad VOTable(...) format 4 = "vot(TD, text": + try{ + new ConfigurableServiceConnection(badVotFormat4Prop); + fail("This MUST have failed because an incorrect VOTable output format value has been provided!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Missing right parenthesis in: \"vot(TD, text\"!", e.getMessage()); + } + + // Bad VOTable(...) format 5 = "vot(TD, 1.0, foo)": + try{ + new ConfigurableServiceConnection(badVotFormat5Prop); + fail("This MUST have failed because an incorrect VOTable output format value has been provided!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Wrong number of parameters for the output format VOTable: \"vot(TD, 1.0, foo)\"! Only two parameters may be provided: serialization and version.", e.getMessage()); + } + + // Bad VOTable(...) format 6 = "vot:application/xml:votable:foo": + try{ + new ConfigurableServiceConnection(badVotFormat6Prop); + fail("This MUST have failed because an incorrect VOTable output format value has been provided!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Wrong output format specification syntax in: \"vot:application/xml:votable:foo\"! 
After a MIME type and a short MIME type, no more information is expected.", e.getMessage()); + } + + // Unknown output format: + try{ + new ConfigurableServiceConnection(unknownFormatProp); + fail("This MUST have failed because an incorrect output format value has been provided!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Unknown output format: foo", e.getMessage()); + } + + // Valid value for max_async_jobs: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(maxAsyncProp); + assertEquals(10, connection.getNbMaxAsyncJobs()); + }catch(Exception e){ + fail("This MUST have succeeded because a valid max_async_jobs is provided! \nCaught exception: " + getPertinentMessage(e)); + } + + // Negative value for max_async_jobs: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(negativeMaxAsyncProp); + assertEquals(-2, connection.getNbMaxAsyncJobs()); + }catch(Exception e){ + fail("This MUST have succeeded because a negative max_async_jobs is equivalent to 'no restriction'! 
\nCaught exception: " + getPertinentMessage(e)); + } + + // A not integer value for max_async_jobs: + try{ + new ConfigurableServiceConnection(notIntMaxAsyncProp); + fail("This MUST have failed because a not integer value has been provided for \"" + KEY_MAX_ASYNC_JOBS + "\"!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Integer expected for the property \"" + KEY_MAX_ASYNC_JOBS + "\", instead of: \"foo\"!", e.getMessage()); + } + + // Test with no output limit specified: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(validProp); + assertEquals(connection.getOutputLimit()[0], -1); + assertEquals(connection.getOutputLimit()[1], -1); + assertEquals(connection.getOutputLimitType()[0], LimitUnit.rows); + assertEquals(connection.getOutputLimitType()[1], LimitUnit.rows); + }catch(Exception e){ + fail("This MUST have succeeded because providing no output limit is valid! \nCaught exception: " + getPertinentMessage(e)); + } + + // Test with only a set default output limit: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(defaultOutputLimitProp); + assertEquals(connection.getOutputLimit()[0], 100); + assertEquals(connection.getOutputLimit()[1], -1); + assertEquals(connection.getOutputLimitType()[0], LimitUnit.rows); + assertEquals(connection.getOutputLimitType()[1], LimitUnit.rows); + }catch(Exception e){ + fail("This MUST have succeeded because setting the default output limit is valid! 
\nCaught exception: " + getPertinentMessage(e)); + } + + // Test with only a set maximum output limit: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(maxOutputLimitProp); + assertEquals(1000, connection.getOutputLimit()[0]); + assertEquals(1000, connection.getOutputLimit()[1]); + assertEquals(LimitUnit.rows, connection.getOutputLimitType()[0]); + assertEquals(LimitUnit.rows, connection.getOutputLimitType()[1]); + }catch(Exception e){ + fail("This MUST have succeeded because setting only the maximum output limit is valid! \nCaught exception: " + getPertinentMessage(e)); + } + + // Test with both a default and a maximum output limits where default <= max: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(bothOutputLimitGoodProp); + assertEquals(connection.getOutputLimit()[0], 100); + assertEquals(connection.getOutputLimit()[1], 1000); + assertEquals(connection.getOutputLimitType()[0], LimitUnit.rows); + assertEquals(connection.getOutputLimitType()[1], LimitUnit.rows); + }catch(Exception e){ + fail("This MUST have succeeded because the default output limit is less or equal the maximum one! \nCaught exception: " + getPertinentMessage(e)); + } + + // Test with both a default and a maximum output limits BUT where default > max: + /* In a such case, the default value is set silently to the maximum one. */ + try{ + ServiceConnection connection = new ConfigurableServiceConnection(bothOutputLimitBadProp); + assertEquals(100, connection.getOutputLimit()[1]); + assertEquals(connection.getOutputLimit()[1], connection.getOutputLimit()[0]); + assertEquals(LimitUnit.rows, connection.getOutputLimitType()[1]); + assertEquals(connection.getOutputLimitType()[1], connection.getOutputLimitType()[0]); + }catch(Exception e){ + fail("This MUST have succeeded because the default output limit is set automatically to the maximum one if bigger! \nCaught exception: " + getPertinentMessage(e)); + } + + // Test with a not integer sync. 
fetch size: + try{ + new ConfigurableServiceConnection(notIntSyncFetchSizeProp); + fail("This MUST have failed because the set sync. fetch size is not an integer!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Integer expected for the property " + KEY_SYNC_FETCH_SIZE + ": \"foo\"!", e.getMessage()); + } + + // Test with a negative sync. fetch size: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(negativeSyncFetchSizeProp); + assertNotNull(connection.getFetchSize()); + assertTrue(connection.getFetchSize().length >= 2); + assertEquals(connection.getFetchSize()[1], 0); + }catch(Exception e){ + fail("This MUST have succeeded because a negative fetch size must be set by default to 0 (meaning default JDBC driver value)! \nCaught exception: " + getPertinentMessage(e)); + } + + // Test with any valid sync. fetch size: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(syncFetchSizeProp); + assertNotNull(connection.getFetchSize()); + assertTrue(connection.getFetchSize().length >= 2); + assertEquals(connection.getFetchSize()[1], 50); + }catch(Exception e){ + fail("This MUST have succeeded because a valid fetch size has been provided! \nCaught exception: " + getPertinentMessage(e)); + } + + // Test with a not integer async. fetch size: + try{ + new ConfigurableServiceConnection(notIntAsyncFetchSizeProp); + fail("This MUST have failed because the set async. fetch size is not an integer!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Integer expected for the property " + KEY_ASYNC_FETCH_SIZE + ": \"foo\"!", e.getMessage()); + } + + // Test with a negative async. 
fetch size: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(negativeAsyncFetchSizeProp); + assertNotNull(connection.getFetchSize()); + assertTrue(connection.getFetchSize().length >= 1); + assertEquals(connection.getFetchSize()[0], 0); + }catch(Exception e){ + fail("This MUST have succeeded because a negative fetch size must be set by default to 0 (meaning default JDBC driver value)! \nCaught exception: " + getPertinentMessage(e)); + } + + // Test with any valid async. fetch size: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(asyncFetchSizeProp); + assertNotNull(connection.getFetchSize()); + assertTrue(connection.getFetchSize().length >= 1); + assertEquals(connection.getFetchSize()[0], 50); + }catch(Exception e){ + fail("This MUST have succeeded because a valid fetch size has been provided! \nCaught exception: " + getPertinentMessage(e)); + } + + // Valid user identifier: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(userIdentProp); + assertNotNull(connection.getUserIdentifier()); + assertNotNull(connection.getUserIdentifier().extractUserId(null, null)); + assertEquals("everybody", connection.getUserIdentifier().extractUserId(null, null).getID()); + }catch(Exception e){ + fail("This MUST have succeeded because the class path toward the fake UserIdentifier is correct! \nCaught exception: " + getPertinentMessage(e)); + } + + // Not a class name for user_identifier: + try{ + new ConfigurableServiceConnection(notClassPathUserIdentProp); + fail("This MUST have failed because the user_identifier value is not a class name!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Class name expected for the property \"" + KEY_USER_IDENTIFIER + "\" instead of: \"foo\"! 
The specified class must extend/implement uws.service.UserIdentifier.", e.getMessage()); + } + + // Valid geometry list: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(geometriesProp); + assertNotNull(connection.getGeometries()); + assertEquals(4, connection.getGeometries().size()); + assertEquals("POINT", ((ArrayList)connection.getGeometries()).get(0)); + assertEquals("CIRCLE", ((ArrayList)connection.getGeometries()).get(1)); + assertEquals("CONTAINS", ((ArrayList)connection.getGeometries()).get(2)); + assertEquals("INTERSECTS", ((ArrayList)connection.getGeometries()).get(3)); + }catch(Exception e){ + fail("This MUST have succeeded because the given list of geometries is correct! \nCaught exception: " + getPertinentMessage(e)); + } + + // "NONE" as geometry list: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(noneGeomProp); + assertNotNull(connection.getGeometries()); + assertEquals(0, connection.getGeometries().size()); + }catch(Exception e){ + fail("This MUST have succeeded because the given list of geometries is correct (reduced to only NONE)! \nCaught exception: " + getPertinentMessage(e)); + } + + // "ANY" as geometry list: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(anyGeomProp); + assertNull(connection.getGeometries()); + }catch(Exception e){ + fail("This MUST have succeeded because the given list of geometries is correct (reduced to only ANY)! \nCaught exception: " + getPertinentMessage(e)); + } + + // "NONE" inside a geometry list: + try{ + new ConfigurableServiceConnection(noneInsideGeomProp); + fail("This MUST have failed because the given geometry list contains at least 2 items, whose one is NONE!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("The special value \"" + VALUE_NONE + "\" can not be used inside a list! 
It MUST be used in replacement of a whole list to specify that no value is allowed.", e.getMessage()); + } + + // Unknown geometrical function: + try{ + new ConfigurableServiceConnection(unknownGeomProp); + fail("This MUST have failed because the given geometry list contains at least 1 unknown ADQL geometrical function!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Unknown ADQL geometrical function: \"foo\"!", e.getMessage()); + } + + // Valid coordinate systems list: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(coordSysProp); + assertNotNull(connection.getCoordinateSystems()); + assertEquals(2, connection.getCoordinateSystems().size()); + assertEquals("icrs * *", ((ArrayList)connection.getCoordinateSystems()).get(0)); + assertEquals("ICrs * (Spherical2| CARTEsian2)", ((ArrayList)connection.getCoordinateSystems()).get(1)); + }catch(Exception e){ + fail("This MUST have succeeded because the given list of coordinate systems is correct! \nCaught exception: " + getPertinentMessage(e)); + } + + // "NONE" as coordinate systems list: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(noneCoordSysProp); + assertNotNull(connection.getCoordinateSystems()); + assertEquals(0, connection.getCoordinateSystems().size()); + }catch(Exception e){ + fail("This MUST have succeeded because the given list of coordinate systems is correct (reduced to only NONE)! \nCaught exception: " + getPertinentMessage(e)); + } + + // "ANY" as coordinate systems list: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(anyCoordSysProp); + assertNull(connection.getCoordinateSystems()); + }catch(Exception e){ + fail("This MUST have succeeded because the given list of coordinate systems is correct (reduced to only ANY)! 
\nCaught exception: " + getPertinentMessage(e)); + } + + // "NONE" inside a coordinate systems list: + try{ + new ConfigurableServiceConnection(noneInsideCoordSysProp); + fail("This MUST have failed because the given coordinate systems list contains at least 3 items, whose one is NONE!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("The special value \"" + VALUE_NONE + "\" can not be used inside a list! It MUST be used in replacement of a whole list to specify that no value is allowed.", e.getMessage()); + } + + // Unknown coordinate system function: + try{ + new ConfigurableServiceConnection(unknownCoordSysProp); + fail("This MUST have failed because the given coordinate systems list contains at least 1 unknown coordinate system!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Incorrect coordinate system regular expression (\"ICRS foo *\"): Wrong allowed coordinate system syntax for the 1-th item: \"ICRS foo *\"! Expected: \"frameRegExp refposRegExp flavorRegExp\" ; where each xxxRegExp = (xxx | '*' | '('xxx ('|' xxx)*')'), frame=\"" + Frame.regexp + "\", refpos=\"" + RefPos.regexp + "\" and flavor=\"" + Flavor.regexp + "\" ; an empty string is also allowed and will be interpreted as '*' (so all possible values).", e.getMessage()); + } + + // "ANY" as UDFs list: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(anyUdfsProp); + assertNull(connection.getUDFs()); + }catch(Exception e){ + fail("This MUST have succeeded because the given list of UDFs is correct (reduced to only ANY)! \nCaught exception: " + getPertinentMessage(e)); + } + + // "NONE" as UDFs list: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(noneUdfsProp); + assertNotNull(connection.getUDFs()); + assertEquals(0, connection.getUDFs().size()); + }catch(Exception e){ + fail("This MUST have succeeded because the given list of UDFs is correct (reduced to only NONE)! 
\nCaught exception: " + getPertinentMessage(e)); + } + + // Valid list of UDFs: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(udfsProp); + assertNotNull(connection.getUDFs()); + assertEquals(2, connection.getUDFs().size()); + Iterator it = connection.getUDFs().iterator(); + assertEquals("toto(a VARCHAR)", it.next().toString()); + assertEquals("titi(b REAL) -> DOUBLE", it.next().toString()); + }catch(Exception e){ + fail("This MUST have succeeded because the given list of UDFs contains valid items! \nCaught exception: " + getPertinentMessage(e)); + } + + // Valid list of UDFs containing one UDF with a class name: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(udfsWithClassNameProp); + assertNotNull(connection.getUDFs()); + assertEquals(1, connection.getUDFs().size()); + FunctionDef def = connection.getUDFs().iterator().next(); + assertEquals("toto(a VARCHAR) -> VARCHAR", def.toString()); + assertEquals(UDFToto.class, def.getUDFClass()); + }catch(Exception e){ + fail("This MUST have succeeded because the given list of UDFs contains valid items! \nCaught exception: " + getPertinentMessage(e)); + } + + // "NONE" inside a UDFs list: + try{ + new ConfigurableServiceConnection(udfsListWithNONEorANYProp); + fail("This MUST have failed because the given UDFs list contains at least 2 items, whose one is ANY!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Wrong UDF declaration syntax: unexpected character at position 27 in the property " + KEY_UDFS + ": \"A\"! 
A UDF declaration must have one of the following syntaxes: \"[signature]\" or \"[signature,{className}]\".", e.getMessage()); + } + + // UDF with no brackets: + try{ + new ConfigurableServiceConnection(udfsWithMissingBracketsProp); + fail("This MUST have failed because one UDFs list item has no brackets!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Wrong UDF declaration syntax: unexpected character at position 1 in the property " + KEY_UDFS + ": \"t\"! A UDF declaration must have one of the following syntaxes: \"[signature]\" or \"[signature,{className}]\".", e.getMessage()); + } + + // UDFs whose one item have more parts than supported: + try{ + new ConfigurableServiceConnection(udfsWithWrongParamLengthProp); + fail("This MUST have failed because one UDFs list item has too many parameters!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Wrong UDF declaration syntax: only two items (signature and class name) can be given within brackets. (position in the property " + KEY_UDFS + ": 58)", e.getMessage()); + } + + // UDF with missing definition part (or wrong since there is no comma): + try{ + new ConfigurableServiceConnection(udfsWithMissingDefProp1); + fail("This MUST have failed because one UDFs list item has a wrong signature part (it has been forgotten)!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Wrong UDF declaration syntax: Wrong function definition syntax! Expected syntax: \"(?) ?\", where =\"[a-zA-Z]+[a-zA-Z0-9_]*\", =\" -> \", =\"( (, )*)\", should be one of the types described in the UPLOAD section of the TAP documentation. 
Examples of good syntax: \"foo()\", \"foo() -> VARCHAR\", \"foo(param INTEGER)\", \"foo(param1 INTEGER, param2 DOUBLE) -> DOUBLE\" (position in the property " + KEY_UDFS + ": 2-33)", e.getMessage()); + } + + // UDF with missing definition part (or wrong since there is no comma): + try{ + new ConfigurableServiceConnection(udfsWithMissingDefProp2); + fail("This MUST have failed because one UDFs list item has no signature part!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Missing UDF declaration! (position in the property " + KEY_UDFS + ": 2-2)", e.getMessage()); + } + + // Empty UDF item (without comma): + try{ + ServiceConnection connection = new ConfigurableServiceConnection(emptyUdfItemProp1); + assertNotNull(connection.getUDFs()); + assertEquals(0, connection.getUDFs().size()); + }catch(Exception e){ + fail("This MUST have succeeded because the given list of UDFs contains one empty UDF (which should be merely ignored)! \nCaught exception: " + getPertinentMessage(e)); + } + + // Empty UDF item (with comma): + try{ + ServiceConnection connection = new ConfigurableServiceConnection(emptyUdfItemProp2); + assertNotNull(connection.getUDFs()); + assertEquals(0, connection.getUDFs().size()); + }catch(Exception e){ + fail("This MUST have succeeded because the given list of UDFs contains one empty UDF (which should be merely ignored)! 
\nCaught exception: " + getPertinentMessage(e)); + } + + // UDF item without its closing bracket: + try{ + new ConfigurableServiceConnection(udfWithMissingEndBracketProp); + fail("This MUST have failed because one UDFs list item has no closing bracket!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Wrong UDF declaration syntax: missing closing bracket at position 24!", e.getMessage()); + } + + // Valid custom TAPFactory: + try{ + ServiceConnection connection = new ConfigurableServiceConnection(customFactoryProp); + assertNotNull(connection.getFactory()); + assertEquals(CustomTAPFactory.class, connection.getFactory().getClass()); + }catch(Exception e){ + fail("This MUST have succeeded because the given custom TAPFactory exists and have the required constructor! \nCaught exception: " + getPertinentMessage(e)); + } + + // Bad custom TAPFactory (required constructor missing): + try{ + new ConfigurableServiceConnection(badCustomFactoryProp); + fail("This MUST have failed because the specified TAPFactory extension does not have a constructor with ServiceConnection!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Missing constructor tap.config.TestConfigurableServiceConnection$BadCustomTAPFactory(tap.ServiceConnection)! See the value \"{tap.config.TestConfigurableServiceConnection$BadCustomTAPFactory}\" of the property \"" + KEY_TAP_FACTORY + "\".", e.getMessage()); + } + } + + @Test + public void testGetFile(){ + final String rootPath = "/ROOT", propertyName = "SuperProperty"; + String path; + + try{ + // NULL test => NULL must be returned. 
+ assertNull(ConfigurableServiceConnection.getFile(null, rootPath, propertyName)); + + // Valid file URI: + path = "/custom/user/dir"; + assertEquals(path, ConfigurableServiceConnection.getFile("file://" + path, rootPath, propertyName).getAbsolutePath()); + + // Valid absolute file path: + assertEquals(path, ConfigurableServiceConnection.getFile(path, rootPath, propertyName).getAbsolutePath()); + + // File name relative to the given rootPath: + path = "dir"; + assertEquals(rootPath + File.separator + path, ConfigurableServiceConnection.getFile(path, rootPath, propertyName).getAbsolutePath()); + + // Idem but with a relative file path: + path = "gmantele/workspace"; + assertEquals(rootPath + File.separator + path, ConfigurableServiceConnection.getFile(path, rootPath, propertyName).getAbsolutePath()); + + }catch(Exception ex){ + ex.printStackTrace(); + fail("None of these tests should have failed!"); + } + + // Test with a file URI having a bad syntax: + path = "file:#toto^foo"; + try{ + ConfigurableServiceConnection.getFile(path, rootPath, propertyName); + fail("This test should have failed, because the given file URI has a bad syntax!"); + }catch(Exception ex){ + assertEquals(TAPException.class, ex.getClass()); + assertEquals("Incorrect file URI for the property \"" + propertyName + "\": \"" + path + "\"! Bad syntax for the given file URI.", ex.getMessage()); + } + + // Test with an URL: + path = "http://www.google.com"; + try{ + ConfigurableServiceConnection.getFile(path, rootPath, propertyName); + fail("This test should have failed, because the given URI uses the HTTP protocol (actually, it uses a protocol different from \"file\"!"); + }catch(Exception ex){ + assertEquals(TAPException.class, ex.getClass()); + assertEquals("Incorrect file URI for the property \"" + propertyName + "\": \"" + path + "\"! 
Only URI with the protocol \"file:\" are allowed.", ex.getMessage()); + } + + } + + public static final String getPertinentMessage(final Exception ex){ + return (ex.getCause() == null || ex.getMessage().equals(ex.getCause().getMessage())) ? ex.getMessage() : ex.getCause().getMessage(); + } + + /** + * A UWSFileManager to test the load of a UWSFileManager from the configuration file with a class path. + * + * @author Grégory Mantelet (ARI) + * @version 01/2015 + * @see TestConfigurableServiceConnection#testDefaultServiceConnectionProperties() + */ + public static class FileManagerTest extends LocalUWSFileManager { + public FileManagerTest(Properties tapConfig) throws UWSException{ + super(new File(tapConfig.getProperty("file_root_path")), true, false); + } + } + + /** + * A UserIdentifier which always return the same user...that's to say, all users are in a way still anonymous :-) + * This class is only for test purpose. + * + * @author Grégory Mantelet (ARI) + * @version 02/2015 + */ + public static class UserIdentifierTest implements UserIdentifier { + private static final long serialVersionUID = 1L; + + private final JobOwner everybody = new DefaultJobOwner("everybody"); + + @Override + public JobOwner extractUserId(UWSUrl urlInterpreter, HttpServletRequest request) throws UWSException{ + return everybody; + } + + @Override + public JobOwner restoreUser(String id, String pseudo, Map otherData) throws UWSException{ + return everybody; + } + + } + + /** + * TAPFactory just to test whether the property tap_factory works well. 
+ * + * @author Grégory Mantelet (ARI) + * @version 02/2015 + */ + private static class CustomTAPFactory extends AbstractTAPFactory { + + private final JDBCConnection dbConn; + + public CustomTAPFactory(final ServiceConnection conn) throws DBException{ + super(conn); + dbConn = new JDBCConnection("", "jdbc:postgresql:gmantele", "gmantele", null, new PostgreSQLTranslator(), "TheOnlyConnection", conn.getLogger()); + } + + @Override + public DBConnection getConnection(final String jobID) throws TAPException{ + return dbConn; + } + + @Override + public void freeConnection(final DBConnection conn){} + + @Override + public void destroy(){ + try{ + dbConn.getInnerConnection().close(); + }catch(Exception ex){} + } + + } + + /** + * TAPFactory just to test whether the property tap_factory is rejected when no constructor with a single parameter of type ServiceConnection exists. + * + * @author Grégory Mantelet (ARI) + * @version 02/2015 + */ + private static class BadCustomTAPFactory extends AbstractTAPFactory { + + public BadCustomTAPFactory() throws DBException{ + super(null); + } + + @Override + public DBConnection getConnection(final String jobID) throws TAPException{ + return null; + } + + @Override + public void freeConnection(final DBConnection conn){} + + @Override + public void destroy(){} + + } + +} diff --git a/test/tap/config/TestConfigurableTAPFactory.java b/test/tap/config/TestConfigurableTAPFactory.java new file mode 100644 index 0000000000000000000000000000000000000000..c6bf1006c15def6481952a1cd627f37aef3e2890 --- /dev/null +++ b/test/tap/config/TestConfigurableTAPFactory.java @@ -0,0 +1,512 @@ +package tap.config; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import static tap.config.TAPConfiguration.KEY_BACKUP_BY_USER; +import static tap.config.TAPConfiguration.KEY_BACKUP_FREQUENCY; 
+import static tap.config.TAPConfiguration.KEY_DATABASE_ACCESS; +import static tap.config.TAPConfiguration.KEY_DATASOURCE_JNDI_NAME; +import static tap.config.TAPConfiguration.KEY_DB_PASSWORD; +import static tap.config.TAPConfiguration.KEY_DB_USERNAME; +import static tap.config.TAPConfiguration.KEY_JDBC_DRIVER; +import static tap.config.TAPConfiguration.KEY_JDBC_URL; +import static tap.config.TAPConfiguration.KEY_SQL_TRANSLATOR; +import static tap.config.TAPConfiguration.VALUE_JDBC; +import static tap.config.TAPConfiguration.VALUE_JNDI; +import static tap.config.TAPConfiguration.VALUE_PGSPHERE; +import static tap.config.TAPConfiguration.VALUE_POSTGRESQL; + +import java.io.File; +import java.sql.SQLException; +import java.util.Collection; +import java.util.Iterator; +import java.util.Map; +import java.util.Properties; + +import javax.naming.Context; +import javax.naming.InitialContext; +import javax.naming.NamingException; +import javax.servlet.http.HttpServletRequest; + +import org.junit.BeforeClass; +import org.junit.Test; +import org.postgresql.ds.PGSimpleDataSource; +import org.postgresql.util.PSQLException; + +import tap.ServiceConnection; +import tap.TAPException; +import tap.TAPFactory; +import tap.backup.DefaultTAPBackupManager; +import tap.db.DBConnection; +import tap.db.DBException; +import tap.db.JDBCConnection; +import tap.formatter.OutputFormat; +import tap.log.DefaultTAPLog; +import tap.log.TAPLog; +import tap.metadata.TAPMetadata; +import uws.UWSException; +import uws.job.user.JobOwner; +import uws.service.UWSService; +import uws.service.UWSUrl; +import uws.service.UserIdentifier; +import uws.service.file.LocalUWSFileManager; +import uws.service.file.UWSFileManager; +import adql.db.FunctionDef; + +public class TestConfigurableTAPFactory { + + private static Properties validJDBCProp, validJNDIProp, + incorrectDBAccessProp, missingDBAccessProp, + missingDatasourceJNDINameProp, wrongDatasourceJNDINameProp, + noJdbcProp1, noJdbcProp2, noJdbcProp3, 
badJdbcProp, + missingTranslatorProp, badTranslatorProp, badDBNameProp, + badUsernameProp, badPasswordProp, validBackupFrequency, noBackup, + userBackup, badBackupFrequency; + + private static ServiceConnection serviceConnection = null; + + private static void setJNDIDatasource() throws NamingException{ + // Create an initial JNDI context: + /* note: this requires that the simple-jndi jar is in the classpath. (https://code.google.com/p/osjava/downloads/detail?name=simple-jndi-0.11.4.1.zip&can=2&q=) */ + System.setProperty(Context.INITIAL_CONTEXT_FACTORY, "org.osjava.sj.memory.MemoryContextFactory"); + System.setProperty("org.osjava.sj.jndi.shared", "true"); // memory shared between all instances of InitialContext + + // Context initialization: + InitialContext ic = new InitialContext(); + + // Creation of a reference on a DataSource: + PGSimpleDataSource datasource = new PGSimpleDataSource(); + datasource.setServerName("localhost"); + datasource.setDatabaseName("gmantele"); + + // Link the datasource with the context: + ic.rebind("jdbc/MyDataSource", datasource); + } + + @BeforeClass + public static void beforeClass() throws Exception{ + // BUILD A FAKE SERVICE CONNECTION: + serviceConnection = new ServiceConnectionTest(); + + // LOAD ALL PROPERTIES FILES NEEDED FOR ALL THE TESTS: + validJDBCProp = AllTAPConfigTests.getValidProperties(); + + setJNDIDatasource(); + validJNDIProp = (Properties)validJDBCProp.clone(); + validJNDIProp.setProperty(KEY_DATABASE_ACCESS, "jndi"); + validJNDIProp.setProperty(KEY_DATASOURCE_JNDI_NAME, "jdbc/MyDataSource"); + validJNDIProp.remove(KEY_JDBC_URL); + validJNDIProp.remove(KEY_JDBC_DRIVER); + validJNDIProp.remove(KEY_DB_USERNAME); + validJNDIProp.remove(KEY_DB_PASSWORD); + + incorrectDBAccessProp = (Properties)validJDBCProp.clone(); + incorrectDBAccessProp.setProperty(KEY_DATABASE_ACCESS, "foo"); + + missingDBAccessProp = (Properties)validJDBCProp.clone(); + missingDBAccessProp.remove(KEY_DATABASE_ACCESS); + + 
missingDatasourceJNDINameProp = (Properties)validJNDIProp.clone(); + missingDatasourceJNDINameProp.remove(KEY_DATASOURCE_JNDI_NAME); + + wrongDatasourceJNDINameProp = (Properties)validJNDIProp.clone(); + wrongDatasourceJNDINameProp.setProperty(KEY_DATASOURCE_JNDI_NAME, "foo"); + + noJdbcProp1 = (Properties)validJDBCProp.clone(); + noJdbcProp1.remove(KEY_JDBC_DRIVER); + + noJdbcProp2 = (Properties)noJdbcProp1.clone(); + noJdbcProp2.setProperty(KEY_JDBC_URL, "jdbc:foo:gmantele"); + + noJdbcProp3 = (Properties)noJdbcProp1.clone(); + noJdbcProp3.remove(KEY_JDBC_URL); + + badJdbcProp = (Properties)validJDBCProp.clone(); + badJdbcProp.setProperty(KEY_JDBC_DRIVER, "foo"); + badJdbcProp.setProperty(KEY_JDBC_URL, "jdbc:foo:gmantele"); + + missingTranslatorProp = (Properties)validJDBCProp.clone(); + missingTranslatorProp.remove(KEY_SQL_TRANSLATOR); + + badTranslatorProp = (Properties)validJDBCProp.clone(); + badTranslatorProp.setProperty(KEY_SQL_TRANSLATOR, "foo"); + + badDBNameProp = (Properties)validJDBCProp.clone(); + badDBNameProp.setProperty(KEY_JDBC_URL, "jdbc:postgresql:foo"); + + badUsernameProp = (Properties)validJDBCProp.clone(); + badUsernameProp.setProperty(KEY_DB_USERNAME, "foo"); + + badPasswordProp = (Properties)validJDBCProp.clone(); + badPasswordProp.setProperty(KEY_DB_PASSWORD, "foo"); + + validBackupFrequency = (Properties)validJDBCProp.clone(); + validBackupFrequency.setProperty(KEY_BACKUP_FREQUENCY, "3600"); + + noBackup = (Properties)validJDBCProp.clone(); + noBackup.setProperty(KEY_BACKUP_FREQUENCY, "never"); + + userBackup = (Properties)validJDBCProp.clone(); + userBackup.setProperty(KEY_BACKUP_FREQUENCY, "user_action"); + + badBackupFrequency = (Properties)validJDBCProp.clone(); + badBackupFrequency.setProperty(KEY_BACKUP_FREQUENCY, "foo"); + } + + @Test + public void testDefaultServiceConnection(){ + // Correct Parameters (JDBC CASE): + DBConnection connection = null; + try{ + TAPFactory factory = new ConfigurableTAPFactory(serviceConnection, 
validJDBCProp); + connection = factory.getConnection("0"); + assertNotNull(connection); + assertNull(factory.createUWSBackupManager(new UWSService(factory, new LocalUWSFileManager(new File("."))))); + }catch(Exception ex){ + fail(getPertinentMessage(ex)); + }finally{ + if (connection != null){ + try{ + ((JDBCConnection)connection).getInnerConnection().close(); + connection = null; + }catch(SQLException se){} + } + } + + // Correct Parameters (JNDI CASE): + try{ + TAPFactory factory = new ConfigurableTAPFactory(serviceConnection, validJNDIProp); + connection = factory.getConnection("0"); + assertNotNull(connection); + }catch(Exception ex){ + fail(getPertinentMessage(ex)); + }finally{ + if (connection != null){ + try{ + ((JDBCConnection)connection).getInnerConnection().close(); + connection = null; + }catch(SQLException se){} + } + } + + // Incorrect database access method: + try{ + new ConfigurableServiceConnection(incorrectDBAccessProp); + fail("This MUST have failed because the value of the property '" + KEY_DATABASE_ACCESS + "' is incorrect!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("Unsupported value for the property " + KEY_DATABASE_ACCESS + ": \"foo\"! Allowed values: \"" + VALUE_JNDI + "\" or \"" + VALUE_JDBC + "\".", e.getMessage()); + } + + // Missing database access method: + try{ + new ConfigurableServiceConnection(missingDBAccessProp); + fail("This MUST have failed because the property '" + KEY_DATABASE_ACCESS + "' is missing!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("The property \"" + KEY_DATABASE_ACCESS + "\" is missing! It is required to connect to the database. 
Two possible values: \"" + VALUE_JDBC + "\" and \"" + VALUE_JNDI + "\".", e.getMessage()); + } + + // Missing JNDI name: + try{ + new ConfigurableServiceConnection(missingDatasourceJNDINameProp); + fail("This MUST have failed because the property '" + KEY_DATASOURCE_JNDI_NAME + "' is missing!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("The property \"" + KEY_DATASOURCE_JNDI_NAME + "\" is missing! Since the choosen database access method is \"" + VALUE_JNDI + "\", this property is required.", e.getMessage()); + } + + // Wrong JNDI name: + try{ + new ConfigurableServiceConnection(wrongDatasourceJNDINameProp); + fail("This MUST have failed because the value of the property '" + KEY_DATASOURCE_JNDI_NAME + "' is incorrect!"); + }catch(Exception e){ + assertEquals(TAPException.class, e.getClass()); + assertEquals("No datasource found with the JNDI name \"foo\"!", e.getMessage()); + } + + // No JDBC Driver but the database type is known: + try{ + new ConfigurableTAPFactory(serviceConnection, noJdbcProp1); + }catch(Exception ex){ + fail(getPertinentMessage(ex)); + } + + // No JDBC Driver but the database type is UNKNOWN: + try{ + new ConfigurableTAPFactory(serviceConnection, noJdbcProp2); + fail("This MUST have failed because no JDBC Driver has been successfully guessed from the database type!"); + }catch(Exception ex){ + assertEquals(TAPException.class, ex.getClass()); + assertTrue(ex.getMessage().matches("No JDBC driver known for the DBMS \"[^\\\"]*\"!")); + } + + // Missing JDBC URL: + try{ + new ConfigurableTAPFactory(serviceConnection, noJdbcProp3); + fail("This MUST have failed because the property \"" + KEY_JDBC_URL + "\" is missing!"); + }catch(Exception ex){ + assertEquals(TAPException.class, ex.getClass()); + assertTrue(ex.getMessage().matches("The property \"" + KEY_JDBC_URL + "\" is missing! 
Since the choosen database access method is \"" + VALUE_JDBC + "\", this property is required.")); + } + + // Bad JDBC Driver: + try{ + new ConfigurableTAPFactory(serviceConnection, badJdbcProp); + fail("This MUST have failed because the provided JDBC Driver doesn't exist!"); + }catch(Exception ex){ + assertEquals(DBException.class, ex.getClass()); + assertTrue(ex.getMessage().matches("Impossible to find the JDBC driver \"[^\\\"]*\" !")); + } + + // Missing Translator: + try{ + new ConfigurableTAPFactory(serviceConnection, missingTranslatorProp); + fail("This MUST have failed because the provided SQL translator is missing!"); + }catch(Exception ex){ + assertEquals(TAPException.class, ex.getClass()); + assertTrue(ex.getMessage().matches("The property \"" + KEY_SQL_TRANSLATOR + "\" is missing! ADQL queries can not be translated without it. Allowed values: \"" + VALUE_POSTGRESQL + "\", \"" + VALUE_PGSPHERE + "\" or a class path of a class implementing SQLTranslator.")); + } + + // Bad Translator: + try{ + new ConfigurableTAPFactory(serviceConnection, badTranslatorProp); + fail("This MUST have failed because the provided SQL translator is incorrect!"); + }catch(Exception ex){ + assertEquals(TAPException.class, ex.getClass()); + assertTrue(ex.getMessage().matches("Unsupported value for the property sql_translator: \"[^\\\"]*\" !")); + } + + // Bad DB Name: + try{ + new ConfigurableTAPFactory(serviceConnection, badDBNameProp); + fail("This MUST have failed because the provided database name is incorrect!"); + }catch(Exception ex){ + assertEquals(DBException.class, ex.getClass()); + assertTrue(ex.getMessage().matches("Impossible to establish a connection to the database \"[^\\\"]*\"!")); + assertEquals(PSQLException.class, ex.getCause().getClass()); + assertTrue(ex.getCause().getMessage().matches("FATAL: password authentication failed for user \"[^\\\"]*\"")); + } + + // Bad DB Username: ABORTED BECAUSE THE BAD USERNAME IS NOT DETECTED FOR THE DB WHICH HAS THE SAME NAME 
AS THE USERNAME ! + try{ + new ConfigurableTAPFactory(serviceConnection, badUsernameProp); + fail("This MUST have failed because the provided database username is incorrect!"); + }catch(Exception ex){ + assertEquals(DBException.class, ex.getClass()); + assertTrue(ex.getMessage().matches("Impossible to establish a connection to the database \"[^\\\"]*\"!")); + assertEquals(PSQLException.class, ex.getCause().getClass()); + assertTrue(ex.getCause().getMessage().matches("FATAL: password authentication failed for user \"[^\\\"]*\"")); + } + + // Bad DB Password: + try{ + new ConfigurableTAPFactory(serviceConnection, badPasswordProp); + //fail("This MUST have failed because the provided database password is incorrect!"); // NOTE: In function of the database configuration, a password may be required or not. So this test is not automatic! + }catch(Exception ex){ + assertEquals(DBException.class, ex.getClass()); + assertTrue(ex.getMessage().matches("Impossible to establish a connection to the database \"[^\\\"]*\"!")); + assertEquals(PSQLException.class, ex.getCause().getClass()); + assertTrue(ex.getCause().getMessage().matches("FATAL: password authentication failed for user \"[^\\\"]*\"")); + } + + // Valid backup frequency: + try{ + ConfigurableTAPFactory factory = new ConfigurableTAPFactory(serviceConnection, validBackupFrequency); + DefaultTAPBackupManager backupManager = (DefaultTAPBackupManager)factory.createUWSBackupManager(new UWSService(factory, new LocalUWSFileManager(new File("/tmp")))); + assertEquals(3600L, backupManager.getBackupFreq()); + }catch(Exception ex){ + fail(getPertinentMessage(ex)); + } + + // No backup: + try{ + ConfigurableTAPFactory factory = new ConfigurableTAPFactory(serviceConnection, noBackup); + assertNull(factory.createUWSBackupManager(new UWSService(factory, new LocalUWSFileManager(new File("/tmp"))))); + }catch(Exception ex){ + fail(getPertinentMessage(ex)); + } + + // User backup: + try{ + UWSService uws; + UserIdentifier userIdent = new 
UserIdentifier(){ + private static final long serialVersionUID = 1L; + + @Override + public JobOwner restoreUser(String id, String pseudo, Map otherData) throws UWSException{ + return null; + } + + @Override + public JobOwner extractUserId(UWSUrl urlInterpreter, HttpServletRequest request) throws UWSException{ + return null; + } + }; + /* The value user_action has no effect if the by_user mode is not enabled. + * So, if this value is given, it's falling back to manual.*/ + userBackup.setProperty(KEY_BACKUP_BY_USER, "false"); + ConfigurableTAPFactory factory = new ConfigurableTAPFactory(serviceConnection, userBackup); + uws = new UWSService(factory, new LocalUWSFileManager(new File("/tmp"))); + DefaultTAPBackupManager backupManager = (DefaultTAPBackupManager)factory.createUWSBackupManager(uws); + assertEquals(DefaultTAPBackupManager.MANUAL, backupManager.getBackupFreq()); + + /* After having enabled the by_user mode, it should now work. */ + userBackup.setProperty(KEY_BACKUP_BY_USER, "true"); + factory = new ConfigurableTAPFactory(serviceConnection, userBackup); + uws = new UWSService(factory, new LocalUWSFileManager(new File("/tmp"))); + uws.setUserIdentifier(userIdent); + backupManager = (DefaultTAPBackupManager)factory.createUWSBackupManager(uws); + assertEquals(DefaultTAPBackupManager.AT_USER_ACTION, backupManager.getBackupFreq()); + }catch(Exception ex){ + fail(getPertinentMessage(ex)); + } + + // Bad backup frequency: + try{ + new ConfigurableTAPFactory(serviceConnection, badBackupFrequency); + }catch(Exception ex){ + assertEquals(TAPException.class, ex.getClass()); + assertEquals("Long expected for the property \"" + KEY_BACKUP_FREQUENCY + "\", instead of: \"foo\"!", ex.getMessage()); + } + } + + public static final String getPertinentMessage(final Exception ex){ + return (ex.getCause() == null || ex.getMessage().equals(ex.getCause().getMessage())) ? 
ex.getMessage() : ex.getCause().getMessage(); + } + + public static class ServiceConnectionTest implements ServiceConnection { + + private TAPLog logger = new DefaultTAPLog((UWSFileManager)null); + private boolean isAvailable = true; + + @Override + public String getProviderName(){ + return null; + } + + @Override + public String getProviderDescription(){ + return null; + } + + @Override + public boolean isAvailable(){ + return isAvailable; + } + + @Override + public String getAvailability(){ + return null; + } + + @Override + public int[] getRetentionPeriod(){ + return null; + } + + @Override + public int[] getExecutionDuration(){ + return null; + } + + @Override + public int[] getOutputLimit(){ + return null; + } + + @Override + public tap.ServiceConnection.LimitUnit[] getOutputLimitType(){ + return null; + } + + @Override + public UserIdentifier getUserIdentifier(){ + return null; + } + + @Override + public boolean uploadEnabled(){ + return false; + } + + @Override + public int[] getUploadLimit(){ + return null; + } + + @Override + public tap.ServiceConnection.LimitUnit[] getUploadLimitType(){ + return null; + } + + @Override + public int getMaxUploadSize(){ + return 0; + } + + @Override + public TAPMetadata getTAPMetadata(){ + return null; + } + + @Override + public Collection getCoordinateSystems(){ + return null; + } + + @Override + public TAPLog getLogger(){ + return logger; + } + + @Override + public TAPFactory getFactory(){ + return null; + } + + @Override + public UWSFileManager getFileManager(){ + return null; + } + + @Override + public Iterator getOutputFormats(){ + return null; + } + + @Override + public OutputFormat getOutputFormat(String mimeOrAlias){ + return null; + } + + @Override + public void setAvailable(boolean isAvailable, String message){ + this.isAvailable = isAvailable; + } + + @Override + public Collection getGeometries(){ + return null; + } + + @Override + public Collection getUDFs(){ + return null; + } + + @Override + public int 
getNbMaxAsyncJobs(){ + return -1; + } + + @Override + public int[] getFetchSize(){ + return null; + } + } + +} diff --git a/test/tap/config/TestTAPConfiguration.java b/test/tap/config/TestTAPConfiguration.java new file mode 100644 index 0000000000000000000000000000000000000000..61a4104250964ee88730598361fd0985d488f4b5 --- /dev/null +++ b/test/tap/config/TestTAPConfiguration.java @@ -0,0 +1,416 @@ +package tap.config; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import static tap.config.TAPConfiguration.KEY_DEFAULT_OUTPUT_LIMIT; +import static tap.config.TAPConfiguration.KEY_FILE_MANAGER; +import static tap.config.TAPConfiguration.KEY_MAX_OUTPUT_LIMIT; +import static tap.config.TAPConfiguration.fetchClass; +import static tap.config.TAPConfiguration.isClassName; +import static tap.config.TAPConfiguration.newInstance; +import static tap.config.TAPConfiguration.parseLimit; + +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStream; + +import org.junit.Before; +import org.junit.Test; + +import tap.ServiceConnection.LimitUnit; +import tap.TAPException; +import tap.metadata.TAPMetadata; +import tap.metadata.TAPSchema; +import adql.query.ColumnReference; + +public class TestTAPConfiguration { + + @Before + public void setUp() throws Exception{} + + /** + * TEST isClassName(String): + * - null, "", "{}", "an incorrect syntax" => FALSE must be returned + * - "{ }", "{ }", "{class.path}", "{ class.path }" => TRUE must be returned + * + * @see ConfigurableServiceConnection#isClassName(String) + */ + @Test + public void testIsClassPath(){ + // NULL and EMPTY: + assertFalse(isClassName(null)); + assertFalse(isClassName("")); + + // EMPTY CLASSPATH: + 
assertFalse(isClassName("{}")); + + // INCORRECT CLASSPATH: + assertFalse(isClassName("incorrect class name ; missing {}")); + + // VALID CLASSPATH: + assertTrue(isClassName("{class.path}")); + + // CLASSPATH VALID ONLY IN THE SYNTAX: + assertTrue(isClassName("{ }")); + assertTrue(isClassName("{ }")); + + // NOT TRIM CLASSPATH: + assertTrue(isClassName("{ class.name }")); + } + + /** + * TEST getClass(String,String,String): + * - null, "", "{}", "an incorrect syntax", "{ }", "{ }" => NULL must be returned + * - "{java.lang.String}", "{ java.lang.String }" => a valid DefaultServiceConnection must be returned + * - "{mypackage.foo}", "{java.util.ArrayList}" (while a String is expected) => a TAPException must be thrown + */ + @Test + public void testGetClassStringStringString(){ + // NULL and EMPTY: + try{ + assertNull(fetchClass(null, KEY_FILE_MANAGER, String.class)); + }catch(TAPException e){ + fail("If a NULL value is provided as class name: getClass(...) MUST return null!\nCaught exception: " + getPertinentMessage(e)); + } + try{ + assertNull(fetchClass("", KEY_FILE_MANAGER, String.class)); + }catch(TAPException e){ + fail("If an EMPTY value is provided as class name: getClass(...) MUST return null!\nCaught exception: " + getPertinentMessage(e)); + } + + // EMPTY CLASS NAME: + try{ + assertNull(fetchClass("{}", KEY_FILE_MANAGER, String.class)); + }catch(TAPException e){ + fail("If an EMPTY class name is provided: getClass(...) MUST return null!\nCaught exception: " + getPertinentMessage(e)); + } + + // INCORRECT SYNTAX: + try{ + assertNull(fetchClass("incorrect class name ; missing {}", KEY_FILE_MANAGER, String.class)); + }catch(TAPException e){ + fail("If an incorrect class name is provided: getClass(...) 
MUST return null!\nCaught exception: " + getPertinentMessage(e)); + } + + // VALID CLASS NAME: + try{ + Class classObject = fetchClass("{java.lang.String}", KEY_FILE_MANAGER, String.class); + assertNotNull(classObject); + assertEquals(classObject.getName(), "java.lang.String"); + }catch(TAPException e){ + fail("If a VALID class name is provided: getClass(...) MUST return a Class object of the wanted type!\nCaught exception: " + getPertinentMessage(e)); + } + + // INCORRECT CLASS NAME: + try{ + fetchClass("{mypackage.foo}", KEY_FILE_MANAGER, String.class); + fail("This MUST have failed because an incorrect class name is provided!"); + }catch(TAPException e){ + assertEquals(e.getClass(), TAPException.class); + assertEquals(e.getMessage(), "The class specified by the property \"" + KEY_FILE_MANAGER + "\" ({mypackage.foo}) can not be found."); + } + + // INCOMPATIBLE TYPES: + try{ + @SuppressWarnings("unused") + Class classObject = fetchClass("{java.util.ArrayList}", KEY_FILE_MANAGER, String.class); + fail("This MUST have failed because a class of a different type has been asked!"); + }catch(TAPException e){ + assertEquals(e.getClass(), TAPException.class); + assertEquals(e.getMessage(), "The class specified by the property \"" + KEY_FILE_MANAGER + "\" ({java.util.ArrayList}) is not implementing " + String.class.getName() + "."); + } + + // CLASS NAME VALID ONLY IN THE SYNTAX: + try{ + assertNull(fetchClass("{ }", KEY_FILE_MANAGER, String.class)); + }catch(TAPException e){ + fail("If an EMPTY class name is provided: getClass(...) MUST return null!\nCaught exception: " + getPertinentMessage(e)); + } + try{ + assertNull(fetchClass("{ }", KEY_FILE_MANAGER, String.class)); + }catch(TAPException e){ + fail("If an EMPTY class name is provided: getClass(...) 
MUST return null!\nCaught exception: " + getPertinentMessage(e)); + } + + // NOT TRIM CLASS NAME: + try{ + Class classObject = fetchClass("{ java.lang.String }", KEY_FILE_MANAGER, String.class); + assertNotNull(classObject); + assertEquals(classObject.getName(), "java.lang.String"); + }catch(TAPException e){ + fail("If a VALID class name is provided: getClass(...) MUST return a Class object of the wanted type!\nCaught exception: " + getPertinentMessage(e)); + } + } + + @Test + public void testNewInstance(){ + // VALID CONSTRUCTOR with no parameters: + try{ + TAPMetadata metadata = newInstance("{tap.metadata.TAPMetadata}", "metadata", TAPMetadata.class); + assertNotNull(metadata); + assertEquals("tap.metadata.TAPMetadata", metadata.getClass().getName()); + }catch(Exception ex){ + ex.printStackTrace(); + fail("This test should have succeeded: the parameters of newInstance(...) are all valid."); + } + + // VALID CONSTRUCTOR with some parameters: + try{ + final String schemaName = "MySuperSchema", description = "And its less super description.", utype = "UTYPE"; + TAPSchema schema = newInstance("{tap.metadata.TAPSchema}", "schema", TAPSchema.class, new Class[]{String.class,String.class,String.class}, new String[]{schemaName,description,utype}); + assertNotNull(schema); + assertEquals("tap.metadata.TAPSchema", schema.getClass().getName()); + assertEquals(schemaName, schema.getADQLName()); + assertEquals(description, schema.getDescription()); + assertEquals(utype, schema.getUtype()); + }catch(Exception ex){ + ex.printStackTrace(); + fail("This test should have succeeded: the constructor TAPSchema(String,String,String) exists."); + } + + // VALID CONSTRUCTOR with some parameters whose the type is an extension (not the exact type): + OutputStream output = null; + File tmp = new File("tmp.empty"); + try{ + output = newInstance("{java.io.BufferedOutputStream}", "stream", OutputStream.class, new Class[]{OutputStream.class}, new OutputStream[]{new FileOutputStream(tmp)}); + 
assertNotNull(output); + assertEquals(BufferedOutputStream.class, output.getClass()); + }catch(Exception ex){ + ex.printStackTrace(); + fail("This test should have succeeded: the constructor TAPSchema(String,String,String) exists."); + }finally{ + try{ + tmp.delete(); + if (output != null) + output.close(); + }catch(IOException ioe){} + } + + // NOT A CLASS NAME: + try{ + newInstance("tap.metadata.TAPMetadata", "metadata", TAPMetadata.class); + fail("This MUST have failed because the property value is not a class name!"); + }catch(Exception ex){ + assertEquals(TAPException.class, ex.getClass()); + assertEquals("Class name expected for the property \"metadata\" instead of: \"tap.metadata.TAPMetadata\"! The specified class must extend/implement tap.metadata.TAPMetadata.", ex.getMessage()); + } + + // NO MATCHING CONSTRUCTOR: + try{ + newInstance("{tap.metadata.TAPSchema}", "schema", TAPSchema.class, new Class[]{Integer.class}, new Object[]{new Integer(123)}); + fail("This MUST have failed because the specified class does not have any expected constructor!"); + }catch(Exception ex){ + assertEquals(TAPException.class, ex.getClass()); + assertEquals("Missing constructor tap.metadata.TAPSchema(java.lang.Integer)! 
See the value \"{tap.metadata.TAPSchema}\" of the property \"schema\".", ex.getMessage()); + } + + // VALID CONSTRUCTOR with primitive type: + try{ + ColumnReference colRef = newInstance("{adql.query.ColumnReference}", "colRef", ColumnReference.class, new Class[]{int.class}, new Object[]{123}); + assertNotNull(colRef); + assertEquals(ColumnReference.class, colRef.getClass()); + assertEquals(123, colRef.getColumnIndex()); + colRef = newInstance("{adql.query.ColumnReference}", "colRef", ColumnReference.class, new Class[]{int.class}, new Object[]{new Integer(123)}); + assertNotNull(colRef); + assertEquals(ColumnReference.class, colRef.getClass()); + assertEquals(123, colRef.getColumnIndex()); + }catch(Exception ex){ + ex.printStackTrace(); + fail("This test should have succeeded: the constructor ColumnReference(int) exists."); + } + + // WRONG CONSTRUCTOR with primitive type: + try{ + newInstance("{adql.query.ColumnReference}", "colRef", ColumnReference.class, new Class[]{Integer.class}, new Object[]{new Integer(123)}); + fail("This MUST have failed because the constructor of the specified class expects an int, not an java.lang.Integer!"); + }catch(Exception ex){ + assertEquals(TAPException.class, ex.getClass()); + assertEquals("Missing constructor adql.query.ColumnReference(java.lang.Integer)! 
See the value \"{adql.query.ColumnReference}\" of the property \"colRef\".", ex.getMessage()); + } + + // THE CONSTRUCTOR THROWS AN EXCEPTION: + try{ + newInstance("{tap.metadata.TAPSchema}", "schema", TAPSchema.class, new Class[]{String.class}, new Object[]{null}); + fail("This MUST have failed because the constructor of the specified class throws an exception!"); + }catch(Exception ex){ + assertEquals(TAPException.class, ex.getClass()); + assertNotNull(ex.getCause()); + assertEquals(NullPointerException.class, ex.getCause().getClass()); + assertEquals("Missing schema name!", ex.getCause().getMessage()); + } + + // THE CONSTRUCTOR THROWS A TAPEXCEPTION: + try{ + newInstance("{tap.config.TestTAPConfiguration$ClassAlwaysThrowTAPError}", "tapError", ClassAlwaysThrowTAPError.class); + fail("This MUST have failed because the constructor of the specified class throws a TAPException!"); + }catch(Exception ex){ + assertEquals(TAPException.class, ex.getClass()); + assertEquals("This error is always thrown by ClassAlwaysThrowTAPError ^^", ex.getMessage()); + } + } + + /** + * TEST parseLimit(String,String): + * - nothing, -123, 0 => {-1,LimitUnit.rows} + * - 20, 20r, 20R => {20,LimitUnit.rows} + * - 100B, 100 B => {100,LimitUnit.bytes} + * - 100kB, 100 k B => {100000,LimitUnit.bytes} + * - 100MB, 1 0 0MB => {100000000,LimitUnit.bytes} + * - 100GB, 1 0 0 G B => {100000000000,LimitUnit.bytes} + * - r => {-1,LimitUnit.rows} + * - kB => {-1,LimitUnit.bytes} + * - foo, 100b, 100TB, 1foo => an exception must occur + */ + @Test + public void testParseLimitStringString(){ + final String propertyName = KEY_DEFAULT_OUTPUT_LIMIT + " or " + KEY_MAX_OUTPUT_LIMIT; + // Test empty or negative or null values => OK! 
+ try{ + String[] testValues = new String[]{null,""," ","-123"}; + Object[] limit; + for(String v : testValues){ + limit = parseLimit(v, propertyName, false); + assertEquals(limit[0], -1); + assertEquals(limit[1], LimitUnit.rows); + } + // 0 test: + limit = parseLimit("0", propertyName, false); + assertEquals(limit[0], 0); + assertEquals(limit[1], LimitUnit.rows); + }catch(TAPException te){ + fail("All these empty limit values are valid, so these tests should have succeeded!\nCaught exception: " + getPertinentMessage(te)); + } + + // Test all accepted rows values: + try{ + String[] testValues = new String[]{"20","20r","20 R"}; + Object[] limit; + for(String v : testValues){ + limit = parseLimit(v, propertyName, false); + assertEquals(limit[0], 20); + assertEquals(limit[1], LimitUnit.rows); + } + }catch(TAPException te){ + fail("All these rows limit values are valid, so these tests should have succeeded!\nCaught exception: " + getPertinentMessage(te)); + } + + // Test all accepted bytes values: + try{ + String[] testValues = new String[]{"100B","100 B"}; + Object[] limit; + for(String v : testValues){ + limit = parseLimit(v, propertyName, true); + assertEquals(limit[0], 100); + assertEquals(limit[1], LimitUnit.bytes); + } + }catch(TAPException te){ + fail("All these bytes limit values are valid, so these tests should have succeeded!\nCaught exception: " + getPertinentMessage(te)); + } + + // Test all accepted kilo-bytes values: + try{ + String[] testValues = new String[]{"100kB","100 k B"}; + Object[] limit; + for(String v : testValues){ + limit = parseLimit(v, propertyName, true); + assertEquals(limit[0], 100); + assertEquals(limit[1], LimitUnit.kilobytes); + } + }catch(TAPException te){ + fail("All these kilo-bytes limit values are valid, so these tests should have succeeded!\nCaught exception: " + getPertinentMessage(te)); + } + + // Test all accepted mega-bytes values: + try{ + String[] testValues = new String[]{"100MB","1 0 0MB"}; + Object[] limit; + for(String 
v : testValues){ + limit = parseLimit(v, propertyName, true); + assertEquals(limit[0], 100); + assertEquals(limit[1], LimitUnit.megabytes); + } + }catch(TAPException te){ + fail("All these mega-bytes limit values are valid, so these tests should have succeeded!\nCaught exception: " + getPertinentMessage(te)); + } + + // Test all accepted giga-bytes values: + try{ + String[] testValues = new String[]{"100GB","1 0 0 G B"}; + Object[] limit; + for(String v : testValues){ + limit = parseLimit(v, propertyName, true); + assertEquals(limit[0], 100); + assertEquals(limit[1], LimitUnit.gigabytes); + } + }catch(TAPException te){ + fail("All these giga-bytes limit values are valid, so these tests should have succeeded!\nCaught exception: " + getPertinentMessage(te)); + } + + // Test with only the ROWS unit provided: + try{ + Object[] limit = parseLimit("r", propertyName, false); + assertEquals(limit[0], -1); + assertEquals(limit[1], LimitUnit.rows); + }catch(TAPException te){ + fail("Providing only the ROWS unit is valid, so this test should have succeeded!\nCaught exception: " + getPertinentMessage(te)); + } + + // Test with only the BYTES unit provided: + try{ + Object[] limit = parseLimit("kB", propertyName, true); + assertEquals(limit[0], -1); + assertEquals(limit[1], LimitUnit.kilobytes); + }catch(TAPException te){ + fail("Providing only the BYTES unit is valid, so this test should have succeeded!\nCaught exception: " + getPertinentMessage(te)); + } + + // Test with incorrect limit formats: + String[] values = new String[]{"","100","100","1"}; + String[] unitPart = new String[]{"foo","b","TB","foo"}; + for(int i = 0; i < values.length; i++){ + try{ + parseLimit(values[i] + unitPart[i], propertyName, true); + fail("This test should have failed because an incorrect limit is provided: \"" + values[i] + unitPart[i] + "\"!"); + }catch(TAPException te){ + assertEquals(te.getClass(), TAPException.class); + assertEquals(te.getMessage(), "Unknown limit unit (" + unitPart[i] + ") 
for the property " + propertyName + ": \"" + values[i] + unitPart[i] + "\"!"); + + } + } + // Test with an incorrect numeric limit value: + try{ + parseLimit("abc100b", propertyName, true); + fail("This test should have failed because an incorrect limit is provided: \"abc100b\"!"); + }catch(TAPException te){ + assertEquals(te.getClass(), TAPException.class); + assertEquals(te.getMessage(), "Integer expected for the property " + propertyName + " for the substring \"abc100\" of the whole value: \"abc100b\"!"); + } + + // Test with a BYTES unit whereas the BYTES unit is forbidden: + try{ + parseLimit("100B", propertyName, false); + fail("This test should have failed because an incorrect limit is provided: \"100B\"!"); + }catch(TAPException te){ + assertEquals(te.getClass(), TAPException.class); + assertEquals(te.getMessage(), "BYTES unit is not allowed for the property " + propertyName + " (100B)!"); + } + } + + public static final String getPertinentMessage(final Exception ex){ + return (ex.getCause() == null || ex.getMessage().equals(ex.getCause().getMessage())) ? 
ex.getMessage() : ex.getCause().getMessage(); + } + + private static class ClassAlwaysThrowTAPError { + @SuppressWarnings("unused") + public ClassAlwaysThrowTAPError() throws TAPException{ + throw new TAPException("This error is always thrown by ClassAlwaysThrowTAPError ^^"); + } + } + +} diff --git a/test/tap/data/ResultSetTableIteratorTest.java b/test/tap/data/ResultSetTableIteratorTest.java new file mode 100644 index 0000000000000000000000000000000000000000..bc7891760d5eb4396c54fe60984aa03e25fb10a5 --- /dev/null +++ b/test/tap/data/ResultSetTableIteratorTest.java @@ -0,0 +1,127 @@ +package tap.data; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.sql.Connection; +import java.sql.ResultSet; + +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import testtools.DBTools; + +public class ResultSetTableIteratorTest { + + private static Connection conn; + + @BeforeClass + public static void setUpBeforeClass() throws Exception{ + conn = DBTools.createConnection("postgresql", "127.0.0.1", null, "gmantele", "gmantele", "pwd"); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception{ + DBTools.closeConnection(conn); + } + + @Test + public void testWithRSNULL(){ + try{ + new ResultSetTableIterator(null); + fail("The constructor should have failed, because: the given ResultSet is NULL."); + }catch(Exception ex){ + assertEquals("java.lang.NullPointerException", ex.getClass().getName()); + assertEquals("Missing ResultSet object over which to iterate!", ex.getMessage()); + } + } + + @Test + public void testWithData(){ + TableIterator it = null; + try{ + ResultSet rs = DBTools.select(conn, "SELECT id, ra, deg, gmag FROM gums LIMIT 10;"); + + it = new ResultSetTableIterator(rs); + // TEST there is column metadata before starting the iteration: + assertTrue(it.getMetadata() != null); + final int expectedNbLines = 10, 
expectedNbColumns = 4; + int countLines = 0, countColumns = 0; + while(it.nextRow()){ + // count lines: + countLines++; + // reset columns count: + countColumns = 0; + while(it.hasNextCol()){ + it.nextCol(); + // count columns + countColumns++; + // TEST the column type is set (not null): + assertTrue(it.getColType() != null); + } + // TEST that all columns have been read: + assertEquals(expectedNbColumns, countColumns); + } + // TEST that all lines have been read: + assertEquals(expectedNbLines, countLines); + + }catch(Exception ex){ + ex.printStackTrace(System.err); + fail("An exception occurs while reading a correct ResultSet (containing some valid rows)."); + }finally{ + if (it != null){ + try{ + it.close(); + }catch(DataReadException dre){} + } + } + } + + @Test + public void testWithEmptySet(){ + TableIterator it = null; + try{ + ResultSet rs = DBTools.select(conn, "SELECT * FROM gums WHERE id = 'foo';"); + + it = new ResultSetTableIterator(rs); + // TEST there is column metadata before starting the iteration: + assertTrue(it.getMetadata() != null); + int countLines = 0; + // count lines: + while(it.nextRow()) + countLines++; + // TEST that no line has been read: + assertEquals(countLines, 0); + + }catch(Exception ex){ + ex.printStackTrace(System.err); + fail("An exception occurs while reading a correct ResultSet (containing some valid rows)."); + }finally{ + if (it != null){ + try{ + it.close(); + }catch(DataReadException dre){} + } + } + } + + @Test + public void testWithClosedSet(){ + try{ + // create a valid ResultSet: + ResultSet rs = DBTools.select(conn, "SELECT * FROM gums WHERE id = 'foo';"); + + // close the ResultSet: + rs.close(); + + // TRY to create a TableIterator with a closed ResultSet: + new ResultSetTableIterator(rs); + + fail("The constructor should have failed, because: the given ResultSet is closed."); + }catch(Exception ex){ + assertEquals(ex.getClass().getName(), "tap.data.DataReadException"); + } + } +} diff --git 
a/test/tap/data/VOTableIteratorTest.java b/test/tap/data/VOTableIteratorTest.java new file mode 100644 index 0000000000000000000000000000000000000000..1bf1bf0ff9bc95bc798d12223328a353c423caa1 --- /dev/null +++ b/test/tap/data/VOTableIteratorTest.java @@ -0,0 +1,196 @@ +package tap.data; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.io.BufferedInputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; + +import org.junit.Test; + +public class VOTableIteratorTest { + + public final static String directory = "/home/gmantele/workspace/tap/test/tap/data/"; + + public final static File dataVOTable = new File(directory + "testdata.vot"); + public final static File binaryVOTable = new File(directory + "testdata_binary.vot"); + + public final static File emptyVOTable = new File(directory + "emptyset.vot"); + public final static File emptyBinaryVOTable = new File(directory + "emptyset_binary.vot"); + + @Test + public void testWithNULL(){ + try{ + new VOTableIterator(null); + fail("The constructor should have failed, because: the given VOTable is NULL."); + }catch(Exception ex){ + assertEquals(ex.getClass().getName(), "java.lang.NullPointerException"); + } + } + + @Test + public void testWithData(){ + InputStream input = null; + TableIterator it = null; + try{ + input = new BufferedInputStream(new FileInputStream(dataVOTable)); + it = new VOTableIterator(input); + // TEST there is column metadata before starting the iteration: + assertTrue(it.getMetadata() != null); + final int expectedNbLines = 100, expectedNbColumns = 4; + int countLines = 0, countColumns = 0; + while(it.nextRow()){ + // count lines: + countLines++; + // reset columns count: + countColumns = 0; + while(it.hasNextCol()){ + it.nextCol(); + // count columns + countColumns++; + // TEST the column type is set (not null): + assertTrue(it.getColType() 
!= null); + } + // TEST that all columns have been read: + assertEquals(expectedNbColumns, countColumns); + } + // TEST that all lines have been read: + assertEquals(expectedNbLines, countLines); + + }catch(Exception ex){ + ex.printStackTrace(System.err); + fail("An exception occurs while reading a correct VOTable (containing some valid rows)."); + }finally{ + try{ + if (input != null) + input.close(); + }catch(IOException e){ + e.printStackTrace(); + } + if (it != null){ + try{ + it.close(); + }catch(DataReadException dre){} + } + } + } + + @Test + public void testWithBinary(){ + InputStream input = null; + TableIterator it = null; + try{ + input = new BufferedInputStream(new FileInputStream(binaryVOTable)); + it = new VOTableIterator(input); + // TEST there is column metadata before starting the iteration: + assertTrue(it.getMetadata() != null); + final int expectedNbLines = 100, expectedNbColumns = 4; + int countLines = 0, countColumns = 0; + while(it.nextRow()){ + // count lines: + countLines++; + // reset columns count: + countColumns = 0; + while(it.hasNextCol()){ + it.nextCol(); + // count columns + countColumns++; + // TEST the column type is set (not null): + assertTrue(it.getColType() != null); + } + // TEST that all columns have been read: + assertEquals(expectedNbColumns, countColumns); + } + // TEST that all lines have been read: + assertEquals(expectedNbLines, countLines); + + }catch(Exception ex){ + ex.printStackTrace(System.err); + fail("An exception occurs while reading a correct VOTable (containing some valid rows)."); + }finally{ + try{ + if (input != null) + input.close(); + }catch(IOException e){ + e.printStackTrace(); + } + if (it != null){ + try{ + it.close(); + }catch(DataReadException dre){} + } + } + } + + @Test + public void testWithEmptySet(){ + InputStream input = null; + TableIterator it = null; + try{ + input = new BufferedInputStream(new FileInputStream(emptyVOTable)); + it = new VOTableIterator(input); + // TEST there is column 
metadata before starting the iteration: + assertTrue(it.getMetadata() != null); + int countLines = 0; + // count lines: + while(it.nextRow()) + countLines++; + // TEST that no line has been read: + assertEquals(countLines, 0); + + }catch(Exception ex){ + ex.printStackTrace(System.err); + fail("An exception occurs while reading a correct VOTable (even if empty)."); + }finally{ + try{ + if (input != null) + input.close(); + }catch(IOException e){ + e.printStackTrace(); + } + if (it != null){ + try{ + it.close(); + }catch(DataReadException dre){} + } + } + } + + @Test + public void testWithEmptyBinarySet(){ + InputStream input = null; + TableIterator it = null; + try{ + input = new BufferedInputStream(new FileInputStream(emptyBinaryVOTable)); + it = new VOTableIterator(input); + // TEST there is column metadata before starting the iteration: + assertTrue(it.getMetadata() != null); + int countLines = 0; + // count lines: + while(it.nextRow()) + countLines++; + // TEST that no line has been read: + assertEquals(countLines, 0); + + }catch(Exception ex){ + ex.printStackTrace(System.err); + fail("An exception occurs while reading a correct binary VOTable (even if empty)."); + }finally{ + try{ + if (input != null) + input.close(); + }catch(IOException e){ + e.printStackTrace(); + } + if (it != null){ + try{ + it.close(); + }catch(DataReadException dre){} + } + } + } +} diff --git a/test/tap/data/emptyset.vot b/test/tap/data/emptyset.vot new file mode 100644 index 0000000000000000000000000000000000000000..fe9b28f07d38b03eb805da25fe3f0b04fe100048 --- /dev/null +++ b/test/tap/data/emptyset.vot @@ -0,0 +1,123 @@ + + + + + + + + + + + + + + + +GUMS-10 is the 10th version of the Gaia Universe Model Snapshot, a +simulation of the expected contents of the Gaia cataloge run at the +MareNostrum supercomputer. The models used and the characteristics of +GUMS-10 are described in: A.C. Robin et al, "Gaia Universe Model +Snapshot. 
A statistical analysis of the expected contents of the Gaia +catalogue", Astronomy & Astrophysics (2012), in press. For more +details see also http://gaia.am.ub.es/GUMS-10/ + + +Supernovae in the GUMS-10 simulated GAIA result set. + + +If you use this data, please acknowledge that GUMS was created using +the MareNostrum supercomputer. + + +Query successful + + +Short name for TAP service + + +TAP service title + + +Unique resource registry identifier + + +Publisher for TAP service + + +Descriptive URL for search resource + + +Individual to contact about this service + + +Intrinsic apparent V magnitude + + +Intrinsic V-I color. + + +Mean absolute V magnitude. + + +Object redshift. + + +Right ascention of the barycenter at J2010 reference epoch in the ICRS frame + + +Declination of the barycenter at J2010 reference epoch in the ICRS frame + + +Distance from the barycenter of the Solar System to the barycenter of the source at J2010 reference epoch + + +Proper motion along right ascention at J2010 reference epoch + + +Proper motion along declination at J2010 reference epoch + + +Radial Velocity at J2010 reference epoch + + +Interstellar absorption in the G band assuming the extinction law of 1989ApJ...345..245C. + + +Interstellar absorption in the V-band assuming the extinction law of 1989ApJ...345..245C. + + +Extinction parameter according to 2003A&A...409..205D. + + +GAIA G band apparent magnitude at reference epoch. The GAIA G-band has a wide bandpass between 350 and 150 nm. This is close to Johnson V for V-I between -0.4 and 1.4. + + +GAIA G_BP band apparent magnitude at reference epoch. The GAIA G_BP band has a bandpass between 350 and 770 nm. + + +GAIA G_RP band apparent magnitude at reference epoch. The GAIA G_RP band has a bandpass between 650 and 1050 nm. + + +GAIA G_RVS band apparent magnitude at reference epoch. The GAIA G_RVS band has a narrow bandpass between 850 and 880 nm. 
+ + +Supernova type (one of Ia, Ib/c, II-L, II-P) + + +GUMS source identifier + + + +GUMS extended source identifier + + + + + +
    +
    +
    diff --git a/test/tap/data/emptyset_binary.vot b/test/tap/data/emptyset_binary.vot new file mode 100644 index 0000000000000000000000000000000000000000..e123464d597c1fccbaf981edcad57d4553fc5491 --- /dev/null +++ b/test/tap/data/emptyset_binary.vot @@ -0,0 +1,125 @@ + + + + + + + + + + + + + + + +GUMS-10 is the 10th version of the Gaia Universe Model Snapshot, a +simulation of the expected contents of the Gaia cataloge run at the +MareNostrum supercomputer. The models used and the characteristics of +GUMS-10 are described in: A.C. Robin et al, "Gaia Universe Model +Snapshot. A statistical analysis of the expected contents of the Gaia +catalogue", Astronomy & Astrophysics (2012), in press. For more +details see also http://gaia.am.ub.es/GUMS-10/ + + +Supernovae in the GUMS-10 simulated GAIA result set. + + +If you use this data, please acknowledge that GUMS was created using +the MareNostrum supercomputer. + + +Query successful + + +Short name for TAP service + + +TAP service title + + +Unique resource registry identifier + + +Publisher for TAP service + + +Descriptive URL for search resource + + +Individual to contact about this service + + +Intrinsic apparent V magnitude + + +Intrinsic V-I color. + + +Mean absolute V magnitude. + + +Object redshift. + + +Right ascention of the barycenter at J2010 reference epoch in the ICRS frame + + +Declination of the barycenter at J2010 reference epoch in the ICRS frame + + +Distance from the barycenter of the Solar System to the barycenter of the source at J2010 reference epoch + + +Proper motion along right ascention at J2010 reference epoch + + +Proper motion along declination at J2010 reference epoch + + +Radial Velocity at J2010 reference epoch + + +Interstellar absorption in the G band assuming the extinction law of 1989ApJ...345..245C. + + +Interstellar absorption in the V-band assuming the extinction law of 1989ApJ...345..245C. + + +Extinction parameter according to 2003A&A...409..205D. 
+ + +GAIA G band apparent magnitude at reference epoch. The GAIA G-band has a wide bandpass between 350 and 150 nm. This is close to Johnson V for V-I between -0.4 and 1.4. + + +GAIA G_BP band apparent magnitude at reference epoch. The GAIA G_BP band has a bandpass between 350 and 770 nm. + + +GAIA G_RP band apparent magnitude at reference epoch. The GAIA G_RP band has a bandpass between 650 and 1050 nm. + + +GAIA G_RVS band apparent magnitude at reference epoch. The GAIA G_RVS band has a narrow bandpass between 850 and 880 nm. + + +Supernova type (one of Ia, Ib/c, II-L, II-P) + + +GUMS source identifier + + + +GUMS extended source identifier + + + + + + + +
    +
    +
    diff --git a/test/tap/data/testdata.vot b/test/tap/data/testdata.vot new file mode 100644 index 0000000000000000000000000000000000000000..bfec957f865ae0e186109adc3edafb4163f34872 --- /dev/null +++ b/test/tap/data/testdata.vot @@ -0,0 +1,676 @@ + + + + + + + + + + + + + + + +GUMS-10 is the 10th version of the Gaia Universe Model Snapshot, a +simulation of the expected contents of the Gaia cataloge run at the +MareNostrum supercomputer. The models used and the characteristics of +GUMS-10 are described in: A.C. Robin et al, "Gaia Universe Model +Snapshot. A statistical analysis of the expected contents of the Gaia +catalogue", Astronomy & Astrophysics (2012), in press. For more +details see also http://gaia.am.ub.es/GUMS-10/ + + +Supernovae in the GUMS-10 simulated GAIA result set. + + +If you use this data, please acknowledge that GUMS was created using +the MareNostrum supercomputer. + + +Query successful + + + +Short name for TAP service + + +TAP service title + + +Unique resource registry identifier + + +Publisher for TAP service + + +Descriptive URL for search resource + + +Individual to contact about this service + + +GUMS source identifier + + + +Right ascention of the barycenter at J2010 reference epoch in the ICRS frame + + +Declination of the barycenter at J2010 reference epoch in the ICRS frame + + +GAIA G band apparent magnitude at reference epoch. The GAIA G-band has a wide bandpass between 350 and 150 nm. This is close to Johnson V for V-I between -0.4 and 1.4. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    5227706337957249025315.19628886221935.83480695019760.0
    5228535898005569537316.14792101640836.64463270384960.0
    5228619830256467969318.33075000654637.14270906949810.0
    5228490199553540097317.27748666080737.03936222834720.0
    5228526638056079361315.53965150215536.83307992230320.0
    5228191235470000129314.54754433643337.03163818919780.0
    5228254186805657601313.51515837991237.45944945117250.0
    5228511485411459073315.1463013300537.304639204680.0
    5228510390194798593315.13872441345637.39393574196660.0
    5228207019474812929315.10509057177937.79676638844290.0
    5228645273642729473316.44559396706837.77246108902850.0
    5228210176275775489314.20230957162237.76220180744210.0
    5228340180640858113313.92694076089238.16812566865320.0
    5228338668812369921313.9007812617838.34683031113870.0
    5228678658423521281318.63831534002737.45990618237210.0
    5228643048849670145319.84261859804437.91205442803580.0
    5228662732684787713317.82238275293137.7301991426550.0
    5228557072194338817320.85113216583738.07016787573650.0
    5228059294074667009322.68360302989638.7328465130210.0
    5227925888095485953320.55388286402838.67896391879460.0
    5227935367088308225320.10129065997939.04618980406040.0
    5227934688483475457320.56765795442239.25691873314240.0
    5228419671895572481316.50119712407138.35429489661480.0
    5228404837078532097317.53040384091939.57113780233830.0
    5228457149780197377319.12995797034538.89036315744430.0
    5228411159270391809318.35016753345639.37397763646560.0
    5228411343953985537318.48229132170239.46828134668950.0
    5224886425639452673319.91533016839940.54746066853770.0
    5226038743890132993310.80914271364737.29816030415020.0
    5225901627059208193310.19529762062438.16829838052550.0
    5228389010124046337314.31127836060639.11323442433410.0
    5228357485064093697313.76548769408739.20102059460550.0
    5228357416344616961313.94825377071739.23413349189520.0
    5228138918473367553311.60021505581438.78295799623780.0
    5225905883371798529309.20906548301338.39229399340940.0
    5225922745413402625308.27318763430838.6234727174390.0
    5228170748475998209311.50480935525939.27369492390310.0
    5228128756580745217310.06011765724139.61509236671960.0
    5228125286247170049310.39197560767739.85674478636780.0
    5224523603982155777310.15254846688140.59333977905030.0
    5228287051895406593315.25419508303840.05401245156790.0
    5228273862050840577316.99935233799340.0275603977330.0
    5225021858843197441318.64447845853740.67794380999240.0
    5225035018622992385317.99146351987140.96774411441940.0
    5225072127140429825316.86744117805641.90577726631510.0
    5225072367658598401317.07590799751841.95974788519520.0
    5224716538208059393313.03338159042441.08621109829320.0
    5224690454871670785311.94845897719841.25306754526540.0
    5225070142865539073317.08355465527842.12593272386970.0
    5224684089730138113314.08916189225741.97082848725330.0
    5224636059110866945314.40032150297542.30877268517340.0
    5224644606095785985313.64570938685942.49715494818610.0
    5225193335412490241315.24405685240942.9063874510960.0
    5228101612387434497323.94674963199938.97748114075510.0
    5228088950823845889322.96576053397639.48163582297340.0
    5228086506987454465322.52464216575139.39134129852950.0
    5228082809020612609323.08652599603839.95690587782030.0
    5228085098238181377323.37897674512639.99099656719130.0
    5227900238550794241325.94764479528739.86226849273860.0
    5224755742669537281327.29207806486440.4735804884630.0
    5224801402466861057326.4156946150241.16750128811960.0
    5224907135971753985321.81458559330740.50171079874820.0
    5224912689364467713322.47466810337640.78764615253130.0
    5224941637444042753320.73832821072940.56299256949480.0
    5224940666781433857320.25089399298441.16341300353940.0
    5224927141929418753321.86354218178441.57292542112980.0
    5224986902104375297325.24623815594842.15657778124230.0
    5225019698474647553323.39678505719741.58803073987690.0
    5224955600382722049322.51780085803741.82376102718470.0
    5224831699166167041323.11849460086942.37503580624870.0
    5224858555596668929324.91811616015642.37525149433180.0
    5224856356573413377324.478481630442.469641158570.0
    5260489398125854721328.50119247274540.71503466153740.0
    5264154912194822145328.84957710811741.06957878151770.0
    5260548449631207425328.80098806444442.06648454407370.0
    5224757619570245633326.10137957423841.8062103705320.0
    5224756915195609089326.47195686454442.01262823122720.0
    5260532828835151873327.43529414076642.39922626729420.0
    5260531072193527809327.19890951608242.74784196361050.0
    5260542758799540225327.88760656793242.62365661885350.0
    5260660973479395329326.07385800842943.35464558283750.0
    5260725440938508289326.55822113875643.41061677113290.0
    5264186024937914369329.48648962038642.65163993874620.0
    5260511212264751105328.3197909565943.16637259658850.0
    5264374758685802497329.51522294269843.99048083699810.0
    5264303453638754305329.43547386359644.07587327077290.0
    5260592159513378817329.12851838638644.26147041132090.0
    5225085673467281409319.12073151292441.55409528884620.0
    5224915403783798785320.79198979350742.22865228533180.0
    5225266367036391425320.76234723673542.46490333148460.0
    5224960535300145153322.5400306215142.24075631011430.0
    5224862219203772417323.33310116147842.54947909462440.0
    5224835594701504513322.06506264050643.13387234898860.0
    5225263253185101825316.39088985420742.74842186980540.0
    5225213848176295937316.47276521481643.4994511765080.0
    5225112469768241153319.65765992345443.44087872498050.0
    5225153065799122945320.38766986108843.87256378887190.0
    5225098042973093889321.97084737119544.27314775868040.0
    5224328779970641921321.40278639325944.54593683196670.0
    5224353149615079425318.91878858993544.38091599121240.0
    +
    +
    diff --git a/test/tap/data/testdata_binary.vot b/test/tap/data/testdata_binary.vot new file mode 100644 index 0000000000000000000000000000000000000000..829a5b88f3bfbf8b803d35405779860be2e4d303 --- /dev/null +++ b/test/tap/data/testdata_binary.vot @@ -0,0 +1,137 @@ + + + + + + + + + + + + + + + +GUMS-10 is the 10th version of the Gaia Universe Model Snapshot, a +simulation of the expected contents of the Gaia cataloge run at the +MareNostrum supercomputer. The models used and the characteristics of +GUMS-10 are described in: A.C. Robin et al, "Gaia Universe Model +Snapshot. A statistical analysis of the expected contents of the Gaia +catalogue", Astronomy & Astrophysics (2012), in press. For more +details see also http://gaia.am.ub.es/GUMS-10/ + + +Supernovae in the GUMS-10 simulated GAIA result set. + + +If you use this data, please acknowledge that GUMS was created using +the MareNostrum supercomputer. + + +Query successful + + + +Short name for TAP service + + +TAP service title + + +Unique resource registry identifier + + +Publisher for TAP service + + +Descriptive URL for search resource + + +Individual to contact about this service + + +GUMS source identifier + + + +Right ascention of the barycenter at J2010 reference epoch in the ICRS frame + + +Declination of the barycenter at J2010 reference epoch in the ICRS frame + + +GAIA G band apparent magnitude at reference epoch. The GAIA G-band has a wide bandpass between 350 and 150 nm. This is close to Johnson V for V-I between -0.4 and 1.4. 
+ + + + +SIyLN4AAAAFAc7Mj/8o8y0BB6tr0Qsk+AAAAAEiPfbKAAAABQHPCXeJtfdFAQlKD +Uw57qQAAAABIj8oIgAAAAUBz5UrAhNRFQEKSREpxKyEAAAAASI9UIoAAAAFAc9Rw +ldpT4UBChQnSTbl2AAAAAEiPdUaAAAABQHO4ommdD+NAQmqiXOafjAAAAABIjkQ6 +gAAAAUBzqMK92aFwQEKEDLhd9GEAAAAASI59e4AAAAFAc5g+FrafuEBCus89V3m8 +AAAAAEiPZ36AAAABQHOyV0AQPtFAQqb+at6XDAAAAABIj2Z/gAAAAUBzsjg3Fy80 +QEKybHyEPGwAAAAASI5SlYAAAAFAc7Guc3OOikBC5fxw5nUHAAAAAEiP4SyAAAAB +QHPHIScjvaJAQuLgAUVnrQAAAABIjlV0gAAAAUBzozyo9hyMQELhj9Qt9T0AAAAA +SI7LsYAAAAFAc57Uv9XVxUBDFYUkVD3nAAAAAEiOylGAAAABQHOeaZmcwx5AQyxk +74XNFgAAAABIj/+JgAAAAUBz6jaKJV84QEK63jSuQhkAAAAASI/fJoAAAAFAc/17 +XaOZm0BC9L4zEkr6AAAAAEiP8Q2AAAABQHPdKHrRSiFAQt13Kl6inwAAAABIj5D0 +gAAAAUB0DZ48ww1/QEMI+0LNwlAAAAAASI3MOoAAAAFAdCrwCbsNmUBDXc3qHzTY +AAAAAEiNUuWAAAABQHQI3LRHLQtAQ1boSikxgwAAAABIjVuEgAAAAUB0AZ7i9IAA +QEOF6Ywo7I0AAAAASI1a5oAAAAFAdAkVIIHY6EBDoOK2iknGAAAAAEiPE/2AAAAB +QHPIBOdGi79AQy1ZiQEM1gAAAABIjwZ/gAAAAUBz2HyIvOa4QEPJGwsjRrIAAAAA +SI82E4AAAAFAc/IUTs8HyEBDcfdrgWSuAAAAAEiPDD+AAAABQHPlmklFhQhAQ6/e +f8sHDgAAAABIjwxqgAAAAUBz57d3Gt2tQEO78KSmrdsAAAAASIKGhYAAAAFAc/6l +MT8lDkBERhMw8ZudAAAAAEiGnoyAAAABQHNs8j+hTpJAQqYqHemlMAAAAABIhiHX +gAAAAUBzYx/wZdkRQEMVis0kKdQAAAAASI74GoAAAAFAc6T6/wSsFkBDjn53MqXv +AAAAAEiO226AAAABQHOcP3AGOYJAQ5m7CvfTmQAAAABIjttegAAAAUBznywMJVic +QEOd+BYVS0kAAAAASI4UpYAAAAFAc3maexo0m0BDZDf3tf3UAAAAAEiGJbaAAAAB +QHNTWFUMRDZAQzI2sIgOKQAAAABIhjUMgAAAAUBzRF75/zBkQENPzfQ5rKQAAAAA +SI4xmIAAAAFAc3gTsvl41kBDowhvbZ9dAAAAAEiOC2eAAAABQHNg9j3uvDJAQ867 +WL+KOwAAAABIjgg/gAAAAUBzZkWINvzXQEPtqdArO+4AAAAASIE8iYAAAAFAc2Jw +1qlE8UBES/KO0TfsAAAAAEiOm1+AAAABQHO0ES7dBzxARAbp4UiHzAAAAABIjo9g +gAAAAUBzz/1Y4I3HQEQDhxlfdsgAAAAASIMBsoAAAAFAc+pPyKTmSUBEVsbc3ji0 +AAAAAEiDDaqAAAABQHPf3QjaEF5ARHvfCgUp9gAAAABIgy9qgAAAAUBzzeEKAC9F +QETz8IJsJE4AAAAASIMvooAAAAFAc9E2603thEBE+tkEyajGAAAAAEiB7AKAAAAB +QHOQiLsico5ARIsI9xvcWwAAAABIgdRJgAAAAUBzfyzjUgqcQESgZIRvTAUAAAAA +SIMtnIAAAAFAc9FWPWf9kEBFEB6QQUIfAAAAAEiBzn+AAAABQHOhbTUFNLFARPxE +G51jmAAAAABIgaLQgAAAAUBzpme3hTKcQEUnhd0EW+MAAAAASIGqloAAAAFAc5pU 
+0120eUBFP6LF+cB/AAAAAEiDnaeAAAABQHOz56god2JARXQEgQX7JwAAAABIjfK3 +gAAAAUB0PyXi8S75QEN9Hhod//4AAAAASI3nM4AAAAFAdC9zwVFTHEBDvaY+HiBc +AAAAAEiN5PqAAAABQHQoZO8vAA1AQ7IXeL9hEgAAAABIjeGdgAAAAUB0MWJpFTPL +QEP6e+RNS08AAAAASI3jsoAAAAFAdDYQSetkLEBD/tj5u0R4AAAAAEiNO5GAAAAB +QHRfKY2Wv7dAQ+5e0GBXfQAAAABIgg+qgAAAAUB0dKxaDIeEQEQ8nkkS/HQAAAAA +SII5MYAAAAFAdGamr2WKJkBElXCupUF7AAAAAEiCmVuAAAABQHQdCIrnML5AREA4 +DzhWNwAAAABIgp5ogAAAAUB0J5g9lMdFQERk0ZbQ+JkAAAAASIK4vIAAAAFAdAvQ +MT3suUBESBAj+O+DAAAAAEiCt9qAAAABQHQEA6lrahFARJTqt6D4rwAAAABIgquN +gAAAAUB0HdERm1epQETJVZ7FZlcAAAAASILh54AAAAFAdFPwl2utL0BFFAq9oNuj +AAAAAEiC/7uAAAABQHQ2WTtJwz5ARMtEl15oMAAAAABIgsVvgAAAAUB0KEjpjXHB +QETpcQBXvSQAAAAASIJUv4AAAAFAdDHlWpg3wkBFMAEsXVVlAAAAAEiCbSyAAAAB +QHROsJqSHMlARTAIPa/3EQAAAABIgmssgAAAAUB0R6fcWqTgQEU8HTOUdPQAAAAA +SQEDPIAAAAFAdIgE4mX3D0BEW4ZBe2egAAAAAEkOCQCAAAABQHSNl94qbK9ARIjn +9R+OmQAAAABJATjxgAAAAUB0jNDY3FRgQEUIgpDHPjYAAAAASIIRX4AAAAFAdGGf +QDA9W0BE5zHmw5DEAAAAAEiCELuAAAABQHRnjSKkJW1ARQGdzUgQRgAAAABJASq8 +gAAAAUB0dvb2/SuwQEUzGdio3csAAAAASQEpI4AAAAFAdHMuu76m90BFX7lJFCRK +AAAAAEkBM8SAAAABQHR+M6Lxz7lARU/T+ub0cgAAAABJAZ9IgAAAAUB0YS6FvCwB +QEWtZQbF+p0AAAAASQHZ6oAAAAFAdGjueUnuSUBFtI8XIZo7AAAAAEkOJUyAAAAB +QHSXyKlXFmxARVNo8ADXWAAAAABJARcTgAAAAUB0hR3dH0HFQEWVS7J+piwAAAAA +SQ7Q84AAAAFAdJg+WmmQl0BF/sgTeRxcAAAAAEkOkBmAAAABQHSW97NxJoVARgm2 +NyBOGgAAAABJAWCygAAAAUB0kg5pS6dnQEYhd9zIv1oAAAAASIM7vIAAAAFAc/Hu +hCq5r0BExuyYLDsmAAAAAEiCoOCAAAABQHQMq/19Xg5ARR1EemPT7wAAAABIg+AT +gAAAAUB0DDKTBB+RQEU7gfPOQ48AAAAASILJ7IAAAAFAdCij9yYjlkBFHtEaTx+P +AAAAAEiCcIGAAAABQHQ1VGHiLOxARUZVVLqfrgAAAABIglhKgAAAAUB0IQp/H5Kj +QEWRIrqoX1MAAAAASIPdPoAAAAFAc8ZBFbhChEBFX8xJrzZyAAAAAEiDsE+AAAAB +QHPHkHJCBSNARb/uBCKGegAAAABIg1QbgAAAAUBz+oXGaXH/QEW4brbMpY8AAAAA +SIN5B4AAAAFAdAYz5U/wSUBF77ArlHB4AAAAAEiDRvyAAAABQHQfiJdAywtARiL2 +gXlBBwAAAABIgItYgAAAAUB0FnHQJSUkQEZF4UITfUoAAAAASIChgoAAAAFAc+6z +W6obV0BGMMHa7mPtAAAAAA== + + + +
    +
    +
    diff --git a/test/tap/db/JDBCConnectionTest.java b/test/tap/db/JDBCConnectionTest.java new file mode 100644 index 0000000000000000000000000000000000000000..b018612580c7cc793a546908fd15d0515f62846f --- /dev/null +++ b/test/tap/db/JDBCConnectionTest.java @@ -0,0 +1,1094 @@ +package tap.db; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.Iterator; + +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import tap.data.DataReadException; +import tap.data.TableIterator; +import tap.data.VOTableIterator; +import tap.metadata.TAPColumn; +import tap.metadata.TAPForeignKey; +import tap.metadata.TAPMetadata; +import tap.metadata.TAPMetadata.STDSchema; +import tap.metadata.TAPMetadata.STDTable; +import tap.metadata.TAPSchema; +import tap.metadata.TAPTable; +import testtools.DBTools; +import adql.db.DBChecker; +import adql.db.DBColumn; +import adql.db.DBTable; +import adql.db.DBType; +import adql.db.DBType.DBDatatype; +import adql.parser.ADQLParser; +import adql.parser.ParseException; +import adql.query.ADQLQuery; +import adql.query.IdentifierField; +import adql.translator.PostgreSQLTranslator; + +public class JDBCConnectionTest { + + private static Connection pgConnection; + private static JDBCConnection pgJDBCConnection; + private static JDBCConnection sensPgJDBCConnection; + + private static Connection sqliteConnection; + private static JDBCConnection sqliteJDBCConnection; + private static JDBCConnection sensSqliteJDBCConnection; + + private static String 
uploadExamplePath; + + @BeforeClass + public static void setUpBeforeClass() throws Exception{ + + String projectDir = (new File("")).getAbsolutePath(); + uploadExamplePath = projectDir + "/test/tap/db/upload_example.vot"; + + final String sqliteDbFile = projectDir + "/test/tap/db/TestTAPDb.db"; + + pgConnection = DBTools.createConnection("postgresql", "127.0.0.1", null, "gmantele", "gmantele", "pwd"); + pgJDBCConnection = new JDBCConnection(pgConnection, new PostgreSQLTranslator(false), "POSTGRES", null); + sensPgJDBCConnection = new JDBCConnection(pgConnection, new PostgreSQLTranslator(true, true, true, true), "SensitivePSQL", null); + + sqliteConnection = DBTools.createConnection("sqlite", null, null, sqliteDbFile, null, null); + sqliteJDBCConnection = new JDBCConnection(sqliteConnection, new PostgreSQLTranslator(false), "SQLITE", null); + sensSqliteJDBCConnection = new JDBCConnection(sqliteConnection, new PostgreSQLTranslator(true), "SensitiveSQLite", null); + + } + + @AfterClass + public static void tearDownAfterClass() throws Exception{ + // There should be no difference between a POSTGRESQL connection and a SQLITE one! + JDBCConnection[] connections = new JDBCConnection[]{pgJDBCConnection,sensPgJDBCConnection,sqliteJDBCConnection,sensSqliteJDBCConnection}; + for(JDBCConnection conn : connections){ + dropSchema(STDSchema.TAPSCHEMA.label, conn); + dropSchema(STDSchema.UPLOADSCHEMA.label, conn); + } + pgConnection.close(); + sqliteConnection.close(); + } + + /* ***** */ + /* TESTS */ + /* ***** */ + + @Test + public void testGetTAPSchemaTablesDef(){ + // There should be no difference between a POSTGRESQL connection and a SQLITE one! 
+ JDBCConnection[] connections = new JDBCConnection[]{pgJDBCConnection,sensPgJDBCConnection,sqliteJDBCConnection,sensSqliteJDBCConnection}; + for(JDBCConnection conn : connections){ + TAPMetadata meta = createCustomSchema(); + TAPTable customColumns = meta.getTable(STDSchema.TAPSCHEMA.toString(), STDTable.COLUMNS.toString()); + TAPTable[] tapTables = conn.mergeTAPSchemaDefs(meta); + TAPSchema stdSchema = TAPMetadata.getStdSchema(conn.supportsSchema); + assertEquals(5, tapTables.length); + assertTrue(equals(tapTables[0], stdSchema.getTable(STDTable.SCHEMAS.label))); + assertEquals(customColumns.getSchema(), tapTables[0].getSchema()); + assertTrue(equals(tapTables[1], stdSchema.getTable(STDTable.TABLES.label))); + assertEquals(customColumns.getSchema(), tapTables[1].getSchema()); + assertTrue(equals(tapTables[2], customColumns)); + assertTrue(equals(tapTables[3], stdSchema.getTable(STDTable.KEYS.label))); + assertEquals(customColumns.getSchema(), tapTables[3].getSchema()); + assertTrue(equals(tapTables[4], stdSchema.getTable(STDTable.KEY_COLUMNS.label))); + assertEquals(customColumns.getSchema(), tapTables[4].getSchema()); + } + } + + @Test + public void testSetTAPSchema(){ + // There should be no difference between a POSTGRESQL connection and a SQLITE one! 
+ JDBCConnection[] connections = new JDBCConnection[]{pgJDBCConnection,sensPgJDBCConnection,sqliteJDBCConnection,sensSqliteJDBCConnection}; + for(JDBCConnection conn : connections){ + short cnt = -1; + while(cnt < 1){ + /* NO CUSTOM DEFINITION */ + // Prepare the test: + if (cnt == -1) + dropSchema(STDSchema.TAPSCHEMA.label, conn); + else + createTAPSchema(conn); + // Do the test: + try{ + TAPMetadata meta = new TAPMetadata(); + int[] expectedCounts = getStats(meta); + conn.setTAPSchema(meta); + int[] effectiveCounts = getStats(conn, meta); + for(int i = 0; i < expectedCounts.length; i++) + assertEquals(expectedCounts[i], effectiveCounts[i]); + }catch(DBException dbe){ + dbe.printStackTrace(System.err); + fail("[" + conn.getID() + ";no def] No error should happen here ; when an empty list of metadata is given, at least the TAP_SCHEMA should be created and filled with a description of itself."); + } + + /* CUSTOM DEFINITION */ + // Prepare the test: + if (cnt == -1) + dropSchema(STDSchema.TAPSCHEMA.label, conn); + // Do the test: + try{ + TAPMetadata meta = createCustomSchema(); + int[] expectedCounts = getStats(meta); + conn.setTAPSchema(meta); + int[] effectiveCounts = getStats(conn, meta); + for(int i = 0; i < expectedCounts.length; i++) + assertEquals(expectedCounts[i], effectiveCounts[i]); + }catch(DBException dbe){ + dbe.printStackTrace(System.err); + fail("[" + conn.getID() + ";custom def] No error should happen here!"); + } + + cnt++; + } + } + } + + @Test + public void testGetCreationOrder(){ + // There should be no difference between a POSTGRESQL connection and a SQLITE one! 
+ JDBCConnection[] connections = new JDBCConnection[]{pgJDBCConnection,sensPgJDBCConnection,sqliteJDBCConnection,sensSqliteJDBCConnection}; + for(JDBCConnection conn : connections){ + assertEquals(-1, conn.getCreationOrder(null)); + assertEquals(0, conn.getCreationOrder(STDTable.SCHEMAS)); + assertEquals(1, conn.getCreationOrder(STDTable.TABLES)); + assertEquals(2, conn.getCreationOrder(STDTable.COLUMNS)); + assertEquals(3, conn.getCreationOrder(STDTable.KEYS)); + assertEquals(4, conn.getCreationOrder(STDTable.KEY_COLUMNS)); + } + } + + @Test + public void testGetDBMSDatatype(){ + assertEquals("VARCHAR", pgJDBCConnection.defaultTypeConversion(null)); + assertEquals("TEXT", sqliteJDBCConnection.defaultTypeConversion(null)); + + assertEquals("bytea", pgJDBCConnection.defaultTypeConversion(new DBType(DBDatatype.VARBINARY))); + assertEquals("BLOB", sqliteJDBCConnection.defaultTypeConversion(new DBType(DBDatatype.VARBINARY))); + } + + @Test + public void testMergeTAPSchemaDefs(){ + // There should be no difference between a POSTGRESQL connection and a SQLITE one! + JDBCConnection[] connections = new JDBCConnection[]{pgJDBCConnection,sensPgJDBCConnection,sqliteJDBCConnection,sensSqliteJDBCConnection}; + for(JDBCConnection conn : connections){ + + // TEST WITH NO METADATA OBJECT: + // -> expected: throws a NULL exception. + try{ + conn.mergeTAPSchemaDefs(null); + }catch(Exception e){ + assertEquals(NullPointerException.class, e.getClass()); + } + + // TEST WITH EMPTY METADATA OBJECT: + // -> expected: returns at least the 5 tables of the TAP_SCHEMA. 
+ TAPTable[] stdTables = conn.mergeTAPSchemaDefs(new TAPMetadata()); + + assertEquals(5, stdTables.length); + + for(TAPTable t : stdTables) + assertEquals(STDSchema.TAPSCHEMA.toString(), t.getADQLSchemaName()); + + assertEquals(STDTable.SCHEMAS.toString(), stdTables[0].getADQLName()); + assertEquals(STDTable.TABLES.toString(), stdTables[1].getADQLName()); + assertEquals(STDTable.COLUMNS.toString(), stdTables[2].getADQLName()); + assertEquals(STDTable.KEYS.toString(), stdTables[3].getADQLName()); + assertEquals(STDTable.KEY_COLUMNS.toString(), stdTables[4].getADQLName()); + + // TEST WITH INCOMPLETE TAP_SCHEMA TABLES LIST + 1 CUSTOM TAP_SCHEMA TABLE (here: TAP_SCHEMA.columns): + // -> expected: the 5 tables of the TAP_SCHEMA including the modification of the standard tables & ignore the additional table(s) if any (which is the case here). + TAPMetadata customMeta = createCustomSchema(); + stdTables = conn.mergeTAPSchemaDefs(customMeta); + + assertEquals(5, stdTables.length); + + for(TAPTable t : stdTables) + assertEquals(STDSchema.TAPSCHEMA.toString(), t.getADQLSchemaName()); + + assertEquals(STDTable.SCHEMAS.toString(), stdTables[0].getADQLName()); + assertEquals(STDTable.TABLES.toString(), stdTables[1].getADQLName()); + assertEquals(STDTable.COLUMNS.toString(), stdTables[2].getADQLName()); + assertEquals("Columns", stdTables[2].getDBName()); + assertNotNull(stdTables[2].getColumn("TestNewColumn")); + assertEquals(STDTable.KEYS.toString(), stdTables[3].getADQLName()); + assertEquals(STDTable.KEY_COLUMNS.toString(), stdTables[4].getADQLName()); + } + } + + @Test + public void testEquals(){ + // There should be no difference between a POSTGRESQL connection and a SQLITE one! 
+ JDBCConnection[] connections = new JDBCConnection[]{pgJDBCConnection,sensPgJDBCConnection,sqliteJDBCConnection,sensSqliteJDBCConnection}; + for(JDBCConnection conn : connections){ + // NULL tests: + assertFalse(conn.equals("tap_schema", null, false)); + assertFalse(conn.equals("tap_schema", null, true)); + assertFalse(conn.equals(null, "tap_schema", false)); + assertFalse(conn.equals(null, "tap_schema", true)); + assertFalse(conn.equals(null, null, false)); + assertFalse(conn.equals(null, null, true)); + + // CASE SENSITIVE tests: + if (conn.supportsMixedCaseQuotedIdentifier || conn.mixedCaseQuoted){ + assertFalse(conn.equals("tap_schema", "TAP_SCHEMA", true)); + assertTrue(conn.equals("TAP_SCHEMA", "TAP_SCHEMA", true)); + assertFalse(conn.equals("TAP_SCHEMA", "tap_schema", true)); + assertFalse(conn.equals("Columns", "columns", true)); + assertFalse(conn.equals("columns", "Columns", true)); + }else if (conn.lowerCaseQuoted){ + assertTrue(conn.equals("tap_schema", "TAP_SCHEMA", true)); + assertFalse(conn.equals("TAP_SCHEMA", "TAP_SCHEMA", true)); + assertFalse(conn.equals("TAP_SCHEMA", "tap_schema", true)); + assertFalse(conn.equals("Columns", "columns", true)); + assertTrue(conn.equals("columns", "Columns", true)); + }else if (conn.upperCaseQuoted){ + assertFalse(conn.equals("tap_schema", "TAP_SCHEMA", true)); + assertTrue(conn.equals("TAP_SCHEMA", "TAP_SCHEMA", true)); + assertTrue(conn.equals("TAP_SCHEMA", "tap_schema", true)); + assertFalse(conn.equals("Columns", "columns", true)); + assertFalse(conn.equals("columns", "Columns", true)); + }else{ + assertTrue(conn.equals("tap_schema", "TAP_SCHEMA", true)); + assertTrue(conn.equals("TAP_SCHEMA", "TAP_SCHEMA", true)); + assertTrue(conn.equals("TAP_SCHEMA", "tap_schema", true)); + assertTrue(conn.equals("Columns", "columns", true)); + assertTrue(conn.equals("columns", "Columns", true)); + } + + // CASE INSENSITIVE tests: + if (conn.supportsMixedCaseUnquotedIdentifier){ + assertTrue(conn.equals("tap_schema", 
"TAP_SCHEMA", false)); + assertTrue(conn.equals("TAP_SCHEMA", "TAP_SCHEMA", false)); + assertTrue(conn.equals("TAP_SCHEMA", "tap_schema", false)); + assertTrue(conn.equals("Columns", "columns", false)); + assertTrue(conn.equals("columns", "Columns", false)); + }else if (conn.lowerCaseUnquoted){ + assertTrue(conn.equals("tap_schema", "TAP_SCHEMA", false)); + assertFalse(conn.equals("TAP_SCHEMA", "TAP_SCHEMA", false)); + assertFalse(conn.equals("TAP_SCHEMA", "tap_schema", false)); + assertFalse(conn.equals("Columns", "columns", false)); + assertTrue(conn.equals("columns", "Columns", false)); + }else if (conn.upperCaseUnquoted){ + assertFalse(conn.equals("tap_schema", "TAP_SCHEMA", false)); + assertTrue(conn.equals("TAP_SCHEMA", "TAP_SCHEMA", false)); + assertTrue(conn.equals("TAP_SCHEMA", "tap_schema", false)); + assertFalse(conn.equals("Columns", "columns", false)); + assertFalse(conn.equals("columns", "Columns", false)); + }else{ + assertTrue(conn.equals("tap_schema", "TAP_SCHEMA", false)); + assertTrue(conn.equals("TAP_SCHEMA", "TAP_SCHEMA", false)); + assertTrue(conn.equals("TAP_SCHEMA", "tap_schema", false)); + assertTrue(conn.equals("Columns", "columns", false)); + assertTrue(conn.equals("columns", "Columns", false)); + } + } + } + + @Test + public void testGetTAPSchema(){ + // There should be no difference between a POSTGRESQL connection and a SQLITE one! + JDBCConnection[] connections = new JDBCConnection[]{pgJDBCConnection,sensPgJDBCConnection,sqliteJDBCConnection,sensSqliteJDBCConnection}; + for(JDBCConnection conn : connections){ + try{ + // Prepare the test: + createTAPSchema(conn); + // Try to get it (which should work without any problem here): + conn.getTAPSchema(); + }catch(DBException de){ + de.printStackTrace(System.err); + fail("No pbm should happen here (either for the creation of a std TAP_SCHEMA or for its reading)! 
CAUSE: " + de.getMessage()); + } + + try{ + // Prepare the test: + dropSchema(STDSchema.TAPSCHEMA.label, conn); + // Try to get it (which should work without any problem here): + conn.getTAPSchema(); + fail("DBException expected, because none of the TAP_SCHEMA tables exist."); + }catch(DBException de){ + assertTrue(de.getMessage().equals("Impossible to load schemas from TAP_SCHEMA.schemas!")); + } + } + } + + @Test + public void testIsTableExisting(){ + // There should be no difference between a POSTGRESQL connection and a SQLITE one! + JDBCConnection[] connections = new JDBCConnection[]{pgJDBCConnection,sensPgJDBCConnection,sqliteJDBCConnection,sensSqliteJDBCConnection}; + for(JDBCConnection conn : connections){ + try{ + // Get the database metadata: + DatabaseMetaData dbMeta = conn.connection.getMetaData(); + + // Prepare the test: + createTAPSchema(conn); + // Test the existence of all TAP_SCHEMA tables: + assertTrue(conn.isTableExisting(STDSchema.TAPSCHEMA.label, (conn.supportsSchema ? STDTable.SCHEMAS.label : STDSchema.TAPSCHEMA.label + "_" + STDTable.SCHEMAS.label), dbMeta)); + assertTrue(conn.isTableExisting(STDSchema.TAPSCHEMA.label, (conn.supportsSchema ? STDTable.TABLES.label : STDSchema.TAPSCHEMA.label + "_" + STDTable.TABLES.label), dbMeta)); + assertTrue(conn.isTableExisting(STDSchema.TAPSCHEMA.label, (conn.supportsSchema ? STDTable.COLUMNS.label : STDSchema.TAPSCHEMA.label + "_" + STDTable.COLUMNS.label), dbMeta)); + assertTrue(conn.isTableExisting(STDSchema.TAPSCHEMA.label, (conn.supportsSchema ? STDTable.KEYS.label : STDSchema.TAPSCHEMA.label + "_" + STDTable.KEYS.label), dbMeta)); + assertTrue(conn.isTableExisting(STDSchema.TAPSCHEMA.label, (conn.supportsSchema ? 
STDTable.KEY_COLUMNS.label : STDSchema.TAPSCHEMA.label + "_" + STDTable.KEY_COLUMNS.label), dbMeta)); + // Test the non-existence of any other table: + assertFalse(conn.isTableExisting(null, "foo", dbMeta)); + + // Prepare the test: + dropSchema(STDSchema.TAPSCHEMA.label, conn); + // Test the non-existence of all TAP_SCHEMA tables: + assertFalse(conn.isTableExisting(STDSchema.TAPSCHEMA.label, (conn.supportsSchema ? STDTable.SCHEMAS.label : STDSchema.TAPSCHEMA.label + "_" + STDTable.SCHEMAS.label), dbMeta)); + assertFalse(conn.isTableExisting(STDSchema.TAPSCHEMA.label, (conn.supportsSchema ? STDTable.TABLES.label : STDSchema.TAPSCHEMA.label + "_" + STDTable.TABLES.label), dbMeta)); + assertFalse(conn.isTableExisting(STDSchema.TAPSCHEMA.label, (conn.supportsSchema ? STDTable.COLUMNS.label : STDSchema.TAPSCHEMA.label + "_" + STDTable.COLUMNS.label), dbMeta)); + assertFalse(conn.isTableExisting(STDSchema.TAPSCHEMA.label, (conn.supportsSchema ? STDTable.KEYS.label : STDSchema.TAPSCHEMA.label + "_" + STDTable.KEYS.label), dbMeta)); + assertFalse(conn.isTableExisting(STDSchema.TAPSCHEMA.label, (conn.supportsSchema ? STDTable.KEY_COLUMNS.label : STDSchema.TAPSCHEMA.label + "_" + STDTable.KEY_COLUMNS.label), dbMeta)); + }catch(Exception ex){ + ex.printStackTrace(System.err); + fail("{" + conn.getID() + "} Testing the existence of a table should not throw an error!"); + } + } + } + + @Test + public void testIsColumnExisting(){ + // There should be no difference between a POSTGRESQL connection and a SQLITE one! 
+ JDBCConnection[] connections = new JDBCConnection[]{pgJDBCConnection,sensPgJDBCConnection,sqliteJDBCConnection,sensSqliteJDBCConnection}; + int i = -1; + for(JDBCConnection conn : connections){ + i++; + try{ + // Get the database metadata: + DatabaseMetaData dbMeta = conn.connection.getMetaData(); + + // Prepare the test: + createTAPSchema(conn); + // Test the existence of one column for all TAP_SCHEMA tables: + assertTrue(conn.isColumnExisting(STDSchema.TAPSCHEMA.label, (conn.supportsSchema ? STDTable.SCHEMAS.label : STDSchema.TAPSCHEMA.label + "_" + STDTable.SCHEMAS.label), "schema_name", dbMeta)); + assertTrue(conn.isColumnExisting(STDSchema.TAPSCHEMA.label, (conn.supportsSchema ? STDTable.TABLES.label : STDSchema.TAPSCHEMA.label + "_" + STDTable.TABLES.label), "table_name", dbMeta)); + assertTrue(conn.isColumnExisting(STDSchema.TAPSCHEMA.label, (conn.supportsSchema ? STDTable.COLUMNS.label : STDSchema.TAPSCHEMA.label + "_" + STDTable.COLUMNS.label), "column_name", dbMeta)); + assertTrue(conn.isColumnExisting(STDSchema.TAPSCHEMA.label, (conn.supportsSchema ? STDTable.KEYS.label : STDSchema.TAPSCHEMA.label + "_" + STDTable.KEYS.label), "key_id", dbMeta)); + assertTrue(conn.isColumnExisting(STDSchema.TAPSCHEMA.label, (conn.supportsSchema ? STDTable.KEY_COLUMNS.label : STDSchema.TAPSCHEMA.label + "_" + STDTable.KEY_COLUMNS.label), "key_id", dbMeta)); + // Test the non-existence of any column: + assertFalse(conn.isColumnExisting(null, null, "foo", dbMeta)); + + // Prepare the test: + dropSchema(STDSchema.TAPSCHEMA.label, conn); + // Test the non-existence of the same column for all TAP_SCHEMA tables: + assertFalse(conn.isColumnExisting(STDSchema.TAPSCHEMA.label, (conn.supportsSchema ? STDTable.SCHEMAS.label : STDSchema.TAPSCHEMA.label + "_" + STDTable.SCHEMAS.label), "schema_name", dbMeta)); + assertFalse(conn.isColumnExisting(STDSchema.TAPSCHEMA.label, (conn.supportsSchema ? 
STDTable.TABLES.label : STDSchema.TAPSCHEMA.label + "_" + STDTable.TABLES.label), "table_name", dbMeta)); + assertFalse(conn.isColumnExisting(STDSchema.TAPSCHEMA.label, (conn.supportsSchema ? STDTable.COLUMNS.label : STDSchema.TAPSCHEMA.label + "_" + STDTable.COLUMNS.label), "column_name", dbMeta)); + assertFalse(conn.isColumnExisting(STDSchema.TAPSCHEMA.label, (conn.supportsSchema ? STDTable.KEYS.label : STDSchema.TAPSCHEMA.label + "_" + STDTable.KEYS.label), "key_id", dbMeta)); + assertFalse(conn.isColumnExisting(STDSchema.TAPSCHEMA.label, (conn.supportsSchema ? STDTable.KEY_COLUMNS.label : STDSchema.TAPSCHEMA.label + "_" + STDTable.KEY_COLUMNS.label), "key_id", dbMeta)); + }catch(Exception ex){ + ex.printStackTrace(System.err); + fail("{" + conn.getID() + "} Testing the existence of a column should not throw an error!"); + } + } + } + + @Test + public void testAddUploadedTable(){ + // There should be no difference between a POSTGRESQL connection and a SQLITE one! + JDBCConnection[] connections = new JDBCConnection[]{pgJDBCConnection,sensPgJDBCConnection,sqliteJDBCConnection,sensSqliteJDBCConnection}; + TAPTable tableDef = null; + for(JDBCConnection conn : connections){ + InputStream io = null; + try{ + io = new FileInputStream(uploadExamplePath); + TableIterator it = new VOTableIterator(io); + + TAPColumn[] cols = it.getMetadata(); + tableDef = new TAPTable("UploadExample"); + for(TAPColumn c : cols) + tableDef.addColumn(c); + + // Test with no schema set: + try{ + conn.addUploadedTable(tableDef, it); + fail("The table is not inside a TAPSchema, so this test should have failed!"); + }catch(Exception ex){ + assertTrue(ex instanceof DBException); + assertEquals("Missing upload schema! 
An uploaded table must be inside a schema whose the ADQL name is strictly equals to \"" + STDSchema.UPLOADSCHEMA.label + "\" (but the DB name may be different).", ex.getMessage()); + } + + // Specify the UPLOAD schema for the table to upload: + TAPSchema schema = new TAPSchema(STDSchema.UPLOADSCHEMA.label); + schema.addTable(tableDef); + + // Prepare the test: no TAP_UPLOAD schema and no table TAP_UPLOAD.UploadExample: + dropSchema(STDSchema.UPLOADSCHEMA.label, conn); + // Test: + try{ + assertTrue(conn.addUploadedTable(tableDef, it)); + }catch(Exception ex){ + ex.printStackTrace(System.err); + fail("{" + conn.ID + "} This error should not happen: no TAP_UPLOAD schema."); + } + + close(io); + io = new FileInputStream(uploadExamplePath); + it = new VOTableIterator(io); + + // Prepare the test: the TAP_UPLOAD schema exist but not the table TAP_UPLOAD.UploadExample: + dropTable(tableDef.getDBSchemaName(), tableDef.getDBName(), conn); + // Test: + try{ + assertTrue(conn.addUploadedTable(tableDef, it)); + }catch(Exception ex){ + ex.printStackTrace(System.err); + fail("{" + conn.ID + "} This error should not happen: no TAP_UPLOAD schema."); + } + + close(io); + io = new FileInputStream(uploadExamplePath); + it = new VOTableIterator(io); + + // Prepare the test: the TAP_UPLOAD schema and the table TAP_UPLOAD.UploadExample BOTH exist: + ; + // Test: + try{ + assertFalse(conn.addUploadedTable(tableDef, it)); + }catch(Exception ex){ + if (ex instanceof DBException) + assertEquals("Impossible to create the user uploaded table in the database: " + conn.translator.getTableName(tableDef, conn.supportsSchema) + "! 
This table already exists.", ex.getMessage()); + else{ + ex.printStackTrace(System.err); + fail("{" + conn.ID + "} DBException was the expected exception!"); + } + } + + }catch(Exception ex){ + ex.printStackTrace(System.err); + fail("{" + conn.ID + "} This error should never happen except there is a problem with the file (" + uploadExamplePath + ")."); + }finally{ + close(io); + } + } + } + + @Test + public void testDropUploadedTable(){ + TAPTable tableDef = new TAPTable("TableToDrop"); + TAPSchema uploadSchema = new TAPSchema(STDSchema.UPLOADSCHEMA.label); + uploadSchema.addTable(tableDef); + + // There should be no difference between a POSTGRESQL connection and a SQLITE one! + JDBCConnection[] connections = new JDBCConnection[]{pgJDBCConnection,sensPgJDBCConnection,sqliteJDBCConnection,sensSqliteJDBCConnection}; + for(JDBCConnection conn : connections){ + try{ + // 1st TEST CASE: the schema TAP_UPLOAD does not exist -> no error should be raised! + // drop the TAP_UPLOAD schema: + dropSchema(uploadSchema.getDBName(), conn); + // try to drop the table: + assertTrue(conn.dropUploadedTable(tableDef)); + + // 2nd TEST CASE: the table does not exists -> no error should be raised! + // create the TAP_UPLOAD schema, but not the table: + createSchema(uploadSchema.getDBName(), conn); + // try to drop the table: + assertTrue(conn.dropUploadedTable(tableDef)); + + // 3rd TEST CASE: the table and the schema exist -> the table should be created without any error! + // create the fake uploaded table: + createFooTable(tableDef.getDBSchemaName(), tableDef.getDBName(), conn); + // try to drop the table: + assertTrue(conn.dropUploadedTable(tableDef)); + + }catch(Exception ex){ + ex.printStackTrace(System.err); + fail("{" + conn.ID + "} This error should not happen. 
The table should be dropped and even if it does not exist, no error should be thrown."); + } + } + } + + @Test + public void testExecuteQuery(){ + // There should be no difference between a POSTGRESQL connection and a SQLITE one! + JDBCConnection[] connections = new JDBCConnection[]{pgJDBCConnection,sensPgJDBCConnection,sqliteJDBCConnection,sensSqliteJDBCConnection}; + for(JDBCConnection conn : connections){ + + TAPSchema schema = TAPMetadata.getStdSchema(conn.supportsSchema); + ArrayList tables = new ArrayList(schema.getNbTables()); + for(TAPTable t : schema) + tables.add(t); + + ADQLParser parser = new ADQLParser(new DBChecker(tables)); + parser.setDebug(false); + + /*if (conn.ID.equalsIgnoreCase("SQLITE")){ + for(DBTable t : tables){ + TAPTable tapT = (TAPTable)t; + tapT.getSchema().setDBName(null); + tapT.setDBName(tapT.getSchema().getADQLName() + "_" + tapT.getDBName()); + } + }*/ + + TableIterator result = null; + try{ + // Prepare the test: create the TAP_SCHEMA: + dropSchema(STDSchema.TAPSCHEMA.label, conn); + // Build the ADQLQuery object: + ADQLQuery query = parser.parseQuery("SELECT table_name FROM TAP_SCHEMA.tables;"); + // Execute the query: + result = conn.executeQuery(query); + fail("{" + conn.ID + "} This test should have failed because TAP_SCHEMA was supposed to not exist!"); + }catch(DBException de){ + assertTrue(de.getMessage().startsWith("Unexpected error while executing a SQL query: ")); + assertTrue(de.getMessage().indexOf("tap_schema") > 0 || de.getMessage().indexOf("TAP_SCHEMA") > 0); + }catch(ParseException pe){ + pe.printStackTrace(System.err); + fail("There should be no pbm to parse the ADQL expression!"); + }finally{ + if (result != null){ + try{ + result.close(); + }catch(DataReadException de){} + result = null; + } + } + + try{ + // Prepare the test: create the TAP_SCHEMA: + createTAPSchema(conn); + // Build the ADQLQuery object: + ADQLQuery query = parser.parseQuery("SELECT table_name FROM TAP_SCHEMA.tables;"); + // Execute the query: 
+ result = conn.executeQuery(query); + assertEquals(1, result.getMetadata().length); + int cntRow = 0; + while(result.nextRow()){ + cntRow++; + assertTrue(result.hasNextCol()); + assertNotNull(TAPMetadata.resolveStdTable((String)result.nextCol())); + assertFalse(result.hasNextCol()); + } + assertEquals(5, cntRow); + }catch(DBException de){ + de.printStackTrace(System.err); + fail("No ADQL/SQL query error was expected here!"); + }catch(ParseException pe){ + fail("There should be no pbm to parse the ADQL expression!"); + }catch(DataReadException e){ + e.printStackTrace(System.err); + fail("There should be no pbm when accessing rows and the first (and only) columns of the result!"); + }catch(Exception ex){ + ex.printStackTrace(System.err); + fail("There should be no pbm when reading the query result!"); + }finally{ + if (result != null){ + try{ + result.close(); + }catch(DataReadException de){} + result = null; + } + } + } + } + + /* ************** */ + /* TOOL FUNCTIONS */ + /* ************** */ + + public final static void main(final String[] args) throws Throwable{ + JDBCConnection conn = new JDBCConnection(DBTools.createConnection("postgresql", "127.0.0.1", null, "gmantele", "gmantele", "pwd"), new PostgreSQLTranslator(), "TEST_POSTGRES", null); + JDBCConnectionTest.createTAPSchema(conn); + JDBCConnectionTest.dropSchema(STDSchema.TAPSCHEMA.label, conn); + } + + /** + *

    Build a table prefix with the given schema name.

    + * + *

    By default, this function returns: schemaName + "_".

    + * + *

    CAUTION: + * This function is used only when schemas are not supported by the DBMS connection. + * It aims to propose an alternative of the schema notion by prefixing the table name by the schema name. + *

    + * + *

    Note: + * If the given schema is NULL or is an empty string, an empty string will be returned. + * Thus, no prefix will be set....which is very useful when the table name has already been prefixed + * (in such case, the DB name of its schema has theoretically set to NULL). + *

    + * + * @param schemaName (DB) Schema name. + * + * @return The corresponding table prefix, or "" if the given schema name is an empty string or NULL. + */ + protected static String getTablePrefix(final String schemaName){ + if (schemaName != null && schemaName.trim().length() > 0) + return schemaName + "_"; + else + return ""; + } + + private static void dropSchema(final String schemaName, final JDBCConnection conn){ + Statement stmt = null; + ResultSet rs = null; + try{ + stmt = conn.connection.createStatement(); + + final boolean caseSensitive = conn.translator.isCaseSensitive(IdentifierField.SCHEMA); + if (conn.supportsSchema) + stmt.executeUpdate("DROP SCHEMA IF EXISTS " + formatIdentifier(schemaName, caseSensitive) + " CASCADE;"); + else{ + startTransaction(conn); + final String tablePrefix = getTablePrefix(schemaName); + final int prefixLen = tablePrefix.length(); + if (prefixLen <= 0) + return; + rs = conn.connection.getMetaData().getTables(null, null, null, null); + ArrayList tablesToDrop = new ArrayList(); + while(rs.next()){ + String table = rs.getString(3); + if (table.length() > prefixLen){ + if (equals(schemaName, table.substring(0, prefixLen - 1), caseSensitive)) + tablesToDrop.add(table); + } + } + close(rs); + rs = null; + for(String t : tablesToDrop) + stmt.executeUpdate("DROP TABLE IF EXISTS \"" + t + "\";"); + commit(conn); + } + }catch(Exception ex){ + rollback(conn); + ex.printStackTrace(System.err); + fail("{" + conn.ID + "} Impossible to prepare a test by: dropping the schema " + schemaName + "!"); + }finally{ + close(rs); + close(stmt); + } + } + + private static void dropTable(final String schemaName, final String tableName, final JDBCConnection conn){ + Statement stmt = null; + ResultSet rs = null; + try{ + final boolean sCaseSensitive = conn.translator.isCaseSensitive(IdentifierField.SCHEMA); + final boolean tCaseSensitive = conn.translator.isCaseSensitive(IdentifierField.TABLE); + stmt = conn.connection.createStatement(); + if 
(conn.supportsSchema) + stmt.executeUpdate("DROP TABLE IF EXISTS " + formatIdentifier(schemaName, sCaseSensitive) + "." + formatIdentifier(tableName, tCaseSensitive) + ";"); + else{ + rs = conn.connection.getMetaData().getTables(null, null, null, null); + String tableToDrop = null; + while(rs.next()){ + String table = rs.getString(3); + if (equals(tableName, table, tCaseSensitive)){ + tableToDrop = table; + break; + } + } + close(rs); + if (tableToDrop != null) + stmt.executeUpdate("DROP TABLE IF EXISTS \"" + tableToDrop + "\";"); + } + }catch(Exception ex){ + ex.printStackTrace(System.err); + fail("{" + conn.ID + "} Impossible to prepare a test by: dropping the table " + schemaName + "." + tableName + "!"); + }finally{ + close(rs); + close(stmt); + } + } + + private static void createSchema(final String schemaName, final JDBCConnection conn){ + if (!conn.supportsSchema) + return; + + dropSchema(schemaName, conn); + + Statement stmt = null; + ResultSet rs = null; + try{ + final boolean sCaseSensitive = conn.translator.isCaseSensitive(IdentifierField.SCHEMA); + stmt = conn.connection.createStatement(); + stmt.executeUpdate("CREATE SCHEMA " + formatIdentifier(schemaName, sCaseSensitive) + ";"); + }catch(Exception ex){ + ex.printStackTrace(System.err); + fail("{" + conn.ID + "} Impossible to prepare a test by: creating the schema " + schemaName + "!"); + }finally{ + close(rs); + close(stmt); + } + } + + private static void createFooTable(final String schemaName, final String tableName, final JDBCConnection conn){ + dropTable(schemaName, tableName, conn); + + Statement stmt = null; + ResultSet rs = null; + try{ + final boolean sCaseSensitive = conn.translator.isCaseSensitive(IdentifierField.SCHEMA); + final boolean tCaseSensitive = conn.translator.isCaseSensitive(IdentifierField.TABLE); + String tablePrefix = formatIdentifier(schemaName, sCaseSensitive); + if (!conn.supportsSchema || tablePrefix == null) + tablePrefix = ""; + else + tablePrefix += "."; + stmt = 
conn.connection.createStatement(); + stmt.executeUpdate("CREATE TABLE " + tablePrefix + formatIdentifier(tableName, tCaseSensitive) + " (ID integer);"); + }catch(Exception ex){ + ex.printStackTrace(System.err); + fail("{" + conn.ID + "} Impossible to prepare a test by: creating the table " + schemaName + "." + tableName + "!"); + }finally{ + close(rs); + close(stmt); + } + } + + private static TAPMetadata createTAPSchema(final JDBCConnection conn){ + dropSchema(STDSchema.TAPSCHEMA.label, conn); + + TAPMetadata metadata = new TAPMetadata(); + Statement stmt = null; + try{ + final boolean sCaseSensitive = conn.translator.isCaseSensitive(IdentifierField.SCHEMA); + final boolean tCaseSensitive = conn.translator.isCaseSensitive(IdentifierField.TABLE); + String[] tableNames = new String[]{STDTable.SCHEMAS.label,STDTable.TABLES.label,STDTable.COLUMNS.label,STDTable.KEYS.label,STDTable.KEY_COLUMNS.label}; + if (conn.supportsSchema){ + for(int i = 0; i < tableNames.length; i++) + tableNames[i] = formatIdentifier(STDSchema.TAPSCHEMA.label, sCaseSensitive) + "." 
+ formatIdentifier(tableNames[i], tCaseSensitive); + }else{ + for(int i = 0; i < tableNames.length; i++) + tableNames[i] = formatIdentifier(getTablePrefix(STDSchema.TAPSCHEMA.label) + tableNames[i], tCaseSensitive); + } + + startTransaction(conn); + + stmt = conn.connection.createStatement(); + + if (conn.supportsSchema) + stmt.executeUpdate("CREATE SCHEMA " + formatIdentifier(STDSchema.TAPSCHEMA.label, sCaseSensitive) + ";"); + + stmt.executeUpdate("CREATE TABLE " + tableNames[0] + "(\"schema_name\" VARCHAR,\"description\" VARCHAR,\"utype\" VARCHAR,\"dbname\" VARCHAR, PRIMARY KEY(\"schema_name\"));"); + stmt.executeUpdate("DELETE FROM " + tableNames[0] + ";"); + + stmt.executeUpdate("CREATE TABLE " + tableNames[1] + "(\"schema_name\" VARCHAR,\"table_name\" VARCHAR,\"table_type\" VARCHAR,\"description\" VARCHAR,\"utype\" VARCHAR,\"dbname\" VARCHAR, PRIMARY KEY(\"schema_name\", \"table_name\"));"); + stmt.executeUpdate("DELETE FROM " + tableNames[1] + ";"); + + stmt.executeUpdate("CREATE TABLE " + tableNames[2] + "(\"table_name\" VARCHAR,\"column_name\" VARCHAR,\"description\" VARCHAR,\"unit\" VARCHAR,\"ucd\" VARCHAR,\"utype\" VARCHAR,\"datatype\" VARCHAR,\"size\" INTEGER,\"principal\" INTEGER,\"indexed\" INTEGER,\"std\" INTEGER,\"dbname\" VARCHAR, PRIMARY KEY(\"table_name\", \"column_name\"));"); + stmt.executeUpdate("DELETE FROM " + tableNames[2] + ";"); + + stmt.executeUpdate("CREATE TABLE " + tableNames[3] + "(\"key_id\" VARCHAR,\"from_table\" VARCHAR,\"target_table\" VARCHAR,\"description\" VARCHAR,\"utype\" VARCHAR, PRIMARY KEY(\"key_id\"));"); + stmt.executeUpdate("DELETE FROM " + tableNames[3] + ";"); + + stmt.executeUpdate("CREATE TABLE " + tableNames[4] + "(\"key_id\" VARCHAR,\"from_column\" VARCHAR,\"target_column\" VARCHAR, PRIMARY KEY(\"key_id\"));"); + stmt.executeUpdate("DELETE FROM " + tableNames[4] + ";"); + + /*if (!conn.supportsSchema){ + TAPSchema stdSchema = TAPMetadata.getStdSchema(); + for(TAPTable t : stdSchema) + 
t.setDBName(getTablePrefix(STDSchema.TAPSCHEMA.label) + t.getADQLName()); + metadata.addSchema(stdSchema); + }else*/ + metadata.addSchema(TAPMetadata.getStdSchema(conn.supportsSchema)); + + ArrayList lstTables = new ArrayList(); + for(TAPSchema schema : metadata){ + stmt.executeUpdate("INSERT INTO " + tableNames[0] + " VALUES('" + schema.getADQLName() + "','" + schema.getDescription() + "','" + schema.getUtype() + "','" + schema.getDBName() + "')"); + for(TAPTable t : schema) + lstTables.add(t); + } + + ArrayList lstCols = new ArrayList(); + for(TAPTable table : lstTables){ + stmt.executeUpdate("INSERT INTO " + tableNames[1] + " VALUES('" + table.getADQLSchemaName() + "','" + table.getADQLName() + "','" + table.getType() + "','" + table.getDescription() + "','" + table.getUtype() + "','" + table.getDBName() + "')"); + for(DBColumn c : table) + lstCols.add(c); + + } + lstTables = null; + + for(DBColumn c : lstCols){ + TAPColumn col = (TAPColumn)c; + stmt.executeUpdate("INSERT INTO " + tableNames[2] + " VALUES('" + col.getTable().getADQLName() + "','" + col.getADQLName() + "','" + col.getDescription() + "','" + col.getUnit() + "','" + col.getUcd() + "','" + col.getUtype() + "','" + col.getDatatype().type + "'," + col.getDatatype().length + "," + (col.isPrincipal() ? 1 : 0) + "," + (col.isIndexed() ? 1 : 0) + "," + (col.isStd() ? 
1 : 0) + ",'" + col.getDBName() + "')"); + } + + commit(conn); + + }catch(Exception ex){ + rollback(conn); + ex.printStackTrace(System.err); + fail("{" + conn.ID + "} Impossible to prepare a test by: creating TAP_SCHEMA!"); + }finally{ + close(stmt); + } + + return metadata; + } + + private static void startTransaction(final JDBCConnection conn){ + try{ + conn.connection.setAutoCommit(false); + }catch(SQLException se){} + } + + private static void commit(final JDBCConnection conn){ + try{ + conn.connection.commit(); + conn.connection.setAutoCommit(true); + }catch(SQLException se){} + + } + + private static void rollback(final JDBCConnection conn){ + try{ + conn.connection.rollback(); + conn.connection.setAutoCommit(true); + }catch(SQLException se){} + + } + + private static String formatIdentifier(final String identifier, final boolean caseSensitive){ + if (identifier == null) + return null; + else if (identifier.charAt(0) == '"') + return identifier; + else if (caseSensitive) + return "\"" + identifier + "\""; + else + return identifier; + } + + private static boolean equals(final String name1, final String name2, final boolean caseSensitive){ + return (name1 != null && name2 != null && (caseSensitive ? 
name1.equals(name2) : name1.equalsIgnoreCase(name2))); + } + + private static boolean equals(final TAPTable table1, final TAPTable table2){ + if (table1 == null || table2 == null){ + //System.out.println("[EQUALS] tables null!"); + return false; + } + + if (!table1.getFullName().equals(table2.getFullName())){ + //System.out.println("[EQUALS] tables name different: " + table1.getFullName() + " != " + table2.getFullName() + "!"); + return false; + } + + if (table1.getType() != table2.getType()){ + //System.out.println("[EQUALS] tables type different: " + table1.getType() + " != " + table2.getType() + "!"); + return false; + } + + if (table1.getNbColumns() != table2.getNbColumns()){ + //System.out.println("[EQUALS] tables length different: " + table1.getNbColumns() + " columns != " + table2.getNbColumns() + " columns!"); + return false; + } + + Iterator it = table1.getColumns(); + while(it.hasNext()){ + TAPColumn col1 = it.next(); + if (!equals(col1, table2.getColumn(col1.getADQLName()))){ + //System.out.println("[EQUALS] tables columns different!"); + return false; + } + } + + return true; + } + + private static boolean equals(final TAPColumn col1, final TAPColumn col2){ + if (col1 == null || col2 == null){ + //System.out.println("[EQUALS] columns null!"); + return false; + } + + if (!col1.getADQLName().equals(col2.getADQLName())){ + //System.out.println("[EQUALS] columns name different: " + col1.getADQLName() + " != " + col2.getADQLName() + "!"); + return false; + } + + if (!equals(col1.getDatatype(), col2.getDatatype())){ + //System.out.println("[EQUALS] columns type different: " + col1.getDatatype() + " != " + col2.getDatatype() + "!"); + return false; + } + + if (col1.getUnit() != col2.getUnit()){ + //System.out.println("[EQUALS] columns unit different: " + col1.getUnit() + " != " + col2.getUnit() + "!"); + return false; + } + + if (col1.getUcd() != col2.getUcd()){ + //System.out.println("[EQUALS] columns ucd different: " + col1.getUcd() + " != " + col2.getUcd() 
+ "!"); + return false; + } + + return true; + } + + private static boolean equals(final DBType type1, final DBType type2){ + return type1 != null && type2 != null && type1.type == type2.type && type1.length == type2.length; + } + + private static TAPMetadata createCustomSchema(){ + TAPMetadata tapMeta = new TAPMetadata(); + TAPSchema tapSchema = new TAPSchema(STDSchema.TAPSCHEMA.toString()); + TAPTable customColumns = (TAPTable)TAPMetadata.getStdTable(STDTable.COLUMNS).copy("Columns", STDTable.COLUMNS.label); + customColumns.addColumn("TestNewColumn", new DBType(DBDatatype.VARCHAR), "This is a fake column, just for test purpose.", null, null, null); + tapSchema.addTable(customColumns); + TAPTable addTable = new TAPTable("AdditionalTable"); + addTable.addColumn("Blabla"); + addTable.addColumn("Foo"); + tapSchema.addTable(addTable); + tapMeta.addSchema(tapSchema); + return tapMeta; + } + + /** + *

    Get the expected counts after a call of {@link JDBCConnection#setTAPSchema(TAPMetadata)}.

    + * + *

    Counts are computed from the given metadata ; the same metadata that will be given to {@link JDBCConnection#setTAPSchema(TAPMetadata)}.

    + * + * @param meta + * + * @return An integer array with the following values: [0]=nbSchemas, [1]=nbTables, [2]=nbColumns, [3]=nbKeys and [4]=nbKeyColumns. + */ + private static int[] getStats(final TAPMetadata meta){ + int[] counts = new int[]{1,5,0,0,0}; + + int[] stdColCounts = new int[]{3,5,11,5,3}; + for(int c = 0; c < stdColCounts.length; c++) + counts[2] += stdColCounts[c]; + + Iterator itSchemas = meta.iterator(); + while(itSchemas.hasNext()){ + TAPSchema schema = itSchemas.next(); + + boolean isTapSchema = (schema.getADQLName().equalsIgnoreCase(STDSchema.TAPSCHEMA.toString())); + if (!isTapSchema) + counts[0]++; + + Iterator itTables = schema.iterator(); + while(itTables.hasNext()){ + TAPTable table = itTables.next(); + if (isTapSchema && TAPMetadata.resolveStdTable(table.getADQLName()) != null){ + int ind = pgJDBCConnection.getCreationOrder(TAPMetadata.resolveStdTable(table.getADQLName())); + counts[2] -= stdColCounts[ind]; + }else + counts[1]++; + + Iterator itColumns = table.iterator(); + while(itColumns.hasNext()){ + itColumns.next(); + counts[2]++; + } + + Iterator itKeys = table.getForeignKeys(); + while(itKeys.hasNext()){ + TAPForeignKey fk = itKeys.next(); + counts[3]++; + counts[4] += fk.getNbRelations(); + } + } + } + + return counts; + } + + /** + *

    Get the effective counts after a call of {@link JDBCConnection#setTAPSchema(TAPMetadata)}.

    + * + *

    Counts are computed directly from the DB using the given connection; the same connection used to set the TAP schema in {@link JDBCConnection#setTAPSchema(TAPMetadata)}.

    + * + * @param conn + * @param meta Metadata, in order to get the standard TAP tables' name. + * + * @return An integer array with the following values: [0]=nbSchemas, [1]=nbTables, [2]=nbColumns, [3]=nbKeys and [4]=nbKeyColumns. + */ + private static int[] getStats(final JDBCConnection conn, final TAPMetadata meta){ + int[] counts = new int[5]; + + Statement stmt = null; + try{ + stmt = conn.connection.createStatement(); + + TAPSchema tapSchema = meta.getSchema(STDSchema.TAPSCHEMA.toString()); + + String schemaPrefix = formatIdentifier(tapSchema.getDBName(), conn.translator.isCaseSensitive(IdentifierField.SCHEMA)); + if (!conn.supportsSchema || schemaPrefix == null) + schemaPrefix = ""; + else + schemaPrefix += "."; + + boolean tCaseSensitive = conn.translator.isCaseSensitive(IdentifierField.TABLE); + TAPTable tapTable = tapSchema.getTable(STDTable.SCHEMAS.toString()); + counts[0] = count(stmt, schemaPrefix + formatIdentifier(tapTable.getDBName(), tCaseSensitive), tapSchema.getADQLName() + "." + tapTable.getADQLName()); + + tapTable = tapSchema.getTable(STDTable.TABLES.toString()); + counts[1] = count(stmt, schemaPrefix + formatIdentifier(tapTable.getDBName(), tCaseSensitive), tapSchema.getADQLName() + "." + tapTable.getADQLName()); + + tapTable = tapSchema.getTable(STDTable.COLUMNS.toString()); + counts[2] = count(stmt, schemaPrefix + formatIdentifier(tapTable.getDBName(), tCaseSensitive), tapSchema.getADQLName() + "." + tapTable.getADQLName()); + + tapTable = tapSchema.getTable(STDTable.KEYS.toString()); + counts[3] = count(stmt, schemaPrefix + formatIdentifier(tapTable.getDBName(), tCaseSensitive), tapSchema.getADQLName() + "." + tapTable.getADQLName()); + + tapTable = tapSchema.getTable(STDTable.KEY_COLUMNS.toString()); + counts[4] = count(stmt, schemaPrefix + formatIdentifier(tapTable.getDBName(), tCaseSensitive), tapSchema.getADQLName() + "." 
+ tapTable.getADQLName()); + + }catch(SQLException se){ + fail("Can not create a statement!"); + }finally{ + try{ + if (stmt != null) + stmt.close(); + }catch(SQLException ex){} + } + return counts; + } + + private static int count(final Statement stmt, final String qualifiedTableName, final String adqlTableName){ + ResultSet rs = null; + try{ + rs = stmt.executeQuery("SELECT COUNT(*) FROM " + qualifiedTableName + ";"); + rs.next(); + return rs.getInt(1); + }catch(Exception e){ + e.printStackTrace(System.err); + fail("Can not count! Maybe " + qualifiedTableName + " (in ADQL: " + adqlTableName + ") does not exist."); + return -1; + }finally{ + close(rs); + } + } + + private static void close(final ResultSet rs){ + if (rs == null) + return; + try{ + rs.close(); + }catch(SQLException se){} + } + + private static void close(final Statement stmt){ + try{ + if (stmt != null) + stmt.close(); + }catch(SQLException se){} + } + + private static void close(final InputStream io){ + try{ + if (io != null) + io.close(); + }catch(IOException ioe){} + } + +} diff --git a/test/tap/db/TestTAPDb.db b/test/tap/db/TestTAPDb.db new file mode 100644 index 0000000000000000000000000000000000000000..c36006ea9746e081c41096322c5a83eae4b678d4 Binary files /dev/null and b/test/tap/db/TestTAPDb.db differ diff --git a/test/tap/db/upload_example.vot b/test/tap/db/upload_example.vot new file mode 100644 index 0000000000000000000000000000000000000000..84f83a82b2ad7be3337c342e79ed74c0cb971554 --- /dev/null +++ b/test/tap/db/upload_example.vot @@ -0,0 +1,75 @@ + + + + VizieR Astronomical Server vizier.u-strasbg.fr + Date: 2014-07-18T16:16:29 [V1.99+ (14-Oct-2013)] + Explanations and Statistics of UCDs: See LINK below + In case of problem, please report to: cds-question@unistra.fr + In this version, NULL integer columns are written as an empty string + <TD></TD>, explicitely possible from VOTable-1.3 + + + + + + Polarisation of Be stars (McDavid, 1986-1999) + + + Standard and Program Be stars + + + 
Right ascension (FK5, Equinox=J2000.0) (computed by VizieR, not part of the original data) + + + Declination (FK5, Equinox=J2000.0) (computed by VizieR, not part of the original data) + + + [p/s] Program or Standard star + + + Star name + + + HD (Cat. <III/135>) catalog number + + + BS (Cat. <V/50>) catalog number + + + Visual magnitude (BSC4, See Cat. <V/50>) + + + Right Ascension J2000 + + + Declination J2000 + + + MK Spectral type (1) + + + ? projected rotational velocity (1) [NULL integer written as an empty string] + + + + ask the {\bf\fg{FireBrick}Simbad} data-base about this object + + + + + + + + + + + + + +
    052.2671+59.9403s2H Cam2129110354.2303 29 04.1+59 56 25B9IaSimbad
    245.1587-24.1689somi Sco14708460814.5516 20 38.1-24 10 08A5IISimbad
    014.1758+60.7169pgam Cas53942642.4700 56 42.2+60 43 01B0.5IVe230Simbad
    025.9142+50.6889pphi Per105164964.0701 43 39.4+50 41 20B1.5(V:)e-shell400Simbad
    062.1646+47.7131p48 Per2594012734.0404 08 39.5+47 42 47B4Ve200Simbad
    084.4108+21.1428pzet Tau3720219103.0005 37 38.6+21 08 34B1IVe-shell220Simbad
    239.5471-14.2792p48 Lib14298359414.8815 58 11.3-14 16 45B3:IV:e-shell400Simbad
    246.7554-18.4558pchi Oph14818461184.4216 27 01.3-18 27 21B1.5Ve140Simbad
    336.3187+1.3772ppi Aqr21257185394.6622 25 16.5+01 22 38B1III-IVe300Simbad
    345.4796+42.3261pomi And21767587623.6223 01 55.1+42 19 34B6III260Simbad
    + + +
    +
    diff --git a/test/tap/formatter/JSONFormatTest.java b/test/tap/formatter/JSONFormatTest.java new file mode 100644 index 0000000000000000000000000000000000000000..33deaf6202fea1a070705207b0241a247b87e206 --- /dev/null +++ b/test/tap/formatter/JSONFormatTest.java @@ -0,0 +1,142 @@ +package tap.formatter; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.OutputStream; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.HashMap; + +import org.json.JSONObject; +import org.json.JSONTokener; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import tap.ServiceConnection; +import tap.TAPExecutionReport; +import tap.TAPJob; +import tap.data.ResultSetTableIterator; +import tap.data.TableIterator; +import tap.metadata.TAPColumn; +import tap.parameters.TAPParameters; +import testtools.DBTools; +import adql.db.DBType; +import adql.db.DBType.DBDatatype; + +/** + *

    Test the JSONFormat function {@link JSONFormat#writeResult(TableIterator, OutputStream, TAPExecutionReport, Thread)}.

    + * + *

    2 test ares done: 1 with an overflow and another without.

    + * + * @author Grégory Mantelet (ARI) + * @version 2.0 (07/2014) + */ +public class JSONFormatTest { + + private static Connection conn; + private static ServiceConnection serviceConn; + private static TAPColumn[] resultingColumns; + private static File jsonFile = new File("/home/gmantele/Desktop/json_test.json"); + + @BeforeClass + public static void setUpBeforeClass() throws Exception{ + conn = DBTools.createConnection("postgresql", "127.0.0.1", null, "gmantele", "gmantele", "pwd"); + serviceConn = new ServiceConnection4Test(); + + resultingColumns = new TAPColumn[4]; + resultingColumns[0] = new TAPColumn("ID", new DBType(DBDatatype.VARCHAR)); + resultingColumns[1] = new TAPColumn("ra", new DBType(DBDatatype.DOUBLE), "Right ascension", "deg", "pos.eq.ra", null); + resultingColumns[2] = new TAPColumn("deg", new DBType(DBDatatype.DOUBLE), "Declination", "deg", "pos.eq.dec", null); + resultingColumns[3] = new TAPColumn("gmag", new DBType(DBDatatype.DOUBLE), "G magnitude", "mag", "phot.mag;em.opt.B", null); + + if (!jsonFile.exists()) + jsonFile.createNewFile(); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception{ + DBTools.closeConnection(conn); + jsonFile.delete(); + } + + @Test + public void testWriteResult(){ + ResultSet rs = null; + try{ + rs = DBTools.select(conn, "SELECT id, ra, deg, gmag FROM gums LIMIT 10;"); + + HashMap tapParams = new HashMap(1); + tapParams.put(TAPJob.PARAM_MAX_REC, "100"); + TAPParameters params = new TAPParameters(serviceConn, tapParams); + TAPExecutionReport report = new TAPExecutionReport("123456A", true, params); + report.resultingColumns = resultingColumns; + + TableIterator it = new ResultSetTableIterator(rs); + + JSONFormat formatter = new JSONFormat(serviceConn); + OutputStream output = new BufferedOutputStream(new FileOutputStream(jsonFile)); + formatter.writeResult(it, output, report, Thread.currentThread()); + output.close(); + + JSONTokener tok = new JSONTokener(new FileInputStream(jsonFile)); 
+ JSONObject obj = (JSONObject)tok.nextValue(); + assertEquals(obj.getJSONArray("data").length(), 10); + + }catch(Exception t){ + t.printStackTrace(); + fail("Unexpected exception!"); + }finally{ + if (rs != null){ + try{ + rs.close(); + }catch(SQLException se){} + } + } + } + + @Test + public void testWriteResultWithOverflow(){ + ResultSet rs = null; + try{ + rs = DBTools.select(conn, "SELECT id, ra, deg, gmag FROM gums LIMIT 10;"); + + HashMap tapParams = new HashMap(1); + tapParams.put(TAPJob.PARAM_MAX_REC, "5"); + TAPParameters params = new TAPParameters(serviceConn, tapParams); + TAPExecutionReport report = new TAPExecutionReport("123456A", true, params); + report.resultingColumns = resultingColumns; + + TableIterator it = new ResultSetTableIterator(rs); + + JSONFormat formatter = new JSONFormat(serviceConn); + OutputStream output = new BufferedOutputStream(new FileOutputStream(jsonFile)); + formatter.writeResult(it, output, report, Thread.currentThread()); + output.close(); + + JSONTokener tok = new JSONTokener(new FileInputStream(jsonFile)); + JSONObject obj = (JSONObject)tok.nextValue(); + assertEquals(obj.getJSONArray("data").length(), 5); + + }catch(Exception t){ + t.printStackTrace(); + fail("Unexpected exception!"); + }finally{ + if (rs != null){ + try{ + rs.close(); + }catch(SQLException e){ + System.err.println("Can not close the RESULTSET!"); + e.printStackTrace(); + } + } + } + } + +} diff --git a/test/tap/formatter/SVFormatTest.java b/test/tap/formatter/SVFormatTest.java new file mode 100644 index 0000000000000000000000000000000000000000..933f9c999d7588992ab205bbe98943c44530708c --- /dev/null +++ b/test/tap/formatter/SVFormatTest.java @@ -0,0 +1,136 @@ +package tap.formatter; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileOutputStream; +import java.io.OutputStream; +import java.sql.Connection; +import java.sql.ResultSet; +import 
java.sql.SQLException; +import java.util.HashMap; + +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import tap.ServiceConnection; +import tap.TAPExecutionReport; +import tap.TAPJob; +import tap.data.ResultSetTableIterator; +import tap.data.TableIterator; +import tap.metadata.TAPColumn; +import tap.parameters.TAPParameters; +import testtools.CommandExecute; +import testtools.DBTools; +import adql.db.DBType; +import adql.db.DBType.DBDatatype; + +/** + *

    Test the SVFormat function {@link SVFormat#writeResult(TableIterator, OutputStream, TAPExecutionReport, Thread)}.

    + * + *

    2 test ares done: 1 with an overflow and another without.

    + * + * @author Grégory Mantelet (ARI) + * @version 2.0 (09/2014) + */ +public class SVFormatTest { + + private static Connection conn; + private static ServiceConnection serviceConn; + private static TAPColumn[] resultingColumns; + private static File svFile = new File("/home/gmantele/Desktop/sv_test.txt"); + + @BeforeClass + public static void setUpBeforeClass() throws Exception{ + conn = DBTools.createConnection("postgresql", "127.0.0.1", null, "gmantele", "gmantele", "pwd"); + serviceConn = new ServiceConnection4Test(); + + resultingColumns = new TAPColumn[4]; + resultingColumns[0] = new TAPColumn("ID", new DBType(DBDatatype.VARCHAR)); + resultingColumns[1] = new TAPColumn("ra", new DBType(DBDatatype.DOUBLE), "Right ascension", "deg", "pos.eq.ra", null); + resultingColumns[2] = new TAPColumn("deg", new DBType(DBDatatype.DOUBLE), "Declination", "deg", "pos.eq.dec", null); + resultingColumns[3] = new TAPColumn("gmag", new DBType(DBDatatype.DOUBLE), "G magnitude", "mag", "phot.mag;em.opt.B", null); + + if (!svFile.exists()) + svFile.createNewFile(); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception{ + DBTools.closeConnection(conn); + svFile.delete(); + } + + @Test + public void testWriteResult(){ + ResultSet rs = null; + try{ + rs = DBTools.select(conn, "SELECT id, ra, deg, gmag FROM gums LIMIT 10;"); + + HashMap tapParams = new HashMap(1); + tapParams.put(TAPJob.PARAM_MAX_REC, "100"); + TAPParameters params = new TAPParameters(serviceConn, tapParams); + TAPExecutionReport report = new TAPExecutionReport("123456A", true, params); + report.resultingColumns = resultingColumns; + + TableIterator it = new ResultSetTableIterator(rs); + + SVFormat formatter = new SVFormat(serviceConn, SVFormat.COMMA_SEPARATOR); + OutputStream output = new BufferedOutputStream(new FileOutputStream(svFile)); + formatter.writeResult(it, output, report, Thread.currentThread()); + output.close(); + + assertTrue(CommandExecute.execute("wc -l < \"" + 
svFile.getAbsolutePath() + "\"").trim().equals("11")); + + }catch(Exception t){ + t.printStackTrace(); + fail("Unexpected exception!"); + }finally{ + if (rs != null){ + try{ + rs.close(); + }catch(SQLException se){} + } + } + } + + @Test + public void testWriteResultWithOverflow(){ + ResultSet rs = null; + try{ + rs = DBTools.select(conn, "SELECT id, ra, deg, gmag FROM gums LIMIT 10;"); + + HashMap tapParams = new HashMap(1); + tapParams.put(TAPJob.PARAM_MAX_REC, "5"); + TAPParameters params = new TAPParameters(serviceConn, tapParams); + TAPExecutionReport report = new TAPExecutionReport("123456A", true, params); + report.resultingColumns = resultingColumns; + + TableIterator it = new ResultSetTableIterator(rs); + + SVFormat formatter = new SVFormat(serviceConn, SVFormat.COMMA_SEPARATOR); + OutputStream output = new BufferedOutputStream(new FileOutputStream(svFile)); + formatter.writeResult(it, output, report, Thread.currentThread()); + output.close(); + + assertTrue(CommandExecute.execute("wc -l < \"" + svFile.getAbsolutePath() + "\"").trim().equals("6")); + + }catch(Exception t){ + t.printStackTrace(); + fail("Unexpected exception!"); + }finally{ + if (rs != null){ + try{ + rs.close(); + }catch(SQLException e){ + System.err.println("Can not close the RESULTSET!"); + e.printStackTrace(); + } + } + } + } + +} diff --git a/test/tap/formatter/ServiceConnection4Test.java b/test/tap/formatter/ServiceConnection4Test.java new file mode 100644 index 0000000000000000000000000000000000000000..074448adc7051023bd40451f441a52e3094d0e17 --- /dev/null +++ b/test/tap/formatter/ServiceConnection4Test.java @@ -0,0 +1,139 @@ +package tap.formatter; + +import java.util.Collection; +import java.util.Iterator; + +import tap.ServiceConnection; +import tap.TAPFactory; +import tap.log.TAPLog; +import tap.metadata.TAPMetadata; +import uws.service.UserIdentifier; +import uws.service.file.UWSFileManager; +import adql.db.FunctionDef; + +public class ServiceConnection4Test implements 
ServiceConnection { + + @Override + public int[] getOutputLimit(){ + return new int[]{1000000,1000000}; + } + + @Override + public LimitUnit[] getOutputLimitType(){ + return new LimitUnit[]{LimitUnit.bytes,LimitUnit.bytes}; + } + + @Override + public String getProviderName(){ + return null; + } + + @Override + public String getProviderDescription(){ + return null; + } + + @Override + public boolean isAvailable(){ + return true; + } + + @Override + public String getAvailability(){ + return "AVAILABLE"; + } + + @Override + public int[] getRetentionPeriod(){ + return null; + } + + @Override + public int[] getExecutionDuration(){ + return null; + } + + @Override + public UserIdentifier getUserIdentifier(){ + return null; + } + + @Override + public boolean uploadEnabled(){ + return false; + } + + @Override + public int[] getUploadLimit(){ + return null; + } + + @Override + public LimitUnit[] getUploadLimitType(){ + return null; + } + + @Override + public int getMaxUploadSize(){ + return 0; + } + + @Override + public TAPMetadata getTAPMetadata(){ + return null; + } + + @Override + public Collection getCoordinateSystems(){ + return null; + } + + @Override + public Collection getGeometries(){ + return null; + } + + @Override + public Collection getUDFs(){ + return null; + } + + @Override + public TAPLog getLogger(){ + return null; + } + + @Override + public TAPFactory getFactory(){ + return null; + } + + @Override + public UWSFileManager getFileManager(){ + return null; + } + + @Override + public Iterator getOutputFormats(){ + return null; + } + + @Override + public OutputFormat getOutputFormat(String mimeOrAlias){ + return null; + } + + @Override + public int getNbMaxAsyncJobs(){ + return -1; + } + + @Override + public void setAvailable(boolean isAvailable, String message){} + + @Override + public int[] getFetchSize(){ + return null; + } + +} \ No newline at end of file diff --git a/test/tap/formatter/TextFormatTest.java b/test/tap/formatter/TextFormatTest.java new file 
mode 100644 index 0000000000000000000000000000000000000000..dfb266ef62b1288ef485e5ec1340841f88bfc0b4 --- /dev/null +++ b/test/tap/formatter/TextFormatTest.java @@ -0,0 +1,136 @@ +package tap.formatter; + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileOutputStream; +import java.io.OutputStream; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.HashMap; + +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import tap.ServiceConnection; +import tap.TAPExecutionReport; +import tap.TAPJob; +import tap.data.ResultSetTableIterator; +import tap.data.TableIterator; +import tap.metadata.TAPColumn; +import tap.parameters.TAPParameters; +import testtools.CommandExecute; +import testtools.DBTools; +import adql.db.DBType; +import adql.db.DBType.DBDatatype; + +/** + *

    Test the TestFormat function {@link TestFormat#writeResult(TableIterator, OutputStream, TAPExecutionReport, Thread)}.

    + * + *

    2 test ares done: 1 with an overflow and another without.

    + * + * @author Grégory Mantelet (ARI) + * @version 2.0 (09/2014) + */ +public class TextFormatTest { + + private static Connection conn; + private static ServiceConnection serviceConn; + private static TAPColumn[] resultingColumns; + private static File textFile = new File("/home/gmantele/Desktop/text_test.txt"); + + @BeforeClass + public static void setUpBeforeClass() throws Exception{ + conn = DBTools.createConnection("postgresql", "127.0.0.1", null, "gmantele", "gmantele", "pwd"); + serviceConn = new ServiceConnection4Test(); + + resultingColumns = new TAPColumn[4]; + resultingColumns[0] = new TAPColumn("ID", new DBType(DBDatatype.VARCHAR)); + resultingColumns[1] = new TAPColumn("ra", new DBType(DBDatatype.DOUBLE), "Right ascension", "deg", "pos.eq.ra", null); + resultingColumns[2] = new TAPColumn("deg", new DBType(DBDatatype.DOUBLE), "Declination", "deg", "pos.eq.dec", null); + resultingColumns[3] = new TAPColumn("gmag", new DBType(DBDatatype.DOUBLE), "G magnitude", "mag", "phot.mag;em.opt.B", null); + + if (!textFile.exists()) + textFile.createNewFile(); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception{ + DBTools.closeConnection(conn); + textFile.delete(); + } + + @Test + public void testWriteResult(){ + ResultSet rs = null; + try{ + rs = DBTools.select(conn, "SELECT id, ra, deg, gmag FROM gums LIMIT 10;"); + + HashMap tapParams = new HashMap(1); + tapParams.put(TAPJob.PARAM_MAX_REC, "100"); + TAPParameters params = new TAPParameters(serviceConn, tapParams); + TAPExecutionReport report = new TAPExecutionReport("123456A", true, params); + report.resultingColumns = resultingColumns; + + TableIterator it = new ResultSetTableIterator(rs); + + TextFormat formatter = new TextFormat(serviceConn); + OutputStream output = new BufferedOutputStream(new FileOutputStream(textFile)); + formatter.writeResult(it, output, report, Thread.currentThread()); + output.close(); + + assertTrue(CommandExecute.execute("wc -l < \"" + 
textFile.getAbsolutePath() + "\"").trim().equals("12")); + + }catch(Exception t){ + t.printStackTrace(); + fail("Unexpected exception!"); + }finally{ + if (rs != null){ + try{ + rs.close(); + }catch(SQLException se){} + } + } + } + + @Test + public void testWriteResultWithOverflow(){ + ResultSet rs = null; + try{ + rs = DBTools.select(conn, "SELECT id, ra, deg, gmag FROM gums LIMIT 10;"); + + HashMap tapParams = new HashMap(1); + tapParams.put(TAPJob.PARAM_MAX_REC, "5"); + TAPParameters params = new TAPParameters(serviceConn, tapParams); + TAPExecutionReport report = new TAPExecutionReport("123456A", true, params); + report.resultingColumns = resultingColumns; + + TableIterator it = new ResultSetTableIterator(rs); + + TextFormat formatter = new TextFormat(serviceConn); + OutputStream output = new BufferedOutputStream(new FileOutputStream(textFile)); + formatter.writeResult(it, output, report, Thread.currentThread()); + output.close(); + + assertTrue(CommandExecute.execute("wc -l < \"" + textFile.getAbsolutePath() + "\"").trim().equals("7")); + + }catch(Exception t){ + t.printStackTrace(); + fail("Unexpected exception!"); + }finally{ + if (rs != null){ + try{ + rs.close(); + }catch(SQLException e){ + System.err.println("Can not close the RESULTSET!"); + e.printStackTrace(); + } + } + } + } + +} diff --git a/test/tap/formatter/VOTableFormatTest.java b/test/tap/formatter/VOTableFormatTest.java new file mode 100644 index 0000000000000000000000000000000000000000..eb28fdeb5590fd15bef0ca63a2dc798e2e340578 --- /dev/null +++ b/test/tap/formatter/VOTableFormatTest.java @@ -0,0 +1,141 @@ +package tap.formatter; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileOutputStream; +import java.io.OutputStream; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.HashMap; + +import org.junit.AfterClass; +import 
org.junit.BeforeClass; +import org.junit.Test; + +import tap.ServiceConnection; +import tap.TAPExecutionReport; +import tap.TAPJob; +import tap.data.ResultSetTableIterator; +import tap.data.TableIterator; +import tap.metadata.TAPColumn; +import tap.parameters.TAPParameters; +import testtools.CommandExecute; +import testtools.DBTools; +import uk.ac.starlink.votable.DataFormat; +import adql.db.DBType; +import adql.db.DBType.DBDatatype; + +/** + *

    Test the VOTableFormat function {@link VOTableFormat#writeResult(TableIterator, OutputStream, TAPExecutionReport, Thread)}.

    + * + *

    2 test ares done: 1 with an overflow and another without.

    + * + * @author Grégory Mantelet (ARI) + * @version 2.0 (09/2014) + */ +public class VOTableFormatTest { + + private static Connection conn; + private static ServiceConnection serviceConn; + private static TAPColumn[] resultingColumns; + private static File votableFile = new File("/home/gmantele/Desktop/votable_test.xml"); + + @BeforeClass + public static void setUpBeforeClass() throws Exception{ + conn = DBTools.createConnection("postgresql", "127.0.0.1", null, "gmantele", "gmantele", "pwd"); + serviceConn = new ServiceConnection4Test(); + + resultingColumns = new TAPColumn[4]; + resultingColumns[0] = new TAPColumn("ID", new DBType(DBDatatype.VARCHAR)); + resultingColumns[1] = new TAPColumn("ra", new DBType(DBDatatype.DOUBLE), "Right ascension", "deg", "pos.eq.ra", null); + resultingColumns[2] = new TAPColumn("deg", new DBType(DBDatatype.DOUBLE), "Declination", "deg", "pos.eq.dec", null); + resultingColumns[3] = new TAPColumn("gmag", new DBType(DBDatatype.DOUBLE), "G magnitude", "mag", "phot.mag;em.opt.B", null); + + if (!votableFile.exists()) + votableFile.createNewFile(); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception{ + DBTools.closeConnection(conn); + votableFile.delete(); + } + + @Test + public void testWriteResult(){ + ResultSet rs = null; + try{ + rs = DBTools.select(conn, "SELECT id, ra, deg, gmag FROM gums LIMIT 10;"); + + HashMap tapParams = new HashMap(1); + tapParams.put(TAPJob.PARAM_MAX_REC, "100"); + TAPParameters params = new TAPParameters(serviceConn, tapParams); + TAPExecutionReport report = new TAPExecutionReport("123456A", true, params); + report.resultingColumns = resultingColumns; + + TableIterator it = new ResultSetTableIterator(rs); + + VOTableFormat formatter = new VOTableFormat(serviceConn, DataFormat.TABLEDATA); + OutputStream output = new BufferedOutputStream(new FileOutputStream(votableFile)); + formatter.writeResult(it, output, report, Thread.currentThread()); + output.close(); + + // note: due to 
the pipe (|), we must call /bin/sh as a command whose the command to execute in is the "grep ... | wc -l": + assertEquals("10", CommandExecute.execute("grep \"\" \"" + votableFile.getAbsolutePath() + "\" | wc -l").trim()); + assertEquals("0", CommandExecute.execute("grep \"\" \"" + votableFile.getAbsolutePath() + "\" | wc -l").trim()); + + }catch(Exception t){ + t.printStackTrace(); + fail("Unexpected exception!"); + }finally{ + if (rs != null){ + try{ + rs.close(); + }catch(SQLException se){} + } + } + } + + @Test + public void testWriteResultWithOverflow(){ + ResultSet rs = null; + try{ + rs = DBTools.select(conn, "SELECT id, ra, deg, gmag FROM gums LIMIT 10;"); + + HashMap tapParams = new HashMap(1); + tapParams.put(TAPJob.PARAM_MAX_REC, "5"); + TAPParameters params = new TAPParameters(serviceConn, tapParams); + TAPExecutionReport report = new TAPExecutionReport("123456A", true, params); + report.resultingColumns = resultingColumns; + + TableIterator it = new ResultSetTableIterator(rs); + + VOTableFormat formatter = new VOTableFormat(serviceConn, DataFormat.TABLEDATA); + OutputStream output = new BufferedOutputStream(new FileOutputStream(votableFile)); + formatter.writeResult(it, output, report, Thread.currentThread()); + output.close(); + + // note: due to the pipe (|), we must call /bin/sh as a command whose the command to execute in is the "grep ... 
| wc -l": + assertEquals("5", CommandExecute.execute("grep \"\" \"" + votableFile.getAbsolutePath() + "\" | wc -l").trim()); + assertEquals("1", CommandExecute.execute("grep \"\" \"" + votableFile.getAbsolutePath() + "\" | wc -l").trim()); + + }catch(Exception t){ + t.printStackTrace(); + fail("Unexpected exception!"); + }finally{ + if (rs != null){ + try{ + rs.close(); + }catch(SQLException e){ + System.err.println("Can not close the RESULTSET!"); + e.printStackTrace(); + } + } + } + } + +} diff --git a/test/tap/metadata/MetadataExtractionTest.java b/test/tap/metadata/MetadataExtractionTest.java new file mode 100644 index 0000000000000000000000000000000000000000..bc35a1270d951148469eb0ded360de4205e5478f --- /dev/null +++ b/test/tap/metadata/MetadataExtractionTest.java @@ -0,0 +1,147 @@ +package tap.metadata; + +/* + * This file is part of TAPLibrary. + * + * TAPLibrary is free software: you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * TAPLibrary is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public License + * along with TAPLibrary. If not, see . 
+ * + * Copyright 2014 - Astronomisches Rechen Institute (ARI) + */ + +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; + +import tap.metadata.TAPTable.TableType; + +/** + * @author Grégory Mantelet (ARI) - gmantele@ari.uni-heidelberg.de + * @version 1.1 (04/2014) + */ +public class MetadataExtractionTest { + + public static void main(String[] args) throws Throwable{ + MetadataExtractionTest extractor = new MetadataExtractionTest(); + try{ + extractor.connect(); + extractor.printTableMetadata("gums"); + }finally{ + extractor.close(); + } + } + + private Connection connection = null; + private Statement statement = null; + + public void connect(){ + try{ + Class.forName("org.postgresql.Driver"); + connection = DriverManager.getConnection("jdbc:postgresql:gmantele", "gmantele", "pwd"); + statement = connection.createStatement(); + System.out.println("[OK] DB connection successfully established !"); + }catch(ClassNotFoundException notFoundException){ + notFoundException.printStackTrace(); + System.err.println("[ERROR] Connection error !"); + }catch(SQLException sqlException){ + sqlException.printStackTrace(); + System.err.println("[ERROR] Connection error !"); + } + } + + public ResultSet query(String requet){ + ResultSet resultat = null; + try{ + resultat = statement.executeQuery(requet); + }catch(SQLException e){ + e.printStackTrace(); + System.out.println("Erreur dans la requête: " + requet); + } + return resultat; + + } + + public TAPSchema printTableMetadata(final String table){ + try{ + + DatabaseMetaData dbMeta = connection.getMetaData(); + TAPSchema tapSchema = null; + TAPTable tapTable = null; + + // Extract Table metadata (schema, table, type): + ResultSet rs = dbMeta.getTables(null, null, table, null); + rs.last(); + if (rs.getRow() == 0) + System.err.println("[ERROR] No found table for \"" + table + "\" !"); + else if 
(rs.getRow() > 1){ + rs.first(); + System.err.println("[ERROR] More than one match for \"" + table + "\":"); + while(rs.next()) + System.err.println(rs.getString(2) + "." + rs.getString(3) + " : " + rs.getString(4)); + }else{ + rs.first(); + tapSchema = new TAPSchema(rs.getString(2)); + TableType tableType = TableType.table; + if (rs.getString(4) != null){ + try{ + tableType = TableType.valueOf(rs.getString(4)); + }catch(IllegalArgumentException iae){} + } + tapTable = new TAPTable(rs.getString(3), tableType); + tapSchema.addTable(tapTable); + System.out.println("[OK] 1 table FOUND ! => " + tapTable + " : " + tapTable.getType()); + } + + // Extract all columns metadata (type, precision, scale): + rs = dbMeta.getColumns(null, tapSchema.getDBName(), tapTable.getDBName(), null); + String type; + while(rs.next()){ + type = rs.getString(6); + if (type.endsWith("char") || type.equals("numeric")){ + type += "(" + rs.getInt(7); + if (type.startsWith("numeric")) + type += "," + rs.getInt(9); + type += ")"; + } + System.out.println(" * " + rs.getString(4) + " : " + type); + } + + // Extract all indexed columns: + rs = dbMeta.getIndexInfo(null, tapSchema.getDBName(), tapTable.getDBName(), false, true); + while(rs.next()){ + System.out.println(" # " + rs.getString(6) + " : " + rs.getShort(7) + " (unique ? 
" + (!rs.getBoolean(4)) + ") -> " + rs.getString(9) + " => " + rs.getInt(11) + " unique values in the index ; " + rs.getInt(12) + " pages"); + } + + return tapSchema; + + }catch(SQLException e){ + e.printStackTrace(); + return null; + } + } + + public void close(){ + try{ + connection.close(); + statement.close(); + System.out.println("[OK] Connection closed !"); + }catch(SQLException e){ + e.printStackTrace(); + System.out.println("[ERROR] Connection CAN NOT be closed !"); + } + } + +} diff --git a/test/tap/metadata/TableSetParserTest.java b/test/tap/metadata/TableSetParserTest.java new file mode 100644 index 0000000000000000000000000000000000000000..ed42f401a4e80b69f6499a071cdb1aaee6070661 --- /dev/null +++ b/test/tap/metadata/TableSetParserTest.java @@ -0,0 +1,1162 @@ +package tap.metadata; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.io.StringBufferInputStream; +import java.util.ArrayList; + +import javax.xml.stream.XMLInputFactory; +import javax.xml.stream.XMLStreamConstants; +import javax.xml.stream.XMLStreamException; +import javax.xml.stream.XMLStreamReader; + +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import tap.TAPException; +import tap.metadata.TableSetParser.ForeignKey; +import adql.db.DBType; +import adql.db.DBType.DBDatatype; + +@SuppressWarnings("deprecation") +public class TableSetParserTest { + + private static TableSetParser parser = null; + private static XMLInputFactory factory = null; + + private static final String namespaceDef = "xmlns:vs=\"http://www.ivoa.net/xml/VODataService/v1.1\" xmlns:vtm=\"http://www.ivoa.net/xml/VOSITables/v1.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" 
xsi:schemaLocation=\"http://www.ivoa.net/xml/VODataService/v1.1 http://vo.ari.uni-heidelberg.de/docs/schemata/VODataService-v1.1.xsd http://www.ivoa.net/xml/VOSITables/v1.0 http://vo.ari.uni-heidelberg.de/docs/schemata/VOSITables-v1.0.xsd\""; + + @BeforeClass + public static void setUpBeforeClass() throws Exception{ + // Build an empty parser: + parser = new TableSetParser(); + + // Build the XML factory: + factory = XMLInputFactory.newInstance(); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception{} + + @Before + public void setUp() throws Exception{} + + @After + public void tearDown() throws Exception{} + + private static XMLStreamReader buildReader(final String xmlContent) throws XMLStreamException{ + return factory.createXMLStreamReader(new StringBufferInputStream(xmlContent)); + } + + private static void close(final XMLStreamReader reader){ + if (reader != null){ + try{ + reader.close(); + }catch(Throwable t){} + } + } + + @Test + public void testGetPosition(){ + XMLStreamReader reader = null; + try{ + + // Build a reader with an empty XML document: + reader = buildReader(""); + assertEquals("[l.1,c.1]", parser.getPosition(reader)); + // note: reader.next() is throwing an error on an empty document => no need to test that. 
+ close(reader); + + // Build a reader with a simple XML: + reader = buildReader("node value"); + + // Position before starting reading: + assertEquals("[l.1,c.1]", parser.getPosition(reader)); + + // Position after getting the node: + reader.next(); // START_ELEMENT("A") + assertEquals("[l.1,c.23]", parser.getPosition(reader)); + // The position after getting an attribute should not change: + reader.getAttributeLocalName(0); // ATTRIBUTE("attrValue") + assertEquals("[l.1,c.23]", parser.getPosition(reader)); + + // Position after getting the text: + reader.next(); // CHARACTERS("node value") + assertEquals("[l.1,c.35]", parser.getPosition(reader)); + + // Position after getting the node ending tag: + reader.next(); // END_ELEMENT("A") + assertEquals("[l.1,c.37]", parser.getPosition(reader)); + + // Position once the end reached: + reader.next(); // NULL + assertEquals("[l.-1,c.-1]", parser.getPosition(reader)); + + }catch(Exception e){ + e.printStackTrace(); + if (e instanceof XMLStreamException) + fail("Unexpected error while reading the XML content: " + e.getMessage()); + else + fail("Unexpected error: " + e.getMessage()); + }finally{ + close(reader); + } + } + + @Test + public void testGoToEndTag(){ + XMLStreamReader reader = null; + try{ + + /* Test with a single empty node AND WITH NULL or "" + * => NO TAG SHOULD HAVE BEEN READ: */ + // CASE: null + reader = buildReader(""); + parser.goToEndTag(reader, null); + assertEquals("[l.1,c.1]", parser.getPosition(reader)); + close(reader); + // CASE: empty string + reader = buildReader(""); + parser.goToEndTag(reader, ""); + assertEquals("[l.1,c.1]", parser.getPosition(reader)); + close(reader); + + /* Test BEFORE having read the start element: + * => AN EXCEPTION SHOULD BE THROWN */ + reader = buildReader(""); + try{ + parser.goToEndTag(reader, "A"); + fail("This function should have failed: the START ELEMENT has not yet been read!"); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals("[l.-1,c.-1] 
Malformed XML document: missing an END TAG !", e.getMessage()); + else + throw e; + } + close(reader); + + /* Test AFTER having read the start element: + * => NORMAL USAGE */ + reader = buildReader(""); + reader.next(); // START ELEMENT("A") + parser.goToEndTag(reader, "A"); + assertEquals("[l.1,c.8]", parser.getPosition(reader)); + close(reader); + + /* Test AFTER having read the start element: + * => NORMAL USAGE with an embedded node */ + // search for the root node end: + reader = buildReader(""); + reader.next(); // START ELEMENT("A") + parser.goToEndTag(reader, "A"); + assertEquals("[l.1,c.15]", parser.getPosition(reader)); + close(reader); + // variant with some texts: + reader = buildReader("super blabla"); + reader.next(); // START ELEMENT("A") + parser.goToEndTag(reader, "A"); + assertEquals("[l.1,c.27]", parser.getPosition(reader)); + close(reader); + // variant with some texts + child node: + reader = buildReader("superblabla"); + reader.next(); // START ELEMENT("A") + parser.goToEndTag(reader, "A"); + assertEquals("[l.1,c.33]", parser.getPosition(reader)); + close(reader); + // search for the child node end: + reader = buildReader(""); + reader.next(); // START ELEMENT("A") + reader.next(); // START ELEMENT("B") + parser.goToEndTag(reader, "B"); + assertEquals("[l.1,c.11]", parser.getPosition(reader)); + close(reader); + // variant with some texts: + reader = buildReader("super blabla"); + reader.next(); // START ELEMENT("A") + reader.next(); // START ELEMENT("B") + parser.goToEndTag(reader, "B"); + assertEquals("[l.1,c.23]", parser.getPosition(reader)); + close(reader); + // variant with some texts + child node: + reader = buildReader("superblabla"); + reader.next(); // START ELEMENT("A") + reader.next(); // START ELEMENT("B") + parser.goToEndTag(reader, "B"); + assertEquals("[l.1,c.29]", parser.getPosition(reader)); + close(reader); + + // Test: Search the end tag while the reader is inside one of its children: + reader = buildReader("superblabla"); 
+ reader.next(); // START ELEMENT("A") + reader.next(); // START ELEMENT("B") + parser.goToEndTag(reader, "A"); + assertEquals("[l.1,c.33]", parser.getPosition(reader)); + close(reader); + + // Test with a wrong start node name: + reader = buildReader(""); + reader.next(); // START ELEMENT("A") + try{ + parser.goToEndTag(reader, "B"); + fail("This function should have failed: the given node name is wrong (no such node in the XML document)!"); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals("[l.-1,c.-1] Malformed XML document: missing an END TAG !", e.getMessage()); + else + throw e; + } + close(reader); + + // Test with malformed XML document: + // CASE: missing end tag for the root node: + reader = buildReader(""); + reader.next(); // START ELEMENT("A") + try{ + parser.goToEndTag(reader, "A"); + fail("This function should have failed: the node A has no END TAG!"); + }catch(Exception e){ + if (e instanceof XMLStreamException) + assertEquals("ParseError at [row,col]:[1,11]\nMessage: XML document structures must start and end within the same entity.", e.getMessage()); + else + throw e; + } + close(reader); + // CASE: missing end tag for a child: + reader = buildReader(""); + reader.next(); // START ELEMENT("A") + try{ + parser.goToEndTag(reader, "A"); + fail("This function should have failed: the node B has no END TAG!"); + }catch(Exception e){ + if (e instanceof XMLStreamException) + assertEquals("ParseError at [row,col]:[1,9]\nMessage: The element type \"B\" must be terminated by the matching end-tag \"\".", e.getMessage()); + else + throw e; + } + close(reader); + // CASE: missing end tag for the child to search: + reader = buildReader(""); + reader.next(); // START ELEMENT("A") + reader.next(); // START ELEMENT("B") + try{ + parser.goToEndTag(reader, "B"); + fail("This function should have failed: the node B has no END TAG!"); + }catch(Exception e){ + if (e instanceof XMLStreamException) + assertEquals("ParseError at 
[row,col]:[1,9]\nMessage: The element type \"B\" must be terminated by the matching end-tag \"\".", e.getMessage()); + else + throw e; + } + close(reader); + + }catch(Exception e){ + e.printStackTrace(); + if (e instanceof XMLStreamException) + fail("Unexpected error while reading the XML content: " + e.getMessage()); + else + fail("Unexpected error: " + e.getMessage()); + }finally{ + close(reader); + } + } + + @Test + public void testGetText(){ + XMLStreamReader reader = null; + String txt; + try{ + + // Test with a simple XML and an empty text: + reader = buildReader(""); + txt = parser.getText(reader); + assertEquals(0, txt.length()); + assertEquals("[l.1,c.4]", parser.getPosition(reader)); + assertEquals(XMLStreamConstants.START_ELEMENT, reader.getEventType()); + close(reader); + // variant with spaces and tabs: + reader = buildReader(" "); + txt = parser.getText(reader); + assertEquals(0, txt.length()); + assertEquals("[l.1,c.8]", parser.getPosition(reader)); + assertEquals(XMLStreamConstants.START_ELEMENT, reader.getEventType()); + close(reader); + // variant with line returns: + reader = buildReader(" \n "); + txt = parser.getText(reader); + assertEquals(0, txt.length()); + assertEquals("[l.2,c.5]", parser.getPosition(reader)); + assertEquals(XMLStreamConstants.START_ELEMENT, reader.getEventType()); + close(reader); + + // Test with a single line text: + reader = buildReader(" Super blabla "); + reader.next(); // START ELEMENT("A") + txt = parser.getText(reader); + assertEquals("Super blabla", txt); + assertEquals("[l.1,c.27]", parser.getPosition(reader)); + assertEquals(XMLStreamConstants.END_ELEMENT, reader.getEventType()); + close(reader); + // variant with CDATA: + reader = buildReader(" Super "); + reader.next(); // START ELEMENT("A") + txt = parser.getText(reader); + assertEquals("Super blabla", txt); + assertEquals("[l.1,c.39]", parser.getPosition(reader)); + assertEquals(XMLStreamConstants.END_ELEMENT, reader.getEventType()); + close(reader); + + // 
Test with a text of 2 lines: + reader = buildReader(" Super \n blabla "); + reader.next(); // START ELEMENT("A") + txt = parser.getText(reader); + assertEquals("Super\nblabla", txt); + assertEquals("[l.2,c.18]", parser.getPosition(reader)); + assertEquals(XMLStreamConstants.END_ELEMENT, reader.getEventType()); + close(reader); + // same test but with an empty line between both: + reader = buildReader(" Super \n \n blabla "); + reader.next(); // START ELEMENT("A") + txt = parser.getText(reader); + assertEquals("Super\n\nblabla", txt); + assertEquals("[l.3,c.18]", parser.getPosition(reader)); + assertEquals(XMLStreamConstants.END_ELEMENT, reader.getEventType()); + close(reader); + // same test but starting with an empty line: + reader = buildReader("\n Super \n bla bla "); + reader.next(); // START ELEMENT("A") + txt = parser.getText(reader); + assertEquals("Super\nbla bla", txt); + assertEquals("[l.3,c.20]", parser.getPosition(reader)); + assertEquals(XMLStreamConstants.END_ELEMENT, reader.getEventType()); + close(reader); + // same test but a comment splitting a text part: + reader = buildReader(" Super \n bla bla "); + reader.next(); // START ELEMENT("A") + txt = parser.getText(reader); + assertEquals("Super\nbla bla", txt); + assertEquals("[l.2,c.44]", parser.getPosition(reader)); + assertEquals(XMLStreamConstants.END_ELEMENT, reader.getEventType()); + close(reader); + + }catch(Exception e){ + e.printStackTrace(); + if (e instanceof XMLStreamException) + fail("Unexpected error while reading the XML content: " + e.getMessage()); + else + fail("Unexpected error: " + e.getMessage()); + }finally{ + close(reader); + } + } + + @Test + public void testSearchTable(){ + try{ + + // Create fake metadata: + TAPMetadata meta = new TAPMetadata(); + TAPSchema schema = new TAPSchema("SA"); + schema.addTable("TA"); + schema.addTable("TB"); + meta.addSchema(schema); + schema = new TAPSchema("SB"); + schema.addTable("TB"); + meta.addSchema(schema); + + // Create a fake position: + 
final String pos = "[l.10,c.1]"; + + // Search for an existing table WITHOUT SCHEMA specification: + TAPTable t = parser.searchTable("TA", meta, pos); + assertEquals("TA", t.getADQLName()); + assertEquals("SA", t.getADQLSchemaName()); + // variant with a different case: + t = parser.searchTable("ta", meta, pos); + assertEquals("TA", t.getADQLName()); + assertEquals("SA", t.getADQLSchemaName()); + + // Search for an existing table WITH SCHEMA specification: + t = parser.searchTable("SA.TA", meta, pos); + assertEquals("TA", t.getADQLName()); + assertEquals("SA", t.getADQLSchemaName()); + // variant with a different case: + t = parser.searchTable("sa.ta", meta, pos); + assertEquals("TA", t.getADQLName()); + assertEquals("SA", t.getADQLSchemaName()); + + // Search with a wrong table name: + try{ + parser.searchTable("TC", meta, pos); + fail("This test should have not failed: there is no table named TC in the given metadata."); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals(pos + " Unknown table: \"TC\"!", e.getMessage()); + else + throw e; + } + // variant with a correct schema name: + try{ + parser.searchTable("SA.TC", meta, pos); + fail("This test should have not failed: there is no table named SA.TC in the given metadata."); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals(pos + " Unknown table: \"SA.TC\"!", e.getMessage()); + else + throw e; + } + + // Search with a wrong schema name: + try{ + parser.searchTable("SC.TB", meta, pos); + fail("This test should have not failed: there is no schema named SC in the given metadata."); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals(pos + " Unknown table: \"SC.TB\"!", e.getMessage()); + else + throw e; + } + + // Search with an ambiguous table name (missing schema name): + try{ + parser.searchTable("TB", meta, pos); + fail("This test should have not failed: there are two table named TB ; a schema name is required to choose the table to select."); + 
}catch(Exception e){ + if (e instanceof TAPException) + assertEquals(pos + " Unresolved table: \"TB\"! Several tables have the same name but in different schemas (here: SA.TB, SB.TB). You must prefix this table name by a schema name (expected syntax: \"schema.table\").", e.getMessage()); + else + throw e; + } + + // Provide a schema + table name with a wrong syntax (missing table name or schema name): + try{ + parser.searchTable(".TB", meta, pos); + fail("This test should have not failed: the schema name is missing before the '.'."); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals(pos + " Incorrect full table name - \".TB\": empty schema name!", e.getMessage()); + else + throw e; + } + try{ + parser.searchTable("SB.", meta, pos); + fail("This test should have not failed: the table name is missing after the '.'."); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals(pos + " Incorrect full table name - \"SB.\": empty table name!", e.getMessage()); + else + throw e; + } + try{ + parser.searchTable("toto.SB.TB", meta, pos); + fail("This test should have not failed: the table name is missing after the '.'."); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals(pos + " Incorrect full table name - \"toto.SB.TB\": only a schema and a table name can be specified (expected syntax: \"schema.table\")\"!", e.getMessage()); + else + throw e; + } + + }catch(Exception e){ + e.printStackTrace(); + fail("Unexpected error: " + e.getMessage()); + } + } + + @Test + public void testParseFKey(){ + XMLStreamReader reader = null; + try{ + + // Test while search outside from the foreignKey node: + reader = buildReader("SA.TBtruc.choseForeign key\ndescription.col1col2
    "); + reader.next(); // START ELEMENT("table") + try{ + parser.parseFKey(reader); + fail("This test should have failed: the reader has not just read the \"foreignKey\" START ELEMENT tag."); + }catch(Exception e){ + if (e instanceof IllegalStateException) + assertEquals("[l.1,c.8] Illegal usage of TableSetParser.parseFKey(XMLStreamParser)! This function can be called only when the reader has just read the START ELEMENT tag \"foreignKey\".", e.getMessage()); + else + throw e; + }finally{ + close(reader); + } + + // Test with a complete and correct XML foreignKey node: + reader = buildReader("SA.TBtruc.choseForeign key\ndescription.col1col2"); + reader.next(); // START ELEMENT("foreignKey") + ForeignKey fk = parser.parseFKey(reader); + assertEquals("SA.TB", fk.targetTable); + assertEquals("[l.1,c.45]", fk.targetTablePosition); + assertEquals("truc.chose", fk.utype); + assertEquals("Foreign key\ndescription.", fk.description); + assertEquals(1, fk.keyColumns.size()); + assertEquals("col2", fk.keyColumns.get("col1")); + close(reader); + // variant with some comments: + reader = buildReader("SA.TBForeign key\ndescription.col1col2"); + reader.next(); // START ELEMENT("foreignKey") + fk = parser.parseFKey(reader); + assertEquals("SA.TB", fk.targetTable); + assertEquals("Foreign key\ndescription.", fk.description); + assertEquals(1, fk.keyColumns.size()); + assertEquals("col2", fk.keyColumns.get("col1")); + close(reader); + // variant with texts at unapropriate places: + reader = buildReader("Here, we are SA.TBForeign key\ndescription.col1col2Here is the end!Nothing more!"); + reader.next(); // START ELEMENT("foreignKey") + fk = parser.parseFKey(reader); + assertEquals("SA.TB", fk.targetTable); + assertEquals("Foreign key\ndescription.", fk.description); + assertEquals(1, fk.keyColumns.size()); + assertEquals("col2", fk.keyColumns.get("col1")); + close(reader); + + // Test with a missing targetTable: + reader = buildReader("Foreign key\ndescription.col1col2"); + 
reader.next(); // START ELEMENT("foreignKey") + try{ + parser.parseFKey(reader); + fail("This test should have failed: the targetTable node is missing!"); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals("[l.2,c.123] Missing \"targetTable\"!", e.getMessage()); + }finally{ + close(reader); + } + // variant with duplicated targetTable: + reader = buildReader("SA.TBSA.TAForeign key\ndescription.col1col2"); + reader.next(); // START ELEMENT("foreignKey") + try{ + parser.parseFKey(reader); + fail("This test should have failed: the targetTable node is duplicated!"); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals("[l.1,c.58] Only one \"targetTable\" element can exist in a /tableset/schema/table/foreignKey!", e.getMessage()); + }finally{ + close(reader); + } + + // Test with a missing fkColumn: + reader = buildReader("SA.TBForeign key\ndescription."); + reader.next(); // START ELEMENT("foreignKey") + try{ + parser.parseFKey(reader); + fail("This test should have failed: at least 1 fkColumn node is missing!"); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals("[l.2,c.40] Missing at least one \"fkColumn\"!", e.getMessage()); + }finally{ + close(reader); + } + // variant with several fkColumn: + reader = buildReader("SA.TBForeign key\ndescription.col1col2col3col4"); + reader.next(); // START ELEMENT("foreignKey") + fk = parser.parseFKey(reader); + assertEquals("SA.TB", fk.targetTable); + assertEquals("Foreign key\ndescription.", fk.description); + assertEquals(2, fk.keyColumns.size()); + assertEquals("col2", fk.keyColumns.get("col1")); + assertEquals("col4", fk.keyColumns.get("col3")); + close(reader); + + // Test with a missing fromColumn: + reader = buildReader("SA.TBcol2"); + reader.next(); // START ELEMENT("foreignKey") + try{ + parser.parseFKey(reader); + fail("This test should have failed: the fromColumn node is missing!"); + }catch(Exception e){ + if (e instanceof TAPException) + 
assertEquals("[l.1,c.99] Missing \"fromColumn\"!", e.getMessage()); + }finally{ + close(reader); + } + // variant with several fromColumn: + reader = buildReader("SA.TBcol1col1biscol2"); + reader.next(); // START ELEMENT("foreignKey") + try{ + parser.parseFKey(reader); + fail("This test should have failed: sereval fromColumn are found!"); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals("[l.1,c.96] Only one \"fromColumn\" element can exist in a /tableset/schema/table/foreignKey/fkColumn !", e.getMessage()); + }finally{ + close(reader); + } + + // Test with a missing targetColumn: + reader = buildReader("SA.TBcol1"); + reader.next(); // START ELEMENT("foreignKey") + try{ + parser.parseFKey(reader); + fail("This test should have failed: the targetColumn node is missing!"); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals("[l.1,c.95] Missing \"targetColumn\"!", e.getMessage()); + }finally{ + close(reader); + } + // variant with several fromColumn: + reader = buildReader("SA.TBcol1col2col2bis"); + reader.next(); // START ELEMENT("foreignKey") + try{ + parser.parseFKey(reader); + fail("This test should have failed: several targetColumn are found!"); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals("[l.1,c.131] Only one \"targetColumn\" element can exist in a /tableset/schema/table/foreignKey/fkColumn !", e.getMessage()); + }finally{ + close(reader); + } + + // Test with a additional node: + reader = buildReader("blablaanythingSA.TBcol1col2"); + reader.next(); // START ELEMENT("foreignKey") + fk = parser.parseFKey(reader); + assertEquals("SA.TB", fk.targetTable); + assertNull(fk.description); + assertEquals(1, fk.keyColumns.size()); + assertEquals("col2", fk.keyColumns.get("col1")); + close(reader); + + }catch(Exception e){ + e.printStackTrace(); + if (e instanceof XMLStreamException) + fail("Unexpected error while reading the XML content: " + e.getMessage()); + else + fail("Unexpected error: " + 
e.getMessage()); + }finally{ + close(reader); + } + } + + @Test + public void testParseDataType(){ + XMLStreamReader reader = null; + try{ + + // Test while search outside from the dataType node: + reader = buildReader("char"); + reader.next(); // START ELEMENT("column") + try{ + parser.parseDataType(reader); + fail("This test should have failed: the reader has not just read the \"dataType\" START ELEMENT tag."); + }catch(Exception e){ + if (e instanceof IllegalStateException) + assertEquals("[l.1,c.408] Illegal usage of TableSetParser.parseDataType(XMLStreamParser)! This function can be called only when the reader has just read the START ELEMENT tag \"dataType\".", e.getMessage()); + else + throw e; + }finally{ + close(reader); + } + + // Test with a correct TAP type: + reader = buildReader("varchar"); + reader.next(); // START ELEMENT("column") + reader.next(); // START ELEMENT("dataType") + DBType dt = parser.parseDataType(reader); + assertEquals(DBDatatype.VARCHAR, dt.type); + assertEquals(-1, dt.length); + close(reader); + + // Test with a correct VOTable type: + reader = buildReader("char"); + reader.next(); // START ELEMENT("dataType") + dt = parser.parseDataType(reader); + assertEquals(DBDatatype.VARCHAR, dt.type); + assertEquals(-1, dt.length); + close(reader); + + // Test with a missing xsi:type: + reader = buildReader("char"); + reader.next(); // START ELEMENT("dataType") + try{ + parser.parseDataType(reader); + fail("This test should have failed: the attribute xsi:type is missing!"); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals("[l.1,c.424] Missing attribute \"xsi:type\" (where xsi = \"" + TableSetParser.XSI_NAMESPACE + "\")! 
Expected attribute value: vs:VOTableType or vs:TAPType, where vs = " + TableSetParser.VODATASERVICE_NAMESPACE + ".", e.getMessage()); + else + throw e; + }finally{ + close(reader); + } + // variant with a wrong namespace prefix + reader = buildReader("char"); + try{ + reader.next(); // START ELEMENT("dataType") + fail("This test should have failed: the prefix of the xsi:type attribute is wrong!"); + }catch(Exception e){ + if (e instanceof XMLStreamException) + assertEquals("ParseError at [row,col]:[1,450]\nMessage: http://www.w3.org/TR/1999/REC-xml-names-19990114#AttributePrefixUnbound?dataType&xsj:type&xsj", e.getMessage()); + else + throw e; + }finally{ + close(reader); + } + // variant with a missing namespace prefix: + reader = buildReader("char"); + try{ + reader.next(); // START ELEMENT("dataType") + fail("This test should have failed: the namespace xsi is not defined!"); + }catch(Exception e){ + if (e instanceof XMLStreamException) + assertEquals("ParseError at [row,col]:[1,51]\nMessage: http://www.w3.org/TR/1999/REC-xml-names-19990114#AttributePrefixUnbound?dataType&xsi:type&xsi", e.getMessage()); + else + throw e; + }finally{ + close(reader); + } + + // Test with an unsupported xsi:type: + reader = buildReader("char"); + reader.next(); // START ELEMENT("dataType") + try{ + parser.parseDataType(reader); + fail("This test should have failed: the type foo is not defined in VODataService!"); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals("[l.1,c.457] Unsupported type: \"vs:foo\"! 
Expected: vs:VOTableType or vs:TAPType, where vs = " + TableSetParser.VODATASERVICE_NAMESPACE + ".", e.getMessage()); + else + throw e; + }finally{ + close(reader); + } + // variant with no namespace prefix in front of the wrong type: + reader = buildReader("char"); + reader.next(); // START ELEMENT("dataType") + try{ + parser.parseDataType(reader); + fail("This test should have failed: the namespace prefix is missing in the value of xsi:type!"); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals("[l.1,c.439] Unresolved type: \"foo\"! Missing namespace prefix.", e.getMessage()); + else + throw e; + }finally{ + close(reader); + } + + // Test with a missing datatype: + reader = buildReader(""); + reader.next(); // START ELEMENT("dataType") + try{ + parser.parseDataType(reader); + fail("This test should have failed: the datatype value is missing!"); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals("[l.1,c.443] Missing column datatype!", e.getMessage()); + else + throw e; + }finally{ + close(reader); + } + // variant with a wrong datatype: + reader = buildReader("foo"); + reader.next(); // START ELEMENT("dataType") + try{ + parser.parseDataType(reader); + fail("This test should have failed: the datatype value is unknown!"); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals("[l.1,c.446] Unknown TAPType: \"foo\"!", e.getMessage()); + else + throw e; + }finally{ + close(reader); + } + + }catch(Exception e){ + e.printStackTrace(); + if (e instanceof XMLStreamException) + fail("Unexpected error while reading the XML content: " + e.getMessage()); + else + fail("Unexpected error: " + e.getMessage()); + }finally{ + close(reader); + } + } + + @Test + public void testParseColumn(){ + XMLStreamReader reader = null; + try{ + + // Test while search outside from the column node: + reader = buildReader("col1Column\ndescription.SMALLINTtruc.choset.cdegnullableprimaryindexed
    "); + reader.next(); // START ELEMENT("table") + try{ + parser.parseColumn(reader); + fail("This test should have failed: the reader has not just read the \"column\" START ELEMENT tag."); + }catch(Exception e){ + if (e instanceof IllegalStateException) + assertEquals("[l.1,c.407] Illegal usage of TableSetParser.parseColumn(XMLStreamParser)! This function can be called only when the reader has just read the START ELEMENT tag \"column\".", e.getMessage()); + else + throw e; + }finally{ + close(reader); + } + + // Test with a complete and correct XML column node: + reader = buildReader("col1Column\ndescription.SMALLINTtruc.choset.cdegnullableprimaryindexed
    "); + reader.next(); // START ELEMENT("table") + reader.next(); // START ELEMENT("column") + TAPColumn col = parser.parseColumn(reader); + assertEquals("col1", col.getADQLName()); + assertEquals("Column\ndescription.", col.getDescription()); + assertEquals(DBDatatype.SMALLINT, col.getDatatype().type); + assertEquals(-1, col.getDatatype().length); + assertEquals("truc.chose", col.getUtype()); + assertEquals("t.c", col.getUcd()); + assertEquals("deg", col.getUnit()); + assertTrue(col.isIndexed()); + assertTrue(col.isPrincipal()); + assertTrue(col.isNullable()); + assertTrue(col.isStd()); + close(reader); + // variant with entering inside the foreignKey node (as it is done by TableSetParser): + reader = buildReader("col1Column\ndescription.SMALLINTtruc.choset.cdeg"); + reader.next(); // START ELEMENT("column") + col = parser.parseColumn(reader); + assertEquals("col1", col.getADQLName()); + assertEquals("Column\ndescription.", col.getDescription()); + assertEquals(DBDatatype.SMALLINT, col.getDatatype().type); + assertEquals(-1, col.getDatatype().length); + assertEquals("truc.chose", col.getUtype()); + assertEquals("t.c", col.getUcd()); + assertEquals("deg", col.getUnit()); + assertFalse(col.isIndexed()); + assertFalse(col.isPrincipal()); + assertFalse(col.isNullable()); + assertFalse(col.isStd()); + close(reader); + // variant with some comments: + reader = buildReader("col1Column\ndescription.SMALLINTtruc.choset.cdeg"); + reader.next(); // START ELEMENT("column") + col = parser.parseColumn(reader); + assertEquals("col1", col.getADQLName()); + assertEquals("Column\ndescription.", col.getDescription()); + assertEquals(DBDatatype.SMALLINT, col.getDatatype().type); + assertEquals(-1, col.getDatatype().length); + assertEquals("truc.chose", col.getUtype()); + assertEquals("t.c", col.getUcd()); + assertEquals("deg", col.getUnit()); + assertFalse(col.isIndexed()); + assertFalse(col.isPrincipal()); + assertFalse(col.isNullable()); + assertFalse(col.isStd()); + 
close(reader); + // variant with texts at unapropriate places: + reader = buildReader("Here we are col1Column\ndescription.SMALLINTtruc.choset.cdegNothing more!"); + reader.next(); // START ELEMENT("column") + col = parser.parseColumn(reader); + assertEquals("col1", col.getADQLName()); + assertEquals("Column\ndescription.", col.getDescription()); + assertEquals(DBDatatype.SMALLINT, col.getDatatype().type); + assertEquals(-1, col.getDatatype().length); + assertEquals("truc.chose", col.getUtype()); + assertEquals("t.c", col.getUcd()); + assertEquals("deg", col.getUnit()); + assertFalse(col.isIndexed()); + assertFalse(col.isPrincipal()); + assertFalse(col.isNullable()); + assertFalse(col.isStd()); + close(reader); + + // Test with a missing "name" node: + reader = buildReader(""); + reader.next(); // START ELEMENT("column") + try{ + parser.parseColumn(reader); + fail("This test should have failed: the \"name\" node is missing!"); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals("[l.1,c.18] Missing column \"name\"!", e.getMessage()); + }finally{ + close(reader); + } + // variant with duplicated "name": + reader = buildReader("col1colA"); + reader.next(); // START ELEMENT("column") + try{ + parser.parseColumn(reader); + fail("This test should have failed: the \"name\" node is duplicated!"); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals("[l.1,c.32] Only one \"name\" element can exist in a /tableset/schema/table/column!", e.getMessage()); + }finally{ + close(reader); + } + + // Test with a additional node: + reader = buildReader("col1colAblabla"); + reader.next(); // START ELEMENT("foreignKey") + col = parser.parseColumn(reader); + assertEquals("col1", col.getADQLName()); + assertNull(col.getDescription()); + assertEquals(DBDatatype.VARCHAR, col.getDatatype().type); + assertEquals(-1, col.getDatatype().length); + assertNull(col.getUtype()); + assertNull(col.getUcd()); + assertNull(col.getUnit()); + 
assertFalse(col.isIndexed()); + assertFalse(col.isPrincipal()); + assertFalse(col.isNullable()); + assertFalse(col.isStd()); + close(reader); + + }catch(Exception e){ + e.printStackTrace(); + if (e instanceof XMLStreamException) + fail("Unexpected error while reading the XML content: " + e.getMessage()); + else + fail("Unexpected error: " + e.getMessage()); + }finally{ + close(reader); + } + } + + @Test + public void testParseTable(){ + XMLStreamReader reader = null; + ArrayList fkeys = new ArrayList(0); + try{ + + // Test while search outside from the table node: + reader = buildReader("TableA
    "); + reader.next(); // START ELEMENT("schema") + try{ + parser.parseTable(reader, fkeys); + fail("This test should have failed: the reader has not just read the \"table\" START ELEMENT tag."); + }catch(Exception e){ + if (e instanceof IllegalStateException) + assertEquals("[l.1,c.9] Illegal usage of TableSetParser.parseTable(XMLStreamParser)! This function can be called only when the reader has just read the START ELEMENT tag \"table\".", e.getMessage()); + else + throw e; + }finally{ + close(reader); + fkeys.clear(); + } + + // Test with a complete and correct XML table node: + reader = buildReader("TableATable \ndescription.truc.choseTable titlecol1TBcol1col2
    "); + reader.next(); // START ELEMENT("table") + TAPTable t = parser.parseTable(reader, fkeys); + assertEquals("TableA", t.getADQLName()); + assertEquals("Table\ndescription.", t.getDescription()); + assertEquals("truc.chose", t.getUtype()); + assertEquals("Table title", t.getTitle()); + assertEquals(1, t.getNbColumns()); + assertNotNull(t.getColumn("col1")); + assertEquals(0, t.getNbForeignKeys()); + assertEquals(1, fkeys.size()); + assertEquals("TB", fkeys.get(0).targetTable); + assertEquals(t, fkeys.get(0).fromTable); + close(reader); + fkeys.clear(); + // variant with some comments: + reader = buildReader("TableATable \ndescription.truc.choseTable titlecol1TBcol1col2
    "); + reader.next(); // START ELEMENT("table") + t = parser.parseTable(reader, fkeys); + assertEquals("TableA", t.getADQLName()); + assertEquals("Table\ndescription.", t.getDescription()); + assertEquals("truc.chose", t.getUtype()); + assertEquals("Table title", t.getTitle()); + assertEquals(1, t.getNbColumns()); + assertNotNull(t.getColumn("col1")); + assertEquals(0, t.getNbForeignKeys()); + assertEquals(1, fkeys.size()); + assertEquals("TB", fkeys.get(0).targetTable); + assertEquals(t, fkeys.get(0).fromTable); + close(reader); + fkeys.clear(); + // variant with texts at unapropriate places: + reader = buildReader("Here we are TableATable \ndescription.truc.choseTable titlecol1TBcol1col2
    Nothing more!"); + reader.next(); // START ELEMENT("table") + t = parser.parseTable(reader, fkeys); + assertEquals("TableA", t.getADQLName()); + assertEquals("Table\ndescription.", t.getDescription()); + assertEquals("truc.chose", t.getUtype()); + assertEquals("Table title", t.getTitle()); + assertEquals(1, t.getNbColumns()); + assertNotNull(t.getColumn("col1")); + assertEquals(0, t.getNbForeignKeys()); + assertEquals(1, fkeys.size()); + assertEquals("TB", fkeys.get(0).targetTable); + assertEquals(t, fkeys.get(0).fromTable); + close(reader); + fkeys.clear(); + + // Test with a missing "name" node: + reader = buildReader("
    "); + reader.next(); // START ELEMENT("table") + try{ + parser.parseTable(reader, fkeys); + fail("This test should have failed: the \"name\" node is missing!"); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals("[l.1,c.16] Missing table \"name\"!", e.getMessage()); + }finally{ + close(reader); + fkeys.clear(); + } + // variant with duplicated "name": + reader = buildReader("Table1TableA
    "); + reader.next(); // START ELEMENT("table") + try{ + parser.parseTable(reader, fkeys); + fail("This test should have failed: the \"name\" node is duplicated!"); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals("[l.1,c.33] Only one \"name\" element can exist in a /tableset/schema/table!", e.getMessage()); + }finally{ + close(reader); + fkeys.clear(); + } + + // Test with a additional node: + reader = buildReader("TableASuperTableAblabla
    "); + reader.next(); // START ELEMENT("table") + t = parser.parseTable(reader, fkeys); + assertEquals("TableA", t.getADQLName()); + assertNull(t.getDescription()); + assertNull(t.getUtype()); + assertNull(t.getTitle()); + assertEquals(0, t.getNbColumns()); + assertEquals(0, t.getNbForeignKeys()); + assertEquals(0, fkeys.size()); + close(reader); + fkeys.clear(); + + }catch(Exception e){ + e.printStackTrace(); + if (e instanceof XMLStreamException) + fail("Unexpected error while reading the XML content: " + e.getMessage()); + else + fail("Unexpected error: " + e.getMessage()); + }finally{ + close(reader); + fkeys.clear(); + } + } + + @Test + public void testParseSchema(){ + XMLStreamReader reader = null; + ArrayList fkeys = new ArrayList(0); + try{ + + // Test while search outside from the schema node: + reader = buildReader("PublicSchema"); + reader.next(); // START ELEMENT("tableset") + try{ + parser.parseSchema(reader, fkeys); + fail("This test should have failed: the reader has not just read the \"schema\" START ELEMENT tag."); + }catch(Exception e){ + if (e instanceof IllegalStateException) + assertEquals("[l.1,c.11] Illegal usage of TableSetParser.parseSchema(XMLStreamParser)! This function can be called only when the reader has just read the START ELEMENT tag \"schema\".", e.getMessage()); + else + throw e; + }finally{ + close(reader); + fkeys.clear(); + } + + // Test with a complete and correct XML table node: + reader = buildReader("PublicSchemaSchema \ndescription.truc.choseSchema titleTableA
    "); + reader.next(); // START ELEMENT("schema") + TAPSchema s = parser.parseSchema(reader, fkeys); + assertEquals("PublicSchema", s.getADQLName()); + assertEquals("Schema\ndescription.", s.getDescription()); + assertEquals("truc.chose", s.getUtype()); + assertEquals("Schema title", s.getTitle()); + assertEquals(1, s.getNbTables()); + assertNotNull(s.getTable("TableA")); + close(reader); + fkeys.clear(); + // variant with some comments: + reader = buildReader("PublicSchema Schema \ndescription.truc.choseSchema titleTableA
    "); + reader.next(); // START ELEMENT("schema") + s = parser.parseSchema(reader, fkeys); + assertEquals("PublicSchema", s.getADQLName()); + assertEquals("Schema\ndescription.", s.getDescription()); + assertEquals("truc.chose", s.getUtype()); + assertEquals("Schema title", s.getTitle()); + assertEquals(1, s.getNbTables()); + assertNotNull(s.getTable("TableA")); + close(reader); + fkeys.clear(); + // variant with texts at unapropriate places: + reader = buildReader("Here we are PublicSchema Schema \ndescription.truc.choseSchema titleTableA
    Nothing more!"); + reader.next(); // START ELEMENT("schema") + s = parser.parseSchema(reader, fkeys); + assertEquals("PublicSchema", s.getADQLName()); + assertEquals("Schema\ndescription.", s.getDescription()); + assertEquals("truc.chose", s.getUtype()); + assertEquals("Schema title", s.getTitle()); + assertEquals(1, s.getNbTables()); + assertNotNull(s.getTable("TableA")); + close(reader); + fkeys.clear(); + + // Test with a missing "name" node: + reader = buildReader(""); + reader.next(); // START ELEMENT("schema") + try{ + parser.parseSchema(reader, fkeys); + fail("This test should have failed: the \"name\" node is missing!"); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals("[l.1,c.18] Missing schema \"name\"!", e.getMessage()); + }finally{ + close(reader); + fkeys.clear(); + } + // variant with duplicated "name": + reader = buildReader("PublicSchemaPrivateSchema"); + reader.next(); // START ELEMENT("schema") + try{ + parser.parseSchema(reader, fkeys); + fail("This test should have failed: the \"name\" node is duplicated!"); + }catch(Exception e){ + if (e instanceof TAPException) + assertEquals("[l.1,c.40] Only one \"name\" element can exist in a /tableset/schema!", e.getMessage()); + }finally{ + close(reader); + fkeys.clear(); + } + + // Test with a additional node: + reader = buildReader("PublicSchemapublicblabla"); + reader.next(); // START ELEMENT("schema") + s = parser.parseSchema(reader, fkeys); + assertEquals("PublicSchema", s.getADQLName()); + assertNull(s.getDescription()); + assertNull(s.getUtype()); + assertNull(s.getTitle()); + assertEquals(0, s.getNbTables()); + close(reader); + fkeys.clear(); + + }catch(Exception e){ + e.printStackTrace(); + if (e instanceof XMLStreamException) + fail("Unexpected error while reading the XML content: " + e.getMessage()); + else + fail("Unexpected error: " + e.getMessage()); + }finally{ + close(reader); + fkeys.clear(); + } + } + +} diff --git 
a/test/tap/parameters/ServiceConnectionOfTest.java b/test/tap/parameters/ServiceConnectionOfTest.java new file mode 100644 index 0000000000000000000000000000000000000000..029c05663961d7783d6a1c5f6b37727cf35acec0 --- /dev/null +++ b/test/tap/parameters/ServiceConnectionOfTest.java @@ -0,0 +1,177 @@ +package tap.parameters; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Iterator; +import java.util.List; + +import tap.ServiceConnection; +import tap.TAPFactory; +import tap.TAPJob; +import tap.formatter.FITSFormat; +import tap.formatter.OutputFormat; +import tap.formatter.SVFormat; +import tap.formatter.VOTableFormat; +import tap.log.TAPLog; +import tap.metadata.TAPMetadata; +import uws.service.UserIdentifier; +import uws.service.file.UWSFileManager; +import adql.db.FunctionDef; + +public class ServiceConnectionOfTest implements ServiceConnection { + + private boolean available = true; + private String availability = "TAP Service available!"; + private int[] retentionPeriod = new int[]{-1,-1}; + private int[] executionDuration = new int[]{(int)TAPJob.UNLIMITED_DURATION,(int)TAPJob.UNLIMITED_DURATION}; + private int[] outputLimit = new int[]{TAPJob.UNLIMITED_MAX_REC,TAPJob.UNLIMITED_MAX_REC}; + private List outputFormats = Arrays.asList(new OutputFormat[]{new VOTableFormat(this),new SVFormat(this, SVFormat.COMMA_SEPARATOR),new FITSFormat(this)}); + + @Override + public String getProviderName(){ + return null; + } + + @Override + public String getProviderDescription(){ + return null; + } + + @Override + public boolean isAvailable(){ + return available; + } + + @Override + public void setAvailable(boolean isAvailable, String message){ + available = isAvailable; + if (message != null) + availability = message; + else + availability = (isAvailable ? "TAP Service available!" 
: "TAP Service momentarily UNavailable!"); + } + + @Override + public String getAvailability(){ + return availability; + } + + @Override + public int[] getRetentionPeriod(){ + return retentionPeriod; + } + + public void setRetentionPeriod(final int defaultVal, final int maxVal){ + retentionPeriod[0] = defaultVal; + retentionPeriod[1] = maxVal; + } + + @Override + public int[] getExecutionDuration(){ + return executionDuration; + } + + public void setExecutionDuration(final int defaultVal, final int maxVal){ + executionDuration[0] = defaultVal; + executionDuration[1] = maxVal; + } + + @Override + public int[] getOutputLimit(){ + return outputLimit; + } + + public void setOutputLimit(final int defaultVal, final int maxVal){ + outputLimit[0] = defaultVal; + outputLimit[1] = maxVal; + } + + @Override + public LimitUnit[] getOutputLimitType(){ + return new LimitUnit[]{LimitUnit.rows,LimitUnit.rows}; + } + + @Override + public UserIdentifier getUserIdentifier(){ + return null; + } + + @Override + public boolean uploadEnabled(){ + return false; + } + + @Override + public int[] getUploadLimit(){ + return null; + } + + @Override + public LimitUnit[] getUploadLimitType(){ + return null; + } + + @Override + public int getMaxUploadSize(){ + return 0; + } + + @Override + public TAPMetadata getTAPMetadata(){ + return null; + } + + @Override + public Collection getCoordinateSystems(){ + return null; + } + + @Override + public Collection getGeometries(){ + return null; + } + + @Override + public Collection getUDFs(){ + return null; + } + + @Override + public int getNbMaxAsyncJobs(){ + return 0; + } + + @Override + public TAPLog getLogger(){ + return null; + } + + @Override + public TAPFactory getFactory(){ + return null; + } + + @Override + public UWSFileManager getFileManager(){ + return null; + } + + @Override + public Iterator getOutputFormats(){ + return outputFormats.iterator(); + } + + @Override + public OutputFormat getOutputFormat(String mimeOrAlias){ + for(OutputFormat f 
: outputFormats) + if (f.getMimeType().equalsIgnoreCase(mimeOrAlias) || f.getShortMimeType().equalsIgnoreCase(mimeOrAlias)) + return f; + return null; + } + + @Override + public int[] getFetchSize(){ + return null; + } + +} \ No newline at end of file diff --git a/test/tap/parameters/TestFormatController.java b/test/tap/parameters/TestFormatController.java new file mode 100644 index 0000000000000000000000000000000000000000..50d0bb4cd8a8e46204aa8b191ce1d43adbe6ea87 --- /dev/null +++ b/test/tap/parameters/TestFormatController.java @@ -0,0 +1,89 @@ +package tap.parameters; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import tap.TAPJob; +import uws.UWSException; + +public class TestFormatController { + + @BeforeClass + public static void setUpBeforeClass() throws Exception{} + + @AfterClass + public static void tearDownAfterClass() throws Exception{} + + @Before + public void setUp() throws Exception{} + + @After + public void tearDown() throws Exception{} + + @Test + public void testCheck(){ + ServiceConnectionOfTest service = new ServiceConnectionOfTest(); + FormatController controller = new FormatController(service); + + try{ + assertEquals(controller.getDefault(), controller.check(null)); + assertEquals(controller.getDefault(), controller.check("")); + assertEquals(controller.getDefault(), controller.check(" ")); + assertEquals(controller.getDefault(), controller.check(" ")); + assertEquals(controller.getDefault(), controller.check(" ")); + assertEquals("votable", controller.check("votable")); + assertEquals("application/x-votable+xml", controller.check("application/x-votable+xml")); + assertEquals("csv", controller.check("csv")); + assertEquals("fits", controller.check("fits")); + }catch(Exception 
ex){ + ex.printStackTrace(); + fail(); + } + + try{ + controller.check("toto"); + }catch(Exception ex){ + assertTrue(ex instanceof UWSException); + assertTrue(ex.getMessage().startsWith("Unknown value for the parameter \"format\": \"toto\". It should be ")); + } + + try{ + controller.check("application/xml"); + }catch(Exception ex){ + assertTrue(ex instanceof UWSException); + assertTrue(ex.getMessage().startsWith("Unknown value for the parameter \"format\": \"application/xml\". It should be ")); + } + } + + @Test + public void testGetDefault(){ + ServiceConnectionOfTest service = new ServiceConnectionOfTest(); + FormatController controller = new FormatController(service); + + assertEquals(TAPJob.FORMAT_VOTABLE, controller.getDefault()); + } + + @Test + public void testAllowModification(){ + ServiceConnectionOfTest service = new ServiceConnectionOfTest(); + FormatController controller = new FormatController(service); + + // By default, user modification of the destruction time is allowed: + assertTrue(controller.allowModification()); + + controller.allowModification(true); + assertTrue(controller.allowModification()); + + controller.allowModification(false); + assertFalse(controller.allowModification()); + } + +} diff --git a/test/tap/parameters/TestMaxRecController.java b/test/tap/parameters/TestMaxRecController.java new file mode 100644 index 0000000000000000000000000000000000000000..613a2f92c04b7ecd38485d5050a0fef352303523 --- /dev/null +++ b/test/tap/parameters/TestMaxRecController.java @@ -0,0 +1,142 @@ +package tap.parameters; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import tap.TAPJob; + +public class TestMaxRecController { + + @BeforeClass + public static void setUpBeforeClass() throws 
Exception{} + + @AfterClass + public static void tearDownAfterClass() throws Exception{} + + @Before + public void setUp() throws Exception{} + + @After + public void tearDown() throws Exception{} + + @Test + public void testCheck(){ + ServiceConnectionOfTest service = new ServiceConnectionOfTest(); + MaxRecController controller = new MaxRecController(service); + + try{ + // A NULL limit will always return an unlimited duration: + assertEquals(TAPJob.UNLIMITED_MAX_REC, controller.check(null)); + assertEquals(TAPJob.UNLIMITED_MAX_REC, controller.check(-1)); + assertEquals(TAPJob.UNLIMITED_MAX_REC, controller.check(-123)); + + // A 0 value, means that only the metadata of the result must be returned (without executing the query); + // this value should stay like that: + assertEquals(0, controller.check(0)); + + // By default, the controller has no limit on the output limit, so let's try with a limit of 1000000 rows: + assertEquals(1000000, controller.check(1000000)); + + // With just a default output limit (of 100 rows): + service.setOutputLimit(100, -1); + assertEquals(100, controller.check(null)); + assertEquals(0, controller.check(0)); + assertEquals(TAPJob.UNLIMITED_MAX_REC, controller.check(-1)); + assertEquals(TAPJob.UNLIMITED_MAX_REC, controller.check(TAPJob.UNLIMITED_MAX_REC)); + + // With just a maximum output limit (of 10000 rows): + service.setOutputLimit(-1, 10000); + assertEquals(10000, controller.check(null)); + assertEquals(0, controller.check(0)); + assertEquals(60, controller.check(60)); + assertEquals(10000, controller.check(-1)); + assertEquals(10000, controller.check(TAPJob.UNLIMITED_MAX_REC)); + assertEquals(10000, controller.check(10001)); + + // With a default (100 rows) AND a maximum (10000 rows) output limit: + service.setOutputLimit(100, 10000); + assertEquals(100, controller.check(null)); + assertEquals(0, controller.check(0)); + assertEquals(10, controller.check(10)); + assertEquals(600, controller.check(600)); + assertEquals(10000, 
controller.check(10000)); + assertEquals(10000, controller.check(-1)); + assertEquals(10000, controller.check(TAPJob.UNLIMITED_MAX_REC)); + assertEquals(10000, controller.check(10001)); + + }catch(Exception t){ + t.printStackTrace(); + fail(); + } + } + + @Test + public void testGetDefault(){ + ServiceConnectionOfTest service = new ServiceConnectionOfTest(); + MaxRecController controller = new MaxRecController(service); + + // By default, when nothing is set, the default output limit is UNLIMITED: + assertEquals(TAPJob.UNLIMITED_MAX_REC, controller.getDefault()); + + // With no duration, the default output limit should remain UNLIMITED: + service.setOutputLimit(TAPJob.UNLIMITED_MAX_REC, -1); + assertEquals(TAPJob.UNLIMITED_MAX_REC, controller.getDefault()); + + // With a negative limit, the output limit should also be UNLIMITED: + service.setOutputLimit(-1, -1); + assertEquals(TAPJob.UNLIMITED_MAX_REC, controller.getDefault()); + + // With an output limit of 100 rows: + service.setOutputLimit(100, -1); + assertEquals(100, controller.getDefault()); + + // The default value must always be less than the maximum value: + service.setOutputLimit(600, 300); + assertEquals(300, controller.getDefault()); + } + + @Test + public void testGetMaxExecutionDuration(){ + ServiceConnectionOfTest service = new ServiceConnectionOfTest(); + MaxRecController controller = new MaxRecController(service); + + // By default, when nothing is set, the maximum output limit is UNLIMITED: + assertEquals(TAPJob.UNLIMITED_MAX_REC, controller.getMaxOutputLimit()); + + // With no duration, the maximum output limit should remain UNLIMITED: + service.setOutputLimit(-1, TAPJob.UNLIMITED_MAX_REC); + assertEquals(TAPJob.UNLIMITED_MAX_REC, controller.getMaxOutputLimit()); + + // With a negative limit, the output limit should also be UNLIMITED: + service.setOutputLimit(-1, -1); + assertEquals(TAPJob.UNLIMITED_MAX_REC, controller.getMaxOutputLimit()); + + // With an output limit of 10000 rows: + 
service.setOutputLimit(-1, 10000); + assertEquals(10000, controller.getMaxOutputLimit()); + } + + @Test + public void testAllowModification(){ + ServiceConnectionOfTest service = new ServiceConnectionOfTest(); + MaxRecController controller = new MaxRecController(service); + + // By default, user modification of the destruction time is allowed: + assertTrue(controller.allowModification()); + + controller.allowModification(true); + assertTrue(controller.allowModification()); + + controller.allowModification(false); + assertFalse(controller.allowModification()); + } + +} diff --git a/test/tap/parameters/TestTAPDestructionTimeController.java b/test/tap/parameters/TestTAPDestructionTimeController.java new file mode 100644 index 0000000000000000000000000000000000000000..73453af6d9e94e8741f848ee1a706cdd8c60a754 --- /dev/null +++ b/test/tap/parameters/TestTAPDestructionTimeController.java @@ -0,0 +1,187 @@ +package tap.parameters; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.util.Calendar; +import java.util.Date; + +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import uws.ISO8601Format; + +public class TestTAPDestructionTimeController { + + @BeforeClass + public static void setUpBeforeClass() throws Exception{} + + @AfterClass + public static void tearDownAfterClass() throws Exception{} + + @Before + public void setUp() throws Exception{} + + @After + public void tearDown() throws Exception{} + + @Test + public void testCheck(){ + ServiceConnectionOfTest service = new ServiceConnectionOfTest(); + TAPDestructionTimeController controller = new TAPDestructionTimeController(service); + Calendar calendar = Calendar.getInstance(); + + try{ + // A NULL destruction time will always return NULL: + 
assertNull(controller.check(null)); + + // By default, the controller has no limit on the destruction time, so let's try with a destruction in 100 years: + calendar.add(Calendar.YEAR, 100); + checkDate(calendar.getTime(), controller.check(calendar.getTime())); + checkDate(calendar.getTime(), controller.check(ISO8601Format.format(calendar.getTimeInMillis()))); + + // With just a default destruction time (of 10 minutes): + service.setRetentionPeriod(600, -1); + Calendar defaultTime = Calendar.getInstance(); + defaultTime.add(Calendar.MINUTE, 10); + checkDate(defaultTime.getTime(), controller.check(null)); + checkDate(calendar.getTime(), controller.check(calendar.getTime())); + + // With just a maximum destruction time (of 1 hour): + service.setRetentionPeriod(0, 3600); + Calendar maxTime = Calendar.getInstance(); + maxTime.add(Calendar.HOUR, 1); + checkDate(maxTime.getTime(), controller.check(null)); + checkDate(defaultTime.getTime(), controller.check(defaultTime.getTime())); + checkDate(maxTime.getTime(), controller.check(calendar.getTime())); + + // With a default (10 minutes) AND a maximum (1 hour) destruction time: + service.setRetentionPeriod(600, 3600); + checkDate(defaultTime.getTime(), controller.check(null)); + checkDate(maxTime.getTime(), controller.check(calendar.getTime())); + calendar = Calendar.getInstance(); + calendar.add(Calendar.MINUTE, 30); + checkDate(calendar.getTime(), controller.check(calendar.getTime())); + + }catch(Exception t){ + t.printStackTrace(); + fail(); + } + } + + @Test + public void testGetDefault(){ + ServiceConnectionOfTest service = new ServiceConnectionOfTest(); + TAPDestructionTimeController controller = new TAPDestructionTimeController(service); + + // By default, when nothing is set, the default destruction time is NULL (the job will never be destroyed): + assertNull(controller.getDefault()); + + // With no interval, the default destruction time should remain NULL (the job will never be destroyed): + 
service.setRetentionPeriod(0, -1); + assertNull(controller.getDefault()); + + // With a negative interval, the destruction time should also be NULL: + service.setRetentionPeriod(-1, -1); + assertNull(controller.getDefault()); + + // With a destruction interval of 100 minutes: + Calendar calendar = Calendar.getInstance(); + service.setRetentionPeriod(6000, -1); + calendar.add(Calendar.MINUTE, 100); + checkDate(calendar.getTime(), controller.getDefault()); + + // With a destruction interval of 100 seconds: + service.setRetentionPeriod(100, -1); + calendar = Calendar.getInstance(); + calendar.add(Calendar.SECOND, 100); + checkDate(calendar.getTime(), controller.getDefault()); + + // With a destruction interval of 1 week: + service.setRetentionPeriod(7 * 24 * 3600, -1); + calendar = Calendar.getInstance(); + calendar.add(Calendar.DAY_OF_MONTH, 7); + checkDate(calendar.getTime(), controller.getDefault()); + } + + @Test + public void testGetMaxDestructionTime(){ + ServiceConnectionOfTest service = new ServiceConnectionOfTest(); + TAPDestructionTimeController controller = new TAPDestructionTimeController(service); + + // By default, when nothing is set, the maximum destruction time is NULL (the job will never be destroyed): + assertNull(controller.getMaxDestructionTime()); + + // With no interval, the maximum destruction time should remain NULL (the job will never be destroyed): + service.setRetentionPeriod(-1, 0); + assertNull(controller.getMaxDestructionTime()); + + // With a negative interval, the destruction time should also be NULL: + service.setRetentionPeriod(-1, -1); + assertNull(controller.getMaxDestructionTime()); + + // With a destruction interval of 100 minutes: + Calendar calendar = Calendar.getInstance(); + service.setRetentionPeriod(-1, 6000); + calendar.add(Calendar.MINUTE, 100); + checkDate(calendar.getTime(), controller.getMaxDestructionTime()); + + // With a destruction interval of 100 seconds: + service.setRetentionPeriod(-1, 100); + calendar = 
Calendar.getInstance(); + calendar.add(Calendar.SECOND, 100); + checkDate(calendar.getTime(), controller.getMaxDestructionTime()); + + // With a destruction interval of 1 week: + service.setRetentionPeriod(-1, 7 * 24 * 3600); + calendar = Calendar.getInstance(); + calendar.add(Calendar.DAY_OF_MONTH, 7); + checkDate(calendar.getTime(), controller.getMaxDestructionTime()); + } + + @Test + public void testAllowModification(){ + ServiceConnectionOfTest service = new ServiceConnectionOfTest(); + TAPDestructionTimeController controller = new TAPDestructionTimeController(service); + + // By default, user modification of the destruction time is allowed: + assertTrue(controller.allowModification()); + + controller.allowModification(true); + assertTrue(controller.allowModification()); + + controller.allowModification(false); + assertFalse(controller.allowModification()); + } + + private void checkDate(final Date expected, final Object val){ + assertTrue(val instanceof Date); + + if (expected != null && val != null){ + Calendar cexpected = Calendar.getInstance(), cval = Calendar.getInstance(); + cexpected.setTime(expected); + cval.setTime((Date)val); + + try{ + assertEquals(cexpected.get(Calendar.DAY_OF_MONTH), cval.get(Calendar.DAY_OF_MONTH)); + assertEquals(cexpected.get(Calendar.MONTH), cval.get(Calendar.MONTH)); + assertEquals(cexpected.get(Calendar.YEAR), cval.get(Calendar.YEAR)); + assertEquals(cexpected.get(Calendar.HOUR), cval.get(Calendar.HOUR)); + assertEquals(cexpected.get(Calendar.MINUTE), cval.get(Calendar.MINUTE)); + assertEquals(cexpected.get(Calendar.SECOND), cval.get(Calendar.SECOND)); + }catch(AssertionError e){ + fail("Expected <" + expected + "> but was <" + val + ">"); + } + }else if (expected == null && val == null) + return; + else + fail("Expected <" + expected + "> but was <" + val + ">"); + } + +} diff --git a/test/tap/parameters/TestTAPExecutionDurationController.java b/test/tap/parameters/TestTAPExecutionDurationController.java new file mode 100644 
index 0000000000000000000000000000000000000000..446c58e07098072a29e7fbcea479bf2c5545e68e --- /dev/null +++ b/test/tap/parameters/TestTAPExecutionDurationController.java @@ -0,0 +1,136 @@ +package tap.parameters; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import tap.TAPJob; + +public class TestTAPExecutionDurationController { + + @BeforeClass + public static void setUpBeforeClass() throws Exception{} + + @AfterClass + public static void tearDownAfterClass() throws Exception{} + + @Before + public void setUp() throws Exception{} + + @After + public void tearDown() throws Exception{} + + @Test + public void testCheck(){ + ServiceConnectionOfTest service = new ServiceConnectionOfTest(); + TAPExecutionDurationController controller = new TAPExecutionDurationController(service); + + try{ + // A NULL duration will always return an unlimited duration: + assertEquals(TAPJob.UNLIMITED_DURATION, controller.check(null)); + assertEquals(TAPJob.UNLIMITED_DURATION, controller.check(0)); + assertEquals(TAPJob.UNLIMITED_DURATION, controller.check(-1)); + assertEquals(TAPJob.UNLIMITED_DURATION, controller.check(-123)); + + // By default, the controller has no limit on the execution duration, so let's try with a duration of 1e6 seconds: + assertEquals(1000000L, controller.check(1000000)); + + // With just a default execution duration (of 10 minutes): + service.setExecutionDuration(600, -1); + assertEquals(600L, controller.check(null)); + assertEquals(TAPJob.UNLIMITED_DURATION, controller.check(-1)); + assertEquals(TAPJob.UNLIMITED_DURATION, controller.check(TAPJob.UNLIMITED_DURATION)); + + // With just a maximum execution duration (of 1 hour): + service.setExecutionDuration(-1, 3600); + assertEquals(3600L, 
controller.check(null)); + assertEquals(60L, controller.check(60)); + assertEquals(3600L, controller.check(-1)); + assertEquals(3600L, controller.check(TAPJob.UNLIMITED_DURATION)); + assertEquals(3600L, controller.check(3601)); + + // With a default (10 minutes) AND a maximum (1 hour) execution duration: + service.setExecutionDuration(600, 3600); + assertEquals(600L, controller.check(null)); + assertEquals(10L, controller.check(10)); + assertEquals(600L, controller.check(600)); + assertEquals(3600L, controller.check(3600)); + assertEquals(3600L, controller.check(-1)); + assertEquals(3600L, controller.check(TAPJob.UNLIMITED_DURATION)); + assertEquals(3600L, controller.check(3601)); + + }catch(Exception t){ + t.printStackTrace(); + fail(); + } + } + + @Test + public void testGetDefault(){ + ServiceConnectionOfTest service = new ServiceConnectionOfTest(); + TAPExecutionDurationController controller = new TAPExecutionDurationController(service); + + // By default, when nothing is set, the default execution duration is UNLIMITED: + assertEquals(TAPJob.UNLIMITED_DURATION, controller.getDefault()); + + // With no duration, the default execution duration should remain UNLIMITED: + service.setExecutionDuration((int)TAPJob.UNLIMITED_DURATION, -1); + assertEquals(TAPJob.UNLIMITED_DURATION, controller.getDefault()); + + // With a negative duration, the execution duration should also be UNLIMITED: + service.setExecutionDuration(-1, -1); + assertEquals(TAPJob.UNLIMITED_DURATION, controller.getDefault()); + + // With an execution duration of 10 minutes: + service.setExecutionDuration(600, -1); + assertEquals(600L, controller.getDefault()); + + // The default value must always be less than the maximum value: + service.setExecutionDuration(600, 300); + assertEquals(300L, controller.getDefault()); + } + + @Test + public void testGetMaxExecutionDuration(){ + ServiceConnectionOfTest service = new ServiceConnectionOfTest(); + TAPExecutionDurationController controller = new 
TAPExecutionDurationController(service); + + // By default, when nothing is set, the maximum execution duration is UNLIMITED: + assertEquals(TAPJob.UNLIMITED_DURATION, controller.getMaxDuration()); + + // With no duration, the maximum execution duration should remain UNLIMITED: + service.setExecutionDuration(-1, (int)TAPJob.UNLIMITED_DURATION); + assertEquals(TAPJob.UNLIMITED_DURATION, controller.getMaxDuration()); + + // With a negative duration, the execution duration should also be UNLIMITED: + service.setExecutionDuration(-1, -1); + assertEquals(TAPJob.UNLIMITED_DURATION, controller.getMaxDuration()); + + // With an execution duration of 10 minutes: + service.setExecutionDuration(-1, 600); + assertEquals(600L, controller.getMaxDuration()); + } + + @Test + public void testAllowModification(){ + ServiceConnectionOfTest service = new ServiceConnectionOfTest(); + TAPExecutionDurationController controller = new TAPExecutionDurationController(service); + + // By default, user modification of the execution duration is allowed: + assertTrue(controller.allowModification()); + + controller.allowModification(true); + assertTrue(controller.allowModification()); + + controller.allowModification(false); + assertFalse(controller.allowModification()); + } + +} diff --git a/test/testtools/CommandExecute.java b/test/testtools/CommandExecute.java new file mode 100644 index 0000000000000000000000000000000000000000..2e78978ae39eaa6d0c0a67d007eb8936d8c4ea28 --- /dev/null +++ b/test/testtools/CommandExecute.java @@ -0,0 +1,51 @@ +package testtools; + +import java.io.BufferedReader; +import java.io.InputStreamReader; + +/** + * Let's execute any shell command (even with pipes and redirections). + * + * @author Grégory Mantelet (ARI) + * @version 2.0 (09/2014) + */ +public final class CommandExecute { + + /** + * SINGLETON CLASS. + * No instance of this class can be created. 
+ */ + private CommandExecute(){} + + /** + * Execute the given command (which may include pipe(s) and/or redirection(s)). + * + * @param command Command to execute in the shell. + * + * @return The string returned by the execution of the command. + */ + public final static String execute(final String command){ + + String[] shellCmd = new String[]{"/bin/sh","-c",command}; + + StringBuffer output = new StringBuffer(); + + Process p; + try{ + p = Runtime.getRuntime().exec(shellCmd); + p.waitFor(); + BufferedReader reader = new BufferedReader(new InputStreamReader(p.getInputStream())); + + String line = ""; + while((line = reader.readLine()) != null){ + output.append(line + "\n"); + } + + }catch(Exception e){ + e.printStackTrace(); + } + + return output.toString(); + + } +} diff --git a/test/testtools/DBTools.java b/test/testtools/DBTools.java new file mode 100644 index 0000000000000000000000000000000000000000..ba542f9ad6d85bea6b96a7de22e9813e620d9ed8 --- /dev/null +++ b/test/testtools/DBTools.java @@ -0,0 +1,136 @@ +package testtools; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.HashMap; + +public final class DBTools { + + public static int count = 0; + + public final static void main(final String[] args) throws Throwable{ + for(int i = 0; i < 3; i++){ + Thread t = new Thread(new Runnable(){ + @Override + public void run(){ + count++; + try{ + Connection conn = DBTools.createConnection("postgresql", "127.0.0.1", null, "gmantele", "gmantele", "pwd"); + System.out.println("Start - " + count + ": "); + String query = "SELECT * FROM gums.smc WHERE magg BETWEEN " + (15 + count) + " AND " + (20 + count) + ";"; + System.out.println(query); + ResultSet rs = DBTools.select(conn, query); + try{ + rs.last(); + System.out.println("Nb rows: " + rs.getRow()); + }catch(SQLException e){ + e.printStackTrace(); + } + if (DBTools.closeConnection(conn)) + 
System.out.println("[DEBUG] Connection closed!"); + }catch(DBToolsException e){ + e.printStackTrace(); + } + System.out.println("End - " + count); + count--; + } + }); + t.start(); + } + } + + public static class DBToolsException extends Exception { + + private static final long serialVersionUID = 1L; + + public DBToolsException(){ + super(); + } + + public DBToolsException(String message, Throwable cause){ + super(message, cause); + } + + public DBToolsException(String message){ + super(message); + } + + public DBToolsException(Throwable cause){ + super(cause); + } + + } + + public final static HashMap VALUE_JDBC_DRIVERS = new HashMap(4); + static{ + VALUE_JDBC_DRIVERS.put("oracle", "oracle.jdbc.OracleDriver"); + VALUE_JDBC_DRIVERS.put("postgresql", "org.postgresql.Driver"); + VALUE_JDBC_DRIVERS.put("mysql", "com.mysql.jdbc.Driver"); + VALUE_JDBC_DRIVERS.put("sqlite", "org.sqlite.JDBC"); + } + + private DBTools(){} + + public final static Connection createConnection(String dbms, final String server, final String port, final String dbName, final String user, final String passwd) throws DBToolsException{ + // 1. Resolve the DBMS and get its JDBC driver: + if (dbms == null) + throw new DBToolsException("Missing DBMS (expected: oracle, postgresql, mysql or sqlite)!"); + dbms = dbms.toLowerCase(); + String jdbcDriver = VALUE_JDBC_DRIVERS.get(dbms); + if (jdbcDriver == null) + throw new DBToolsException("Unknown DBMS (\"" + dbms + "\")!"); + + // 2. Load the JDBC driver: + try{ + Class.forName(jdbcDriver); + }catch(ClassNotFoundException e){ + throw new DBToolsException("Impossible to load the JDBC driver: " + e.getMessage(), e); + } + + // 3. Establish the connection: + Connection connection = null; + try{ + connection = DriverManager.getConnection("jdbc:" + dbms + ":" + ((server != null && server.trim().length() > 0) ? "//" + server + ((port != null && port.trim().length() > 0) ? 
(":" + port) : "") + "/" : "") + dbName, user, passwd); + }catch(SQLException e){ + throw new DBToolsException("Connection failed: " + e.getMessage(), e); + } + + if (connection == null) + throw new DBToolsException("Failed to make connection!"); + + return connection; + } + + public final static boolean closeConnection(final Connection conn) throws DBToolsException{ + try{ + if (conn != null && !conn.isClosed()){ + conn.close(); + try{ + Thread.sleep(200); + }catch(InterruptedException e){ + System.err.println("WARNING: can't wait/sleep before testing the connection close status! [" + e.getMessage() + "]"); + } + return conn.isClosed(); + }else + return true; + }catch(SQLException e){ + throw new DBToolsException("Closing connection failed: " + e.getMessage(), e); + } + } + + public final static ResultSet select(final Connection conn, final String selectQuery) throws DBToolsException{ + if (conn == null || selectQuery == null || selectQuery.trim().length() == 0) + throw new DBToolsException("One parameter is missing!"); + + try{ + Statement stmt = conn.createStatement(); + return stmt.executeQuery(selectQuery); + }catch(SQLException e){ + throw new DBToolsException("Can't execute the given SQL query: " + e.getMessage(), e); + } + } + +} diff --git a/test/testtools/MD5Checksum.java b/test/testtools/MD5Checksum.java new file mode 100644 index 0000000000000000000000000000000000000000..d4941c53febea6ff6cc35e94647ecdf6e8d4d373 --- /dev/null +++ b/test/testtools/MD5Checksum.java @@ -0,0 +1,46 @@ +package testtools; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; +import java.security.MessageDigest; + +public class MD5Checksum { + + public static byte[] createChecksum(InputStream input) throws Exception{ + byte[] buffer = new byte[1024]; + MessageDigest complete = MessageDigest.getInstance("MD5"); + int numRead; + + do{ + numRead = input.read(buffer); + if (numRead > 0){ + complete.update(buffer, 0, numRead); + } + }while(numRead != -1); + return 
complete.digest(); + } + + // see this How-to for a faster way to convert + // a byte array to a HEX string + public static String getMD5Checksum(InputStream input) throws Exception{ + byte[] b = createChecksum(input); + String result = ""; + + for(int i = 0; i < b.length; i++){ + result += Integer.toString((b[i] & 0xff) + 0x100, 16).substring(1); + } + return result; + } + + public static String getMD5Checksum(final String content) throws Exception{ + return getMD5Checksum(new ByteArrayInputStream(content.getBytes())); + } + + public static void main(String args[]){ + try{ + System.out.println(getMD5Checksum("Blabla et Super blabla")); + }catch(Exception e){ + e.printStackTrace(); + } + } +} diff --git a/test/uws/TestISO8601Format.java b/test/uws/TestISO8601Format.java new file mode 100644 index 0000000000000000000000000000000000000000..22c9ee920c8fd0585a9dae1b0df7a7db60cbced5 --- /dev/null +++ b/test/uws/TestISO8601Format.java @@ -0,0 +1,163 @@ +package uws; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + +import java.text.ParseException; +import java.util.TimeZone; + +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +public class TestISO8601Format { + + private final long date = 1411737870325L; // Fri Sep 26 15:24:30 CEST 2014 = 2014-09-26T15:24:30.325+02:00 = 1411737870325 ms + private final long dateAlone = 1411689600000L; + + private final long oldDate = -3506029200000L; // Thu Nov 25 00:00:00 CET 1858 = 1858-11-25T00:00:00+01:00 = -3506029200000 ms + + private static boolean displayMS; + private static boolean displayTZ; + private static String targetTZ = null; + + @BeforeClass + public static void setUpBeforeClass() throws Exception{ + displayMS = ISO8601Format.displayMilliseconds; + displayTZ = ISO8601Format.displayTimeZone; + targetTZ = ISO8601Format.targetTimeZone; + } + + @Before + public void setUp() throws Exception{ + 
ISO8601Format.displayMilliseconds = false; + ISO8601Format.displayTimeZone = true; + ISO8601Format.targetTimeZone = "Europe/Berlin"; + } + + @AfterClass + public static void tearDownAfterClass() throws Exception{ + ISO8601Format.displayMilliseconds = displayMS; + ISO8601Format.displayTimeZone = displayTZ; + ISO8601Format.targetTimeZone = targetTZ; + } + + @Test + public void testFormatDate(){ + // Special case: reference for the millisecond representation of dates (1st January 1970): + assertEquals("1970-01-01T01:00:00+01:00", ISO8601Format.format(0)); + assertEquals("1970-01-01T00:00:00Z", ISO8601Format.formatInUTC(0)); + + // Special case: old date (25th November 1858): + assertEquals("1858-11-25T00:00:00+01:00", ISO8601Format.format(oldDate)); + assertEquals("1858-11-24T23:00:00Z", ISO8601Format.formatInUTC(oldDate)); + + // Tests of: FORMAT(Date) && FORMAT(Date, boolean withTimestamp): + assertEquals("2014-09-26T15:24:30+02:00", ISO8601Format.format(date)); + assertEquals(ISO8601Format.format(date), ISO8601Format.format(date, true)); + assertEquals("2014-09-26T15:24:30", ISO8601Format.format(date, false)); + + // Tests of: FORMAT_IN_UTC(Date) && FORMAT_IN_UTC(Date, boolean withTimestamp): + assertEquals("2014-09-26T13:24:30Z", ISO8601Format.formatInUTC(date)); + assertEquals(ISO8601Format.formatInUTC(date), ISO8601Format.formatInUTC(date, true)); + assertEquals("2014-09-26T13:24:30", ISO8601Format.formatInUTC(date, false)); + + // Test with a different time zone: + assertEquals("2014-09-26T17:24:30+04:00", ISO8601Format.format(date, "Indian/Reunion", true, false)); + + // Test with no specified different time zone (the chosen time zone should be the local one): + assertEquals(ISO8601Format.format(date, TimeZone.getDefault().getID(), true, false), ISO8601Format.format(date, null, true, false)); + + // Test with display of milliseconds: + assertEquals("2014-09-26T15:24:30.325+02:00", ISO8601Format.format(date, null, true, true)); + 
assertEquals("2014-09-26T15:24:30.325", ISO8601Format.format(date, null, false, true)); + + // Same tests but in the UTC time zone: + assertEquals("2014-09-26T13:24:30.325Z", ISO8601Format.format(date, "UTC", true, true)); + assertEquals("2014-09-26T13:24:30.325", ISO8601Format.format(date, "UTC", false, true)); + } + + @Test + public void testParse(){ + // Special case: NULL + try{ + ISO8601Format.parse(null); + fail("Parse can not theoretically work without a string"); + }catch(Throwable t){ + assertEquals(NullPointerException.class, t.getClass()); + } + + // Special case: "" + try{ + ISO8601Format.parse(""); + fail("Parse can not theoretically work without a non-empty string"); + }catch(Throwable t){ + assertEquals(ParseException.class, t.getClass()); + assertEquals("Invalid date format: \"\"! An ISO8601 date was expected.", t.getMessage()); + } + + // Special case: anything stupid rather than a valid date + try{ + ISO8601Format.parse("stupid thing"); + fail("Parse can not theoretically work without a valid string date"); + }catch(Throwable t){ + assertEquals(ParseException.class, t.getClass()); + assertEquals("Invalid date format: \"stupid thing\"! 
An ISO8601 date was expected.", t.getMessage()); + } + + try{ + // Special case: reference for the millisecond representation of dates (1st January 1970): + assertEquals(0, ISO8601Format.parse("1970-01-01T01:00:00+01:00")); + assertEquals(0, ISO8601Format.parse("1970-01-01T00:00:00Z")); + + // Special case: old date (25th November 1858): + assertEquals(oldDate, ISO8601Format.parse("1858-11-25T00:00:00+01:00")); + assertEquals(oldDate, ISO8601Format.parse("1858-11-24T23:00:00Z")); + + // Test with a perfectly valid date in ISO8601: + assertEquals(dateAlone, ISO8601Format.parse("2014-09-26")); + assertEquals(date, ISO8601Format.parse("2014-09-26T15:24:30.325+02:00")); + assertEquals(date - 325, ISO8601Format.parse("2014-09-26T15:24:30+02:00")); + + // Test with Z as time zone (UTC): + assertEquals(date, ISO8601Format.parse("2014-09-26T13:24:30.325Z")); + assertEquals(date - 325, ISO8601Format.parse("2014-09-26T13:24:30Z")); + + // If no time zone is specified, the local one should be used: + assertEquals(date, ISO8601Format.parse("2014-09-26T13:24:30.325")); + assertEquals(date - 325, ISO8601Format.parse("2014-09-26T13:24:30")); + + // All the previous tests without the _ between days, month, and years: + assertEquals(0, ISO8601Format.parse("19700101T01:00:00+01:00")); + assertEquals(0, ISO8601Format.parse("19700101T00:00:00Z")); + assertEquals(oldDate, ISO8601Format.parse("18581125T00:00:00+01:00")); + assertEquals(oldDate, ISO8601Format.parse("18581124T23:00:00Z")); + assertEquals(dateAlone, ISO8601Format.parse("20140926")); + assertEquals(date, ISO8601Format.parse("20140926T15:24:30.325+02:00")); + assertEquals(date - 325, ISO8601Format.parse("20140926T15:24:30+02:00")); + assertEquals(date, ISO8601Format.parse("20140926T13:24:30.325Z")); + assertEquals(date - 325, ISO8601Format.parse("20140926T13:24:30Z")); + assertEquals(date, ISO8601Format.parse("20140926T13:24:30.325")); + assertEquals(date - 325, ISO8601Format.parse("20140926T13:24:30")); + + // All the 
previous tests without the : between hours, minutes and seconds: + assertEquals(0, ISO8601Format.parse("1970-01-01T010000+0100")); + assertEquals(oldDate, ISO8601Format.parse("1858-11-25T000000+0100")); + assertEquals(date, ISO8601Format.parse("2014-09-26T152430.325+0200")); + assertEquals(date - 325, ISO8601Format.parse("2014-09-26T152430+0200")); + + // All the previous tests by replacing the T between date and time by a space: + assertEquals(0, ISO8601Format.parse("1970-01-01 00:00:00Z")); + assertEquals(oldDate, ISO8601Format.parse("1858-11-24 23:00:00Z")); + assertEquals(date, ISO8601Format.parse("2014-09-26 13:24:30.325Z")); + assertEquals(date - 325, ISO8601Format.parse("2014-09-26 13:24:30Z")); + assertEquals(date, ISO8601Format.parse("2014-09-26 13:24:30.325")); + assertEquals(date - 325, ISO8601Format.parse("2014-09-26 13:24:30")); + + }catch(ParseException ex){ + ex.printStackTrace(System.err); + } + } + +} diff --git a/test/uws/job/parameters/TestDestructionTimeController.java b/test/uws/job/parameters/TestDestructionTimeController.java new file mode 100644 index 0000000000000000000000000000000000000000..a50d5eb1e40297c803e82cbc1da04c065520a243 --- /dev/null +++ b/test/uws/job/parameters/TestDestructionTimeController.java @@ -0,0 +1,186 @@ +package uws.job.parameters; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.util.Calendar; +import java.util.Date; + +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import uws.ISO8601Format; +import uws.job.parameters.DestructionTimeController.DateField; + +public class TestDestructionTimeController { + + @BeforeClass + public static void setUpBeforeClass() throws Exception{} + + @AfterClass + public static void tearDownAfterClass() throws 
Exception{} + + @Before + public void setUp() throws Exception{} + + @After + public void tearDown() throws Exception{} + + @Test + public void testCheck(){ + DestructionTimeController controller = new DestructionTimeController(); + Calendar calendar = Calendar.getInstance(); + + try{ + // A NULL destruction time will always return NULL: + assertNull(controller.check(null)); + + // By default, the controller has no limit on the destruction time, so let's try with a destruction in 100 years: + calendar.add(Calendar.YEAR, 100); + checkDate(calendar.getTime(), controller.check(calendar.getTime())); + checkDate(calendar.getTime(), controller.check(ISO8601Format.format(calendar.getTimeInMillis()))); + + // With just a default destruction time (of 10 minutes): + controller.setDefaultDestructionInterval(10); + Calendar defaultTime = Calendar.getInstance(); + defaultTime.add(Calendar.MINUTE, 10); + checkDate(defaultTime.getTime(), controller.check(null)); + checkDate(calendar.getTime(), controller.check(calendar.getTime())); + + // With just a maximum destruction time (of 1 hour): + controller.setDefaultDestructionInterval(0); + controller.setMaxDestructionInterval(1, DateField.HOUR); + Calendar maxTime = Calendar.getInstance(); + maxTime.add(Calendar.HOUR, 1); + checkDate(maxTime.getTime(), controller.check(null)); + checkDate(defaultTime.getTime(), controller.check(defaultTime.getTime())); + checkDate(maxTime.getTime(), controller.check(calendar.getTime())); + + // With a default (10 minutes) AND a maximum (1 hour) destruction time: + controller.setDefaultDestructionInterval(10); + controller.setMaxDestructionInterval(1, DateField.HOUR); + checkDate(defaultTime.getTime(), controller.check(null)); + checkDate(maxTime.getTime(), controller.check(calendar.getTime())); + calendar = Calendar.getInstance(); + calendar.add(Calendar.MINUTE, 30); + checkDate(calendar.getTime(), controller.check(calendar.getTime())); + + }catch(Exception t){ + t.printStackTrace(); + fail(); + } + 
} + + @Test + public void testGetDefault(){ + DestructionTimeController controller = new DestructionTimeController(); + + // By default, when nothing is set, the default destruction time is NULL (the job will never be destroyed): + assertNull(controller.getDefault()); + + // With no interval, the default destruction time should remain NULL (the job will never be destroyed): + controller.setDefaultDestructionInterval(DestructionTimeController.NO_INTERVAL); + assertNull(controller.getDefault()); + + // With a negative interval, the destruction time should also be NULL: + controller.setDefaultDestructionInterval(-1); + assertNull(controller.getDefault()); + + // With a destruction interval of 100 minutes: + Calendar calendar = Calendar.getInstance(); + controller.setDefaultDestructionInterval(100); + calendar.add(Calendar.MINUTE, 100); + checkDate(calendar.getTime(), controller.getDefault()); + + // With a destruction interval of 100 seconds: + controller.setDefaultDestructionInterval(100, DateField.SECOND); + calendar = Calendar.getInstance(); + calendar.add(Calendar.SECOND, 100); + checkDate(calendar.getTime(), controller.getDefault()); + + // With a destruction interval of 1 week: + controller.setDefaultDestructionInterval(7, DateField.DAY); + calendar = Calendar.getInstance(); + calendar.add(Calendar.DAY_OF_MONTH, 7); + checkDate(calendar.getTime(), controller.getDefault()); + } + + @Test + public void testGetMaxDestructionTime(){ + DestructionTimeController controller = new DestructionTimeController(); + + // By default, when nothing is set, the maximum destruction time is NULL (the job will never be destroyed): + assertNull(controller.getMaxDestructionTime()); + + // With no interval, the maximum destruction time should remain NULL (the job will never be destroyed): + controller.setMaxDestructionInterval(DestructionTimeController.NO_INTERVAL); + assertNull(controller.getMaxDestructionTime()); + + // With a negative interval, the destruction time should also be 
NULL: + controller.setMaxDestructionInterval(-1); + assertNull(controller.getMaxDestructionTime()); + + // With a destruction interval of 100 minutes: + Calendar calendar = Calendar.getInstance(); + controller.setMaxDestructionInterval(100); + calendar.add(Calendar.MINUTE, 100); + checkDate(calendar.getTime(), controller.getMaxDestructionTime()); + + // With a destruction interval of 100 seconds: + controller.setMaxDestructionInterval(100, DateField.SECOND); + calendar = Calendar.getInstance(); + calendar.add(Calendar.SECOND, 100); + checkDate(calendar.getTime(), controller.getMaxDestructionTime()); + + // With a destruction interval of 1 week: + controller.setMaxDestructionInterval(7, DateField.DAY); + calendar = Calendar.getInstance(); + calendar.add(Calendar.DAY_OF_MONTH, 7); + checkDate(calendar.getTime(), controller.getMaxDestructionTime()); + } + + @Test + public void testAllowModification(){ + DestructionTimeController controller = new DestructionTimeController(); + + // By default, user modification of the destruction time is allowed: + assertTrue(controller.allowModification()); + + controller.allowModification(true); + assertTrue(controller.allowModification()); + + controller.allowModification(false); + assertFalse(controller.allowModification()); + } + + private void checkDate(final Date expected, final Object val){ + assertTrue(val instanceof Date); + + if (expected != null && val != null){ + Calendar cexpected = Calendar.getInstance(), cval = Calendar.getInstance(); + cexpected.setTime(expected); + cval.setTime((Date)val); + + try{ + assertEquals(cexpected.get(Calendar.DAY_OF_MONTH), cval.get(Calendar.DAY_OF_MONTH)); + assertEquals(cexpected.get(Calendar.MONTH), cval.get(Calendar.MONTH)); + assertEquals(cexpected.get(Calendar.YEAR), cval.get(Calendar.YEAR)); + assertEquals(cexpected.get(Calendar.HOUR), cval.get(Calendar.HOUR)); + assertEquals(cexpected.get(Calendar.MINUTE), cval.get(Calendar.MINUTE)); + assertEquals(cexpected.get(Calendar.SECOND), 
cval.get(Calendar.SECOND)); + }catch(AssertionError e){ + fail("Expected <" + expected + "> but was <" + val + ">"); + } + }else if (expected == null && val == null) + return; + else + fail("Expected <" + expected + "> but was <" + val + ">"); + } + +} diff --git a/test/uws/job/parameters/TestExecutionDurationController.java b/test/uws/job/parameters/TestExecutionDurationController.java new file mode 100644 index 0000000000000000000000000000000000000000..65eef00017943d266fdd3dd27259537ef3815231 --- /dev/null +++ b/test/uws/job/parameters/TestExecutionDurationController.java @@ -0,0 +1,134 @@ +package uws.job.parameters; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import uws.job.UWSJob; + +public class TestExecutionDurationController { + + @BeforeClass + public static void setUpBeforeClass() throws Exception{} + + @AfterClass + public static void tearDownAfterClass() throws Exception{} + + @Before + public void setUp() throws Exception{} + + @After + public void tearDown() throws Exception{} + + @Test + public void testCheck(){ + ExecutionDurationController controller = new ExecutionDurationController(); + + try{ + // A NULL duration will always return an unlimited duration: + assertEquals(UWSJob.UNLIMITED_DURATION, controller.check(null)); + assertEquals(UWSJob.UNLIMITED_DURATION, controller.check(0)); + assertEquals(UWSJob.UNLIMITED_DURATION, controller.check(-1)); + assertEquals(UWSJob.UNLIMITED_DURATION, controller.check(-123)); + + // By default, the controller has no limit on the execution duration, so let's try with a duration of 1e6 seconds: + assertEquals(1000000L, controller.check(1000000)); + + // With just a default execution duration (of 10 minutes): + 
controller.setDefaultExecutionDuration(600); + assertEquals(600L, controller.check(null)); + assertEquals(UWSJob.UNLIMITED_DURATION, controller.check(-1)); + assertEquals(UWSJob.UNLIMITED_DURATION, controller.check(UWSJob.UNLIMITED_DURATION)); + + // With just a maximum execution duration (of 1 hour): + controller.setDefaultExecutionDuration(-1); + controller.setMaxExecutionDuration(3600); + assertEquals(3600L, controller.check(null)); + assertEquals(60L, controller.check(60)); + assertEquals(3600L, controller.check(-1)); + assertEquals(3600L, controller.check(UWSJob.UNLIMITED_DURATION)); + assertEquals(3600L, controller.check(3601)); + + // With a default (10 minutes) AND a maximum (1 hour) execution duration: + controller.setDefaultExecutionDuration(600); + controller.setMaxExecutionDuration(3600); + assertEquals(600L, controller.check(null)); + assertEquals(10L, controller.check(10)); + assertEquals(600L, controller.check(600)); + assertEquals(3600L, controller.check(3600)); + assertEquals(3600L, controller.check(-1)); + assertEquals(3600L, controller.check(UWSJob.UNLIMITED_DURATION)); + assertEquals(3600L, controller.check(3601)); + + }catch(Exception t){ + t.printStackTrace(); + fail(); + } + } + + @Test + public void testGetDefault(){ + ExecutionDurationController controller = new ExecutionDurationController(); + + // By default, when nothing is set, the default execution duration is UNLIMITED: + assertEquals(UWSJob.UNLIMITED_DURATION, controller.getDefault()); + + // With no duration, the default execution duration should remain UNLIMITED: + controller.setDefaultExecutionDuration(UWSJob.UNLIMITED_DURATION); + assertEquals(UWSJob.UNLIMITED_DURATION, controller.getDefault()); + + // With a negative duration, the execution duration should also be UNLIMITED: + controller.setDefaultExecutionDuration(-1); + assertEquals(UWSJob.UNLIMITED_DURATION, controller.getDefault()); + + // With an execution duration of 10 minutes: + 
controller.setDefaultExecutionDuration(600); + assertEquals(600L, controller.getDefault()); + + // The default value must always be less than the maximum value: + controller.setMaxExecutionDuration(300); + assertEquals(300L, controller.getDefault()); + } + + @Test + public void testGetMaxExecutionDuration(){ + ExecutionDurationController controller = new ExecutionDurationController(); + + // By default, when nothing is set, the maximum execution duration is UNLIMITED: + assertEquals(UWSJob.UNLIMITED_DURATION, controller.getMaxExecutionDuration()); + + // With no duration, the maximum execution duration should remain UNLIMITED: + controller.setMaxExecutionDuration(UWSJob.UNLIMITED_DURATION); + assertEquals(UWSJob.UNLIMITED_DURATION, controller.getMaxExecutionDuration()); + + // With a negative duration, the execution duration should also be UNLIMITED: + controller.setMaxExecutionDuration(-1); + assertEquals(UWSJob.UNLIMITED_DURATION, controller.getMaxExecutionDuration()); + + // With an execution duration of 10 minutes: + controller.setMaxExecutionDuration(600); + assertEquals(600L, controller.getMaxExecutionDuration()); + } + + @Test + public void testAllowModification(){ + ExecutionDurationController controller = new ExecutionDurationController(); + + // By default, user modification of the execution duration is allowed: + assertTrue(controller.allowModification()); + + controller.allowModification(true); + assertTrue(controller.allowModification()); + + controller.allowModification(false); + assertFalse(controller.allowModification()); + } + +} diff --git a/test/uws/service/UWSUrlTest.java b/test/uws/service/UWSUrlTest.java new file mode 100644 index 0000000000000000000000000000000000000000..a61db70f0032176b4f983aa3c915dcbf9be21d0d --- /dev/null +++ b/test/uws/service/UWSUrlTest.java @@ -0,0 +1,560 @@ +package uws.service; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; 
+import static org.junit.Assert.fail; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.security.Principal; +import java.util.Collection; +import java.util.Enumeration; +import java.util.Locale; +import java.util.Map; + +import javax.servlet.AsyncContext; +import javax.servlet.DispatcherType; +import javax.servlet.RequestDispatcher; +import javax.servlet.ServletContext; +import javax.servlet.ServletException; +import javax.servlet.ServletInputStream; +import javax.servlet.ServletRequest; +import javax.servlet.ServletResponse; +import javax.servlet.http.Cookie; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import javax.servlet.http.HttpSession; +import javax.servlet.http.Part; + +import org.junit.Before; +import org.junit.Test; + +public class UWSUrlTest { + + public static final class TestHttpServletRequest implements HttpServletRequest { + + private final String scheme; + private final String serverName; + private final int serverPort; + private final String contextPath; + private final String pathInfo; + private final String requestURI; + private final StringBuffer requestURL; + private final String servletPath; + + public TestHttpServletRequest(StringBuffer requestURL, String requestURI, String scheme, String serverName, int serverPort, String contextPath, String servletPath, String pathInfo){ + this.scheme = scheme; + this.serverName = serverName; + this.serverPort = serverPort; + this.contextPath = contextPath; + this.pathInfo = pathInfo; + this.requestURI = requestURI; + this.requestURL = requestURL; + this.servletPath = servletPath; + } + + @Override + public String getScheme(){ + return scheme; + } + + @Override + public String getServerName(){ + return serverName; + } + + @Override + public int getServerPort(){ + return serverPort; + } + + @Override + public String getPathInfo(){ + return pathInfo; + } + + @Override + public String 
getRequestURI(){ + return requestURI; + } + + @Override + public StringBuffer getRequestURL(){ + return requestURL; + } + + @Override + public String getContextPath(){ + return contextPath; + } + + @Override + public String getServletPath(){ + return servletPath; + } + + @Override + public AsyncContext getAsyncContext(){ + return null; + } + + @Override + public Object getAttribute(String arg0){ + return null; + } + + @Override + public Enumeration getAttributeNames(){ + return null; + } + + @Override + public String getCharacterEncoding(){ + return null; + } + + @Override + public int getContentLength(){ + return 0; + } + + @Override + public String getContentType(){ + return null; + } + + @Override + public DispatcherType getDispatcherType(){ + return null; + } + + @Override + public ServletInputStream getInputStream() throws IOException{ + return null; + } + + @Override + public String getLocalAddr(){ + return null; + } + + @Override + public String getLocalName(){ + return null; + } + + @Override + public int getLocalPort(){ + return 0; + } + + @Override + public Locale getLocale(){ + return null; + } + + @Override + public Enumeration getLocales(){ + return null; + } + + @Override + public String getParameter(String arg0){ + return null; + } + + @Override + public Map getParameterMap(){ + return null; + } + + @Override + public Enumeration getParameterNames(){ + return null; + } + + @Override + public String[] getParameterValues(String arg0){ + return null; + } + + @Override + public String getProtocol(){ + return null; + } + + @Override + public BufferedReader getReader() throws IOException{ + return null; + } + + @Override + public String getRealPath(String arg0){ + return null; + } + + @Override + public String getRemoteAddr(){ + return null; + } + + @Override + public String getRemoteHost(){ + return null; + } + + @Override + public int getRemotePort(){ + return 0; + } + + @Override + public RequestDispatcher getRequestDispatcher(String arg0){ + return 
null; + } + + @Override + public ServletContext getServletContext(){ + return null; + } + + @Override + public boolean isAsyncStarted(){ + return false; + } + + @Override + public boolean isAsyncSupported(){ + return false; + } + + @Override + public boolean isSecure(){ + return false; + } + + @Override + public void removeAttribute(String arg0){ + + } + + @Override + public void setAttribute(String arg0, Object arg1){ + + } + + @Override + public void setCharacterEncoding(String arg0) throws UnsupportedEncodingException{ + + } + + @Override + public AsyncContext startAsync(){ + return null; + } + + @Override + public AsyncContext startAsync(ServletRequest arg0, ServletResponse arg1){ + return null; + } + + @Override + public boolean authenticate(HttpServletResponse arg0) throws IOException, ServletException{ + return false; + } + + @Override + public String getAuthType(){ + return null; + } + + @Override + public Cookie[] getCookies(){ + return null; + } + + @Override + public long getDateHeader(String arg0){ + return 0; + } + + @Override + public String getHeader(String arg0){ + return null; + } + + @Override + public Enumeration getHeaderNames(){ + return null; + } + + @Override + public Enumeration getHeaders(String arg0){ + return null; + } + + @Override + public int getIntHeader(String arg0){ + return 0; + } + + @Override + public String getMethod(){ + return null; + } + + @Override + public Part getPart(String arg0) throws IOException, IllegalStateException, ServletException{ + return null; + } + + @Override + public Collection getParts() throws IOException, IllegalStateException, ServletException{ + return null; + } + + @Override + public String getPathTranslated(){ + return null; + } + + @Override + public String getQueryString(){ + return null; + } + + @Override + public String getRemoteUser(){ + return null; + } + + @Override + public String getRequestedSessionId(){ + return null; + } + + @Override + public HttpSession getSession(){ + return null; + } + 
+ @Override + public HttpSession getSession(boolean arg0){ + return null; + } + + @Override + public Principal getUserPrincipal(){ + return null; + } + + @Override + public boolean isRequestedSessionIdFromCookie(){ + return false; + } + + @Override + public boolean isRequestedSessionIdFromURL(){ + return false; + } + + @Override + public boolean isRequestedSessionIdFromUrl(){ + return false; + } + + @Override + public boolean isRequestedSessionIdValid(){ + return false; + } + + @Override + public boolean isUserInRole(String arg0){ + return false; + } + + @Override + public void login(String arg0, String arg1) throws ServletException{} + + @Override + public void logout() throws ServletException{} + + } + + private HttpServletRequest requestFromRoot2root; + private HttpServletRequest requestFromRoot2async; + + private HttpServletRequest requestFromPath2root; + private HttpServletRequest requestFromPath2async; + + private HttpServletRequest requestWithServletPathNull; + + @Before + public void setUp() throws Exception{ + requestFromRoot2root = new TestHttpServletRequest(new StringBuffer("http://localhost:8080/tapTest/"), "/tapTest/", "http", "localhost", 8080, "/tapTest", "", "/"); + requestFromRoot2async = new TestHttpServletRequest(new StringBuffer("http://localhost:8080/tapTest/async"), "/tapTest/async", "http", "localhost", 8080, "/tapTest", "", "/async"); + + requestFromPath2root = new TestHttpServletRequest(new StringBuffer("http://localhost:8080/tapTest/path/"), "/tapTest/path/", "http", "localhost", 8080, "/tapTest", "/path", "/"); + requestFromPath2async = new TestHttpServletRequest(new StringBuffer("http://localhost:8080/tapTest/path/async"), "/tapTest/path/async", "http", "localhost", 8080, "/tapTest", "/path", "/async"); + + requestWithServletPathNull = new TestHttpServletRequest(new StringBuffer("http://localhost:8080/tapTest/"), "/tapTest/", "http", "localhost", 8080, "/tapTest", null, "/"); + } + + @Test + public void testExtractBaseURI(){ + // CASE 1: 
http://localhost:8080/tapTest/path with url-pattern = /path/* + try{ + UWSUrl uu = new UWSUrl(requestFromPath2root); + assertEquals("/path", uu.getBaseURI()); + assertEquals("", uu.getUwsURI()); + assertEquals("http://localhost:8080/tapTest/path/", uu.toString()); + }catch(Exception e){ + e.printStackTrace(System.err); + fail("This HTTP request is perfectly correct: " + requestFromPath2root.getRequestURL()); + } + + // CASE 2: http://localhost:8080/tapTest/path/async with url-pattern = /path/* + try{ + UWSUrl uu = new UWSUrl(requestFromPath2async); + assertEquals("/path", uu.getBaseURI()); + assertEquals("/async", uu.getUwsURI()); + assertEquals("http://localhost:8080/tapTest/path/async", uu.toString()); + }catch(Exception e){ + e.printStackTrace(System.err); + fail("This HTTP request is perfectly correct: " + requestFromPath2async.getRequestURL()); + } + + // CASE 3: http://localhost:8080/tapTest with url-pattern = /* + try{ + UWSUrl uu = new UWSUrl(requestFromRoot2root); + assertEquals("", uu.getBaseURI()); + assertEquals("", uu.getUwsURI()); + assertEquals("http://localhost:8080/tapTest/", uu.toString()); + }catch(Exception e){ + e.printStackTrace(System.err); + fail("This HTTP request is perfectly correct: " + requestFromRoot2root.getRequestURL()); + } + + // CASE 4: http://localhost:8080/tapTest/async with url-pattern = /* + try{ + UWSUrl uu = new UWSUrl(requestFromRoot2async); + assertEquals("", uu.getBaseURI()); + assertEquals("/async", uu.getUwsURI()); + assertEquals("http://localhost:8080/tapTest/async", uu.toString()); + }catch(Exception e){ + e.printStackTrace(System.err); + fail("This HTTP request is perfectly correct: " + requestFromRoot2async.getRequestURL()); + } + + // CASE 5: http://localhost:8080/tapTest/path/async with url-pattern = /path/* + try{ + new UWSUrl(requestWithServletPathNull); + fail("RequestURL with no servlet path: this test should have failed!"); + }catch(Exception e){ + assertTrue(e instanceof NullPointerException); + 
assertEquals(e.getMessage(), "The extracted base UWS URI is NULL!"); + } + } + + @Test + public void testLoadHttpServletRequest(){ + // CASE 1a: http://localhost:8080/tapTest/path with url-pattern = /path/* + try{ + UWSUrl uu = new UWSUrl(requestFromPath2root); + uu.load(requestFromPath2root); + assertEquals("", uu.getUwsURI()); + assertEquals("http://localhost:8080/tapTest/path/async/123456A", uu.jobSummary("async", "123456A").toString()); + }catch(Exception e){ + e.printStackTrace(System.err); + fail("This HTTP request is perfectly correct: " + requestFromPath2root.getRequestURL()); + } + // CASE 1b: Idem while loading http://localhost:8080/tapTest/path/async + try{ + UWSUrl uu = new UWSUrl(requestFromPath2root); + uu.load(requestFromPath2async); + assertEquals("/async", uu.getUwsURI()); + assertEquals("http://localhost:8080/tapTest/path/async/123456A", uu.jobSummary("async", "123456A").toString()); + }catch(Exception e){ + e.printStackTrace(System.err); + fail("This HTTP request is perfectly correct: " + requestFromPath2async.getRequestURL()); + } + + // CASE 2a: http://localhost:8080/tapTest/path/async with url-pattern = /path/* + try{ + UWSUrl uu = new UWSUrl(requestFromPath2async); + uu.load(requestFromPath2async); + assertEquals("/async", uu.getUwsURI()); + assertEquals("http://localhost:8080/tapTest/path/async/123456A", uu.jobSummary("async", "123456A").toString()); + }catch(Exception e){ + e.printStackTrace(System.err); + fail("This HTTP request is perfectly correct: " + requestFromPath2async.getRequestURL()); + } + + // CASE 2b: Idem while loading http://localhost:8080/tapTest/path + try{ + UWSUrl uu = new UWSUrl(requestFromPath2async); + uu.load(requestFromPath2root); + assertEquals("", uu.getUwsURI()); + assertEquals("http://localhost:8080/tapTest/path/async/123456A", uu.jobSummary("async", "123456A").toString()); + }catch(Exception e){ + e.printStackTrace(System.err); + fail("This HTTP request is perfectly correct: " + 
requestFromPath2root.getRequestURL()); + } + + // CASE 3a: http://localhost:8080/tapTest with url-pattern = /* + try{ + UWSUrl uu = new UWSUrl(requestFromRoot2root); + uu.load(requestFromRoot2root); + assertEquals("", uu.getUwsURI()); + assertEquals("http://localhost:8080/tapTest/async/123456A", uu.jobSummary("async", "123456A").toString()); + }catch(NullPointerException e){ + fail("This HTTP request is perfectly correct: " + requestFromRoot2root.getRequestURL()); + } + + // CASE 3b: Idem while loading http://localhost:8080/tapTest/async + try{ + UWSUrl uu = new UWSUrl(requestFromRoot2root); + uu.load(requestFromRoot2async); + assertEquals("/async", uu.getUwsURI()); + assertEquals("http://localhost:8080/tapTest/async/123456A", uu.jobSummary("async", "123456A").toString()); + }catch(Exception e){ + e.printStackTrace(System.err); + fail("This HTTP request is perfectly correct: " + requestFromRoot2async.getRequestURL()); + } + + // CASE 4a: http://localhost:8080/tapTest/async with url-pattern = /* + try{ + UWSUrl uu = new UWSUrl(requestFromRoot2async); + uu.load(requestFromRoot2async); + assertEquals("/async", uu.getUwsURI()); + assertEquals("http://localhost:8080/tapTest/async/123456A", uu.jobSummary("async", "123456A").toString()); + }catch(Exception e){ + e.printStackTrace(System.err); + fail("This HTTP request is perfectly correct: " + requestFromRoot2async.getRequestURL()); + } + + // CASE 4b: Idem while loading http://localhost:8080/tapTest + try{ + UWSUrl uu = new UWSUrl(requestFromRoot2async); + uu.load(requestFromRoot2root); + assertEquals("", uu.getUwsURI()); + assertEquals("http://localhost:8080/tapTest/async/123456A", uu.jobSummary("async", "123456A").toString()); + }catch(Exception e){ + e.printStackTrace(System.err); + fail("This HTTP request is perfectly correct: " + requestFromRoot2root.getRequestURL()); + } + + // SPECIAL CASE 1: Creation with http://localhost:8080/tapTest[/async] (/*) but loading with http://localhost:8080/tapTest/path[/async] 
(/path/*): + try{ + UWSUrl uu = new UWSUrl(requestFromRoot2async); + uu.load(requestFromPath2async); + assertFalse(uu.getUwsURI().equals("")); + }catch(Exception e){ + e.printStackTrace(System.err); + fail("This HTTP request is perfectly correct: " + requestFromRoot2root.getRequestURL()); + } + } + +} diff --git a/test/uws/service/file/TestLogRotation.java b/test/uws/service/file/TestLogRotation.java new file mode 100644 index 0000000000000000000000000000000000000000..16aa1e499f3ab6d9758b4a307130f89ba2869538 --- /dev/null +++ b/test/uws/service/file/TestLogRotation.java @@ -0,0 +1,277 @@ +package uws.service.file; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.fail; + +import java.io.BufferedReader; +import java.io.File; +import java.io.IOException; +import java.io.InputStreamReader; + +import org.junit.Test; + +import uws.service.log.DefaultUWSLog; +import uws.service.log.UWSLog; +import uws.service.log.UWSLog.LogLevel; + +public class TestLogRotation { + + @Test + public void testEventFrequencyCreation(){ + EventFrequency freq; + + try{ + String DEFAULT_FREQ = "daily at 00:00"; + + // FREQ = NULL => !!! ; frequency = every day + freq = new EventFrequency(null); + assertEquals(DEFAULT_FREQ, freq.toString()); + + // FREQ = "" => !!! ; frequency = every day + freq = new EventFrequency(""); + assertEquals(DEFAULT_FREQ, freq.toString()); + + // FREQ = "blabla" => !!! + freq = new EventFrequency("blabla"); + assertEquals(DEFAULT_FREQ, freq.toString()); + + /* *********** */ + /* DAILY EVENT */ + /* *********** */ + DEFAULT_FREQ = "daily at 00:00"; + + // FREQ = "D" => ok! ; frequency = every day at midnight + freq = new EventFrequency("D"); + assertEquals(DEFAULT_FREQ, freq.toString()); + + // FREQ = "D 06" => !!! ; frequency = every day at midnight + freq = new EventFrequency("D 06"); + assertEquals(DEFAULT_FREQ, freq.toString()); + + // FREQ = "D 06 30" => ok! 
; frequency = every day at 06:30 + freq = new EventFrequency("D 06 30"); + assertEquals("daily at 06:30", freq.toString()); + + // FREQ = "D 6 30" => ok! ; frequency = every day at 06:30 + freq = new EventFrequency("D 6 30"); + assertEquals("daily at 06:30", freq.toString()); + + // FREQ = "D 06 30" => ok! (with spaces and tabs inside) ; frequency = every day at 06:30 + freq = new EventFrequency("D 06 30"); + assertEquals("daily at 06:30", freq.toString()); + + // FREQ = "D 24 30" => !!! ; frequency = every day at midnight + freq = new EventFrequency("D 24 30"); + assertEquals(DEFAULT_FREQ, freq.toString()); + + // FREQ = "D 06 60" => !!! ; frequency = every day at midnight + freq = new EventFrequency("D 06 60"); + assertEquals(DEFAULT_FREQ, freq.toString()); + + // FREQ = "D 6 30 01 blabla" => ok! ; frequency = every day at 06:30 + freq = new EventFrequency("D 6 30 01 blabla"); + assertEquals("daily at 06:30", freq.toString()); + + // FREQ = "d 06 30" => !!! ; frequency = every day at midnight + freq = new EventFrequency("d 06 30"); + assertEquals(DEFAULT_FREQ, freq.toString()); + + // FREQ = "D HH mm" => !!! + freq = new EventFrequency("D HH mm"); + assertEquals(DEFAULT_FREQ, freq.toString()); + + /* ********** */ + /* WEEK EVENT */ + /* ********** */ + DEFAULT_FREQ = "weekly on Sunday at 00:00"; + + // FREQ = "W" => ok! ; frequency = every week the Sunday at midnight + freq = new EventFrequency("W"); + assertEquals(DEFAULT_FREQ, freq.toString()); + + // FREQ = "W 06" => !!! ; frequency = every week the Sunday at midnight + freq = new EventFrequency("W 06"); + assertEquals(DEFAULT_FREQ, freq.toString()); + + // FREQ = "W 06 30" => !!! ; frequency = every week the Sunday at midnight + freq = new EventFrequency("W 06 30"); + assertEquals(DEFAULT_FREQ, freq.toString()); + + // FREQ = "W 2" => !!! ; frequency = every week the Sunday at midnight + freq = new EventFrequency("W 2"); + assertEquals(DEFAULT_FREQ, freq.toString()); + + // FREQ = "W 2 06" => !!! 
; frequency = every week the Sunday at midnight + freq = new EventFrequency("W 2 06"); + assertEquals(DEFAULT_FREQ, freq.toString()); + + // FREQ = "W 2 06 30" => ok! ; frequency = every week the Monday at 06:30 + freq = new EventFrequency("W 2 06 30"); + assertEquals("weekly on Monday at 06:30", freq.toString()); + + // FREQ = "W 0 06 30" => !!! ; frequency = every week the Sunday at 06:30 + freq = new EventFrequency("W 0 06 30"); + assertEquals(DEFAULT_FREQ, freq.toString()); + + // FREQ = "W 10 06 30" => !!! ; frequency = every week the Sunday at 06:30 + freq = new EventFrequency("W 10 06 30"); + assertEquals(DEFAULT_FREQ, freq.toString()); + + // FREQ = "w 2 06 30" => !!! ; frequency = every day at 00:00 + freq = new EventFrequency("w 2 06 30"); + assertEquals("daily at 00:00", freq.toString()); + + // FREQ = "W 2 6 30" => ok! ; frequency = every week the Monday at 06:30 + freq = new EventFrequency("W 2 6 30"); + assertEquals("weekly on Monday at 06:30", freq.toString()); + + // FREQ = "W 2 6 30" => ok! (with spaces and tabs inside) ; frequency = every week the Monday at 06:30 + freq = new EventFrequency("W 2 6 30"); + assertEquals("weekly on Monday at 06:30", freq.toString()); + + // FREQ = "W 2 6 30 12 blabla" => ok! ; frequency = every week the Monday at 06:30 + freq = new EventFrequency("W 2 6 30 12 blabla"); + assertEquals("weekly on Monday at 06:30", freq.toString()); + + /* ***************************************** */ + /* MONTH EVENT (same code as for WEEK EVENT) */ + /* ***************************************** */ + DEFAULT_FREQ = "monthly on the 1st at 00:00"; + + // FREQ = "M 2 06 30" => ok! ; frequency = every month on the 2nd at 06:30 + freq = new EventFrequency("M 2 06 30"); + assertEquals("monthly on the 2nd at 06:30", freq.toString()); + + // FREQ = "M 2 06 30" => ok! 
(with spaces and tabs inside) ; frequency = every month on the 2nd at 06:30 + freq = new EventFrequency("M 2 06 30"); + assertEquals("monthly on the 2nd at 06:30", freq.toString()); + + // FREQ = "m 2 06 30" => !!! ; frequency = every minute + freq = new EventFrequency("m 2 06 30"); + assertEquals("every minute", freq.toString()); + + // FREQ = "M 0 06 30" => !!! ; frequency = every month on the 1st at 00:00 + freq = new EventFrequency("M 0 06 30"); + assertEquals(DEFAULT_FREQ, freq.toString()); + + // FREQ = "M 32 06 30" => !!! ; frequency = every month on the 1st at 00:00 + freq = new EventFrequency("M 32 06 30"); + assertEquals(DEFAULT_FREQ, freq.toString()); + + /* ********** */ + /* HOUR EVENT */ + /* ********** */ + DEFAULT_FREQ = "hourly at 00"; + + // FREQ = "h" => ok! ; frequency = every hour at 00 + freq = new EventFrequency("h"); + assertEquals(DEFAULT_FREQ, freq.toString()); + + // FREQ = "h 10" => ok! ; frequency = every hour at 10 + freq = new EventFrequency("h 10"); + assertEquals("hourly at 10", freq.toString()); + + // FREQ = "h 10" => ok! (with spaces and tabs inside) ; frequency = every hour at 10 + freq = new EventFrequency("h 10"); + assertEquals("hourly at 10", freq.toString()); + + // FREQ = "H 10" => !!! ; frequency = every day at 00:00 + freq = new EventFrequency("H 10"); + assertEquals("daily at 00:00", freq.toString()); + + // FREQ = "h 5" => ok! ; frequency = every hour at 05 + freq = new EventFrequency("h 5"); + assertEquals("hourly at 05", freq.toString()); + + // FREQ = "h 60" => !!! ; frequency = every hour at 00 + freq = new EventFrequency("h 60"); + assertEquals("hourly at 00", freq.toString()); + + // FREQ = "h 10 12 blabla" => ok! ; frequency = every hour at 10 + freq = new EventFrequency("h 10 12 blabla"); + assertEquals("hourly at 10", freq.toString()); + + /* ********** */ + /* HOUR EVENT */ + /* ********** */ + DEFAULT_FREQ = "every minute"; + + // FREQ = "m" => ok! 
; frequency = every minute + freq = new EventFrequency("m"); + assertEquals(DEFAULT_FREQ, freq.toString()); + + // FREQ = "m 10 blabla" => ok! ; frequency = every minute + freq = new EventFrequency("m 10 blabla"); + assertEquals(DEFAULT_FREQ, freq.toString()); + + // FREQ = "M" => !!! ; frequency = every month on the 1st at 00:00 + freq = new EventFrequency("M"); + assertEquals("monthly on the 1st at 00:00", freq.toString()); + + }catch(Exception e){ + e.printStackTrace(System.err); + fail("UNEXPECTED EXCEPTION: \"" + e.getMessage() + "\""); + } + } + + @Test + public void testGetLogOutput(){ + try{ + final LocalUWSFileManager fileManager = new LocalUWSFileManager(new File(".")); + fileManager.logRotation = new EventFrequency("m"); + final int MAX_TIME = 3000; // 3 seconds => 68 messages (for 5 threads) + int nbExpectedMessages = 0; + + // Delete old log file: + fileManager.getLogFile(LogLevel.DEBUG, null).delete(); + + // Log a lot of messages: + final UWSLog logger = new DefaultUWSLog(fileManager); + for(int i = 0; i < 5; i++){ + final int logFreq = i + 1; + nbExpectedMessages += 30 / logFreq; + (new Thread(new Runnable(){ + @Override + public void run(){ + try{ + final int nbMsgs = 30 / logFreq; + final int freq = MAX_TIME / nbMsgs; + for(int cnt = 0; cnt < nbMsgs; cnt++){ + logger.log(LogLevel.INFO, "TEST", "LOG MESSAGE FROM Thread-" + logFreq, null); + assertFalse(fileManager.getLogOutput(LogLevel.INFO, "UWS").checkError()); // if true, it means that at least one attempt to write something fails, and so, that write attempts have been done after a log rotation! 
+ Thread.sleep(freq); + } + }catch(InterruptedException e){ + e.printStackTrace(System.err); + fail("ERROR WITH THE THREAD-" + logFreq); + }catch(IOException e){ + e.printStackTrace(System.err); + fail("IO ERROR WHEN RETRIEVING THE LOG OUTPUT IN THE THREAD-" + logFreq); + } + } + })).start(); + } + Thread.sleep(MAX_TIME); + + // Check that all messages have been well written: + BufferedReader input = new BufferedReader(new InputStreamReader(fileManager.getLogInput(LogLevel.DEBUG, null))); + int nbLines = 0; + while(input.readLine() != null) + nbLines++; + nbLines -= 3; // deduce the number of 3 header lines + assertEquals(nbExpectedMessages, nbLines); + + // Delete log file if no error: + fileManager.getLogFile(LogLevel.DEBUG, null).delete(); + + }catch(InterruptedException e){ + e.printStackTrace(System.err); + fail("CAN NOT WAIT 3 SECONDS!"); + }catch(Exception e){ + e.printStackTrace(System.err); + fail("CAN NOT CREATE THE FILE MANAGER!"); + } + } + +} diff --git a/test/uws/service/log/TestDefaultUWSLog.java b/test/uws/service/log/TestDefaultUWSLog.java new file mode 100644 index 0000000000000000000000000000000000000000..2b39544abb0dc2e16f6deef90f7e28d0be261216 --- /dev/null +++ b/test/uws/service/log/TestDefaultUWSLog.java @@ -0,0 +1,63 @@ +package uws.service.log; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.io.ByteArrayOutputStream; +import java.io.OutputStream; + +import org.junit.Test; + +import uws.service.log.UWSLog.LogLevel; + +public class TestDefaultUWSLog { + + @Test + public void testCanLog(){ + OutputStream output = new ByteArrayOutputStream(); + DefaultUWSLog logger = new DefaultUWSLog(output); + + // Default value = DEBUG => ALL MESSAGES CAN BE LOGGED + assertEquals(LogLevel.DEBUG, logger.getMinLogLevel()); + for(LogLevel ll : LogLevel.values()) + assertTrue(logger.canLog(ll)); + + // Test: INFO => ALL EXCEPT DEBUG CAN BE LOGGED + 
logger.setMinLogLevel(LogLevel.INFO); + assertEquals(LogLevel.INFO, logger.getMinLogLevel()); + assertFalse(logger.canLog(LogLevel.DEBUG)); + assertTrue(logger.canLog(LogLevel.INFO)); + assertTrue(logger.canLog(LogLevel.WARNING)); + assertTrue(logger.canLog(LogLevel.ERROR)); + assertTrue(logger.canLog(LogLevel.FATAL)); + + // Test: WARNING => ALL EXCEPT DEBUG AND INFO CAN BE LOGGED + logger.setMinLogLevel(LogLevel.WARNING); + assertEquals(LogLevel.WARNING, logger.getMinLogLevel()); + assertFalse(logger.canLog(LogLevel.DEBUG)); + assertFalse(logger.canLog(LogLevel.INFO)); + assertTrue(logger.canLog(LogLevel.WARNING)); + assertTrue(logger.canLog(LogLevel.ERROR)); + assertTrue(logger.canLog(LogLevel.FATAL)); + + // Test: ERROR => ONLY ERROR AND FATAL CAN BE LOGGED + logger.setMinLogLevel(LogLevel.ERROR); + assertEquals(LogLevel.ERROR, logger.getMinLogLevel()); + assertFalse(logger.canLog(LogLevel.DEBUG)); + assertFalse(logger.canLog(LogLevel.INFO)); + assertFalse(logger.canLog(LogLevel.WARNING)); + assertTrue(logger.canLog(LogLevel.ERROR)); + assertTrue(logger.canLog(LogLevel.FATAL)); + + // Test: FATAL => ONLY FATAL CAN BE LOGGED + logger.setMinLogLevel(LogLevel.FATAL); + assertEquals(LogLevel.FATAL, logger.getMinLogLevel()); + assertFalse(logger.canLog(LogLevel.DEBUG)); + assertFalse(logger.canLog(LogLevel.INFO)); + assertFalse(logger.canLog(LogLevel.WARNING)); + assertFalse(logger.canLog(LogLevel.ERROR)); + assertTrue(logger.canLog(LogLevel.FATAL)); + } + +}