diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 251616a87cb7cd127c0407b30276e042626912bb..22b8717b24b7270cdaadbdc52c20ec55724be4f4 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -2,11 +2,19 @@ stages:
   - test
   - dockerize
 
+variables:
+  # to avoid "fatal: git fetch-pack: expected shallow list"
+  GIT_STRATEGY: clone
+
 test:
   stage: test
   tags:
     - docker
+  image: "git.ia2.inaf.it:5050/vospace/vospace-oats/vospace-test-env"
+  variables:
+    FILE_CATALOG_REPO_URL: "https://gitlab-ci-token:${CI_JOB_TOKEN}@www.ict.inaf.it/gitlab/vospace/vospace-file-catalog.git"
   script:
+    - git clone ${FILE_CATALOG_REPO_URL}
     - mvn clean test
     - awk -F"," '{ instructions += $4 + $5; covered += $5 } END { print "coverage=" 100*covered/instructions }' target/site/jacoco/jacoco.csv
   coverage: '/coverage=\d+\.\d+/'
diff --git a/pom.xml b/pom.xml
index 4d9233328d5785ceb9a08fc9450886002f7f01fd..a464d1bd314bc6d9bfa2b47047016d363f7975e3 100644
--- a/pom.xml
+++ b/pom.xml
@@ -15,8 +15,10 @@
     <description>VOSpace File service</description>
 
     <properties>
-        <java.version>14</java.version>
         <finalName>${project.artifactId}-${project.version}</finalName>
+        <!-- File catalog repository directory -->
+        <init_database_scripts_path>../../../vospace-file-catalog</init_database_scripts_path>
+        <zonky.postgres-binaries.version>12.5.0</zonky.postgres-binaries.version>
     </properties>
 
     <dependencies>
@@ -57,8 +59,51 @@
             <artifactId>rap-client</artifactId>
             <version>1.0-SNAPSHOT</version>
         </dependency>
+
+        <!-- Embedded PostgreSQL: -->
+        <dependency>
+            <groupId>com.opentable.components</groupId>
+            <artifactId>otj-pg-embedded</artifactId>
+            <version>0.13.3</version>
+            <scope>test</scope>
+        </dependency>
     </dependencies>
 
+    <profiles>
+        <profile>
+            <id>platform-linux</id>
+            <activation>
+                <os>
+                    <family>unix</family>
+                </os>
+            </activation>
+            <dependencies>
+                <dependency>
+                    <groupId>io.zonky.test.postgres</groupId>
+                    <artifactId>embedded-postgres-binaries-linux-amd64</artifactId>
+                    <version>${zonky.postgres-binaries.version}</version>
+                    <scope>test</scope>
+                </dependency>
+            </dependencies>
+        </profile>
+        <profile>
+            <id>platform-windows</id>
+            <activation>
+                <os>
+                    <family>windows</family>
+                </os>
+            </activation>
+            <dependencies>
+                <dependency>
+                    <groupId>io.zonky.test.postgres</groupId>
+                    <artifactId>embedded-postgres-binaries-windows-amd64</artifactId>
+                    <version>${zonky.postgres-binaries.version}</version>
+                    <scope>test</scope>
+                </dependency>
+            </dependencies>
+        </profile>
+    </profiles>
+
     <repositories>
         <repository>
             <id>ia2-snapshots</id>
@@ -69,6 +114,25 @@
 
     <build>
         <finalName>${finalName}</finalName>
+        <testResources>
+            <testResource>
+                <directory>src/test/resources</directory>
+                <filtering>true</filtering>
+                <includes>
+                    <include>test.properties</include>
+                </includes>
+            </testResource>
+            <testResource>
+                <directory>src/test/resources</directory>
+                <filtering>false</filtering>
+                <includes>
+                    <include>**/*</include>
+                </includes>
+                <excludes>
+                    <exclude>test.properties</exclude>
+                </excludes>
+            </testResource>
+        </testResources>
         <plugins>
            <plugin>
                <artifactId>maven-surefire-plugin</artifactId>
diff --git a/src/test/java/it/inaf/ia2/transfer/persistence/DataSourceConfig.java b/src/test/java/it/inaf/ia2/transfer/persistence/DataSourceConfig.java
new file mode 100644
index 0000000000000000000000000000000000000000..e298c4a7bc74278cff18febdce1d737a488a60ce
--- /dev/null
+++ b/src/test/java/it/inaf/ia2/transfer/persistence/DataSourceConfig.java
@@ -0,0 +1,123 @@
+package it.inaf.ia2.transfer.persistence;
+
+import com.opentable.db.postgres.embedded.EmbeddedPostgres;
+import com.opentable.db.postgres.embedded.PgBinaryResolver;
+import com.opentable.db.postgres.embedded.UncompressBundleDirectoryResolver;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.sql.Connection;
+import java.util.Arrays;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import javax.sql.DataSource;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.test.context.TestConfiguration;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Primary;
+import org.springframework.context.annotation.Scope;
+import org.springframework.core.io.ByteArrayResource;
+import org.springframework.core.io.ClassPathResource;
+import org.springframework.jdbc.datasource.init.ScriptUtils;
+
+/**
+ * Generates a DataSource that can be used for testing DAO classes. It loads an
+ * embedded Postgres database and fills it with the data from the
+ * vospace-file-catalog repository (the folder must exist; its location can be
+ * configured through the init_database_scripts_path property in test.properties).
+ */
+@TestConfiguration
+public class DataSourceConfig {
+
+    @Value("${init_database_scripts_path}")
+    private String scriptPath;
+
+    /**
+     * Using the prototype scope we generate a different database for each
+     * test.
+     */
+    @Bean
+    @Scope("prototype")
+    @Primary
+    public DataSource dataSource() throws Exception {
+        DataSource embeddedPostgresDS = EmbeddedPostgres.builder()
+                .setPgDirectoryResolver(new UncompressBundleDirectoryResolver(new CustomPostgresBinaryResolver()))
+                .start().getPostgresDatabase();
+
+        initDatabase(embeddedPostgresDS);
+
+        return embeddedPostgresDS;
+    }
+
+    private class CustomPostgresBinaryResolver implements PgBinaryResolver {
+
+        /**
+         * Loads a specific embedded Postgres version.
+         */
+        @Override
+        public InputStream getPgBinary(String system, String architecture) throws IOException {
+            ClassPathResource resource = new ClassPathResource(String.format("postgres-%s-%s.txz", system.toLowerCase(), architecture));
+            return resource.getInputStream();
+        }
+    }
+
+    /**
+     * Loads SQL scripts for database initialization from the
+     * vospace-file-catalog repo directory.
+     */
+    private void initDatabase(DataSource dataSource) throws Exception {
+        try (Connection conn = dataSource.getConnection()) {
+
+            File currentDir = new File(DataSourceConfig.class.getClassLoader().getResource(".").getFile());
+            File scriptDir = currentDir.toPath().resolve(scriptPath).toFile().getCanonicalFile();
+
+            assertTrue(scriptDir.exists(), "DAO tests require " + scriptDir.getAbsolutePath() + " to exist.\n"
+                    + "Please clone the repository from https://www.ict.inaf.it/gitlab/vospace/vospace-file-catalog.git");
+
+            File[] scripts = scriptDir.listFiles(f -> f.getName().endsWith(".sql"));
+            Arrays.sort(scripts); // sort alphabetically
+
+            for (File script : scripts) {
+                ByteArrayResource scriptResource = replaceDollarQuoting(script.toPath());
+                ScriptUtils.executeSqlScript(conn, scriptResource);
+            }
+
+            ScriptUtils.executeSqlScript(conn, new ClassPathResource("test-data.sql"));
+        }
+    }
+
+    /**
+     * It seems that dollar quoting (used in UDFs) is broken in JDBC. Replacing
+     * it with single quotes solves the problem. We replace the quoting here
+     * instead of inside the original files because dollar quoting keeps those
+     * files more readable.
+     */
+    private ByteArrayResource replaceDollarQuoting(Path sqlScriptPath) throws Exception {
+
+        String scriptContent = Files.readString(sqlScriptPath);
+
+        if (scriptContent.contains("$func$")) {
+
+            String func = extractFunctionDefinition(scriptContent);
+
+            String originalFunction = "$func$" + func + "$func$";
+            String newFunction = "'" + func.replaceAll("'", "''") + "'";
+
+            scriptContent = scriptContent.replace(originalFunction, newFunction);
+        }
+
+        return new ByteArrayResource(scriptContent.getBytes());
+    }
+
+    private String extractFunctionDefinition(String scriptContent) {
+        Pattern pattern = Pattern.compile("\\$func\\$(.*?)\\$func\\$", Pattern.DOTALL);
+        Matcher matcher = pattern.matcher(scriptContent);
+        if (matcher.find()) {
+            return matcher.group(1);
+        }
+        throw new IllegalArgumentException(scriptContent + " doesn't contain $func$");
+    }
+}
diff --git a/src/test/resources/test-data.sql b/src/test/resources/test-data.sql
new file mode 100644
index 0000000000000000000000000000000000000000..3dff11c62abdb00724ab989b040e9db16e6ed12d
--- /dev/null
+++ b/src/test/resources/test-data.sql
@@ -0,0 +1,9 @@
+DELETE FROM node;
+ALTER SEQUENCE node_node_id_seq RESTART WITH 1;
+
+INSERT INTO node (parent_path, parent_relative_path, name, type, owner_id, creator_id) VALUES (NULL, NULL, '', 'container', '0', '0');
+
+INSERT INTO node (parent_path, parent_relative_path, name, type, owner_id, creator_id, group_read, group_write) VALUES ('', NULL, 'test1', 'container', 'user1', 'user1', '{"group1","group2"}','{"group2"}'); -- /test1
+INSERT INTO node (parent_path, parent_relative_path, name, type, owner_id, creator_id, group_read, group_write) VALUES ('2', NULL, '.tmp-123.txt', 'structured', 'user1', 'user1', '{"group1","group2"}','{"group2"}'); -- /test1/.tmp-123.txt
+INSERT INTO node (parent_path, parent_relative_path, name, type, owner_id, creator_id, group_read, group_write) VALUES ('2', NULL, 'file1.txt', 'data', 'user1', 'user1', '{"group1","group2"}','{"group2"}'); -- /test1/file1.txt
+INSERT INTO node (parent_path, parent_relative_path, name, type, owner_id, creator_id, group_read, group_write) VALUES ('2', NULL, 'file2.txt', 'data', 'user1', 'user1', '{"group1","group2"}','{"group2"}'); -- /test1/file2.txt
diff --git a/src/test/resources/test.properties b/src/test/resources/test.properties
new file mode 100644
index 0000000000000000000000000000000000000000..9dd67fabc9af378d8b30a62f05d2c74cff22988d
--- /dev/null
+++ b/src/test/resources/test.properties
@@ -0,0 +1,2 @@
+# File catalog repository directory (filled in by pom.xml, can be overridden by passing an environment variable)
+init_database_scripts_path=@init_database_scripts_path@
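
Usage note (not part of the patch above): a DAO test could consume this configuration roughly as in the following sketch. Only DataSourceConfig, test.properties and test-data.sql come from this change; the class name ExampleDaoTest, the plain-JDBC query and the row-count assertion are assumptions for illustration, and the sketch presumes spring-test and JUnit 5 are already on the test classpath. It also still requires the vospace-file-catalog repository to be cloned at the configured init_database_scripts_path, as enforced by the assertTrue in initDatabase.

package it.inaf.ia2.transfer.persistence;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import javax.sql.DataSource;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit.jupiter.SpringExtension;

/**
 * Hypothetical usage sketch: wires the embedded Postgres DataSource provided
 * by DataSourceConfig into a JUnit 5 test and checks that test-data.sql has
 * been applied.
 */
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = {DataSourceConfig.class})
@TestPropertySource(locations = "classpath:test.properties")
public class ExampleDaoTest {

    @Autowired
    private DataSource dataSource;

    @Test
    public void nodeTableIsSeeded() throws Exception {
        // test-data.sql inserts the root node plus /test1 and its three children
        try (Connection conn = dataSource.getConnection();
                Statement stmt = conn.createStatement();
                ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM node")) {
            rs.next();
            assertEquals(5, rs.getInt(1));
        }
    }
}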