Skip to content
Snippets Groups Projects
Commit 64c2fee1 authored by Sonia Zorba's avatar Sonia Zorba
Browse files

Implemented retrieval of portal files during tar/zip archive generation and...

Implemented retrieval of portal files during tar/zip archive generation and other ArchiveService improvements
parent 0608bd6a
Branches
Tags
No related merge requests found
Pipeline #1997 failed
Showing
with 551 additions and 84 deletions
......@@ -14,6 +14,7 @@ import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.web.servlet.FilterRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.web.client.RestTemplate;
@SpringBootApplication
public class FileServiceApplication {
......@@ -38,4 +39,9 @@ public class FileServiceApplication {
registration.addUrlPatterns("/*");
return registration;
}
// Shared RestTemplate bean; used e.g. by ArchiveService to fetch portal-hosted
// files over HTTP while building tar/zip archives.
@Bean
public RestTemplate restTemplate() {
return new RestTemplate();
}
}
......@@ -61,9 +61,7 @@ public class GetFileController extends FileController {
private ResponseEntity<?> getFileResponse(FileInfo fileInfo) {
File file = new File(fileInfo.getOsPath());
String vosName = fileInfo.getVirtualPath() == null ? null
: fileInfo.getVirtualPath().substring(fileInfo.getVirtualPath().lastIndexOf("/") + 1);
return FileResponseUtil.getFileResponse(response, file, vosName);
return FileResponseUtil.getFileResponse(response, file, fileInfo.getVirtualName());
}
}
......@@ -39,7 +39,7 @@ public class FileDAO {
public Optional<FileInfo> getFileInfo(String virtualPath) {
String sql = "SELECT n.node_id, is_public, group_read, group_write, creator_id, async_trans,\n"
+ "content_type, content_encoding, content_length, content_md5,\n"
+ "content_type, content_encoding, content_length, content_md5, name, n.location_id,\n"
+ "accept_views, provide_views, l.location_type, n.path <> n.relative_path AS virtual_parent,\n"
+ "(SELECT user_name FROM users WHERE user_id = creator_id) AS username,\n"
+ "base_path, get_os_path(n.node_id) AS os_path, ? AS vos_path, false AS is_directory\n"
......@@ -133,7 +133,7 @@ public class FileDAO {
+ "n.accept_views, n.provide_views, l.location_type, n.path <> n.relative_path AS virtual_parent,\n"
+ "(SELECT user_name FROM users WHERE user_id = n.creator_id) AS username,\n"
+ "base_path, get_os_path(n.node_id) AS os_path, get_vos_path(n.node_id) AS vos_path,\n"
+ "n.type = 'container' AS is_directory\n"
+ "n.type = 'container' AS is_directory, n.name, n.location_id\n"
+ "FROM node n\n"
+ "JOIN node p ON p.path @> n.path\n"
+ "LEFT JOIN location l ON l.location_id = n.location_id\n"
......@@ -169,11 +169,16 @@ public class FileDAO {
fi.setProvideViews(toList(rs.getArray("provide_views")));
fi.setVirtualParent(rs.getBoolean("virtual_parent"));
fi.setVirtualPath(rs.getString("vos_path"));
fi.setVirtualName(rs.getString("name"));
fi.setContentEncoding(rs.getString("content_encoding"));
fi.setContentLength(rs.getLong("content_length"));
fi.setContentMd5(rs.getString("content_md5"));
fi.setContentType(rs.getString("content_type"));
fi.setDirectory(rs.getBoolean("is_directory"));
int locationId = rs.getInt("location_id");
if (!rs.wasNull()) {
fi.setLocationId(locationId);
}
fillOsPath(fi, rs);
......
......@@ -5,6 +5,7 @@
*/
package it.inaf.ia2.transfer.persistence;
import it.inaf.ia2.transfer.persistence.model.JobException;
import java.sql.Types;
import javax.sql.DataSource;
import net.ivoa.xml.uws.v1.ExecutionPhase;
......@@ -53,8 +54,7 @@ public class JobDAO {
ps -> {
ps.setString(1, jobId);
}, rs -> {
if(rs.next())
{
if (rs.next()) {
return ExecutionPhase.fromValue(rs.getString("phase"));
} else {
return null;
......@@ -64,4 +64,20 @@ public class JobDAO {
return result;
}
/**
 * Marks the given job as failed: stores the error message, type and optional
 * detail on the job record, sets the phase to ERROR and stamps the end time.
 *
 * @param jobId    identifier of the job to update
 * @param jobError descriptor carrying message, type and optional detail
 */
public void setJobError(String jobId, JobException jobError) {
    String sql = "UPDATE job SET phase = ?, error_message = ?, error_type = ?,\n"
            + "error_has_detail = ?, error_detail = ?, end_time = NOW()\n"
            + "WHERE job_id = ?";
    jdbcTemplate.update(sql, ps -> {
        // Types.OTHER lets the JDBC driver cast the strings to the
        // database-side enum column types.
        ps.setObject(1, ExecutionPhase.ERROR, Types.OTHER);
        ps.setString(2, jobError.getErrorMessage());
        ps.setObject(3, jobError.getType().value(), Types.OTHER);
        ps.setBoolean(4, jobError.getErrorDetail() != null);
        ps.setString(5, jobError.getErrorDetail());
        ps.setString(6, jobId);
    });
}
}
/*
* This file is part of vospace-file-service
* Copyright (C) 2021 Istituto Nazionale di Astrofisica
* SPDX-License-Identifier: GPL-3.0-or-later
*/
package it.inaf.ia2.transfer.persistence;
import java.util.HashMap;
import java.util.Map;
import javax.sql.DataSource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Repository;
@Repository
public class LocationDAO {

    private final JdbcTemplate jdbcTemplate;

    @Autowired
    public LocationDAO(DataSource fileCatalogDatasource) {
        this.jdbcTemplate = new JdbcTemplate(fileCatalogDatasource);
    }

    /**
     * Loads the base URL of every location of type 'portal', keyed by
     * location_id. Each URL is assembled as http://&lt;hostname&gt;&lt;base_url&gt;
     * from the storage row referenced by the location's storage_dest_id.
     *
     * @return map from location_id to portal base URL (empty if none defined)
     */
    public Map<Integer, String> getPortalLocationUrls() {
        String sql = "SELECT location_id, hostname, base_url\n"
                + "FROM location l\n"
                + "JOIN storage s ON s.storage_id = l.storage_dest_id\n"
                + "WHERE location_type = 'portal'";
        return jdbcTemplate.query(sql, rs -> {
            Map<Integer, String> urlsById = new HashMap<>();
            while (rs.next()) {
                urlsById.put(rs.getInt("location_id"),
                        "http://" + rs.getString("hostname") + rs.getString("base_url"));
            }
            return urlsById;
        });
    }
}
......@@ -12,6 +12,7 @@ public class FileInfo {
private int nodeId;
private String osPath;
private String virtualPath;
private String virtualName;
private boolean isPublic;
private boolean virtualParent;
private boolean directory;
......@@ -25,6 +26,7 @@ public class FileInfo {
private String contentEncoding;
private Long contentLength;
private String contentMd5;
private Integer locationId;
public int getNodeId() {
return nodeId;
......@@ -82,6 +84,14 @@ public class FileInfo {
this.virtualPath = virtualPath;
}
// Node name as stored in the file catalog (the "name" column), i.e. the last
// segment of the virtual path.
public String getVirtualName() {
return virtualName;
}
public void setVirtualName(String virtualName) {
this.virtualName = virtualName;
}
public boolean isPublic() {
return isPublic;
}
......@@ -153,4 +163,12 @@ public class FileInfo {
public void setProvideViews(List<String> provideViews) {
this.provideViews = provideViews;
}
// Catalog location_id of the node; null when the node row has no location
// (the DAO only sets it when the column is non-NULL).
public Integer getLocationId() {
return locationId;
}
public void setLocationId(Integer locationId) {
this.locationId = locationId;
}
}
/*
* This file is part of vospace-file-service
* Copyright (C) 2021 Istituto Nazionale di Astrofisica
* SPDX-License-Identifier: GPL-3.0-or-later
*/
package it.inaf.ia2.transfer.persistence.model;
/**
 * Unchecked exception describing a failed job in UWS terms: a short error
 * message, an optional detail text and a fatal/transient classification.
 * The setters are fluent so an instance can be built and thrown inline.
 */
public class JobException extends RuntimeException {

    /** Failure classification; {@link #value()} yields the lowercase label. */
    public enum Type {

        TRANSIENT("transient"),
        FATAL("fatal");

        private final String label;

        Type(String label) {
            this.label = label;
        }

        /** @return the lowercase textual form used when persisting the type */
        public String value() {
            return label;
        }
    }

    private final Type type;
    private String errorMessage;
    private String errorDetail;

    public JobException(Type type) {
        this.type = type;
    }

    public Type getType() {
        return type;
    }

    public String getErrorMessage() {
        return errorMessage;
    }

    /** Fluent setter for the short error message. */
    public JobException setErrorMessage(String errorMessage) {
        this.errorMessage = errorMessage;
        return this;
    }

    public String getErrorDetail() {
        return errorDetail;
    }

    /** Fluent setter for the detailed error description. */
    public JobException setErrorDetail(String errorDetail) {
        this.errorDetail = errorDetail;
        return this;
    }
}
......@@ -12,15 +12,12 @@ public class ArchiveJob {
public static enum Type {
TAR,
TGZ,
ZIP;
public String getExtension() {
switch (this) {
case TAR:
return "tar";
case TGZ:
return "tar.gz";
case ZIP:
return "zip";
default:
......
......@@ -5,44 +5,70 @@
*/
package it.inaf.ia2.transfer.service;
import it.inaf.ia2.transfer.auth.TokenPrincipal;
import it.inaf.ia2.transfer.persistence.FileDAO;
import it.inaf.ia2.transfer.persistence.JobDAO;
import it.inaf.ia2.transfer.persistence.LocationDAO;
import it.inaf.ia2.transfer.persistence.model.FileInfo;
import java.io.BufferedInputStream;
import it.inaf.ia2.transfer.persistence.model.JobException;
import it.inaf.ia2.transfer.persistence.model.JobException.Type;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.security.Principal;
import java.util.List;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import javax.annotation.PostConstruct;
import net.ivoa.xml.uws.v1.ExecutionPhase;
import org.kamranzafar.jtar.TarEntry;
import org.kamranzafar.jtar.TarOutputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.stereotype.Service;
import org.springframework.util.FileSystemUtils;
import org.springframework.web.client.RestTemplate;
@Service
public class ArchiveService {
private static final Logger LOG = LoggerFactory.getLogger(ArchiveService.class);
private final static int BUFFER_SIZE = 100 * 1024;
@Autowired
private FileDAO fileDAO;
@Autowired
private LocationDAO locationDAO;
@Autowired
private JobDAO jobDAO;
@Autowired
private AuthorizationService authorizationService;
private final File generatedDir;
@Autowired
private RestTemplate restTemplate;
public ArchiveService(@Value("${generated.dir}") String generatedDir) {
this.generatedDir = new File(generatedDir);
@Value("${upload_location_id}")
private int uploadLocationId;
@Value("${generated.dir}")
private String generatedDirString;
private File generatedDir;
@PostConstruct
public void init() {
this.generatedDir = new File(generatedDirString);
if (!this.generatedDir.exists()) {
if (!this.generatedDir.mkdirs()) {
throw new IllegalStateException("Unable to create directory " + this.generatedDir.getAbsolutePath());
......@@ -50,62 +76,87 @@ public class ArchiveService {
}
}
public void createArchive(ArchiveJob job) {
public <O extends OutputStream, E> void createArchive(ArchiveJob job) {
jobDAO.updateJobPhase(ExecutionPhase.EXECUTING, job.getJobId());
LOG.trace("Started archive job " + job.getJobId());
try {
// TODO: check total size limit
// TODO: switch on archive type
File parentDir = getArchiveParentDir(job.getPrincipal());
if (!parentDir.exists()) {
if (!parentDir.mkdirs()) {
throw new IllegalStateException("Unable to create directory " + parentDir.getAbsolutePath());
}
}
File archiveFile = parentDir.toPath().resolve(job.getJobId() + "." + job.getType().getExtension()).toFile();
if (!archiveFile.createNewFile()) {
throw new IllegalStateException("Unable to create file " + archiveFile.getAbsolutePath());
}
File archiveFile = getArchiveFile(job);
String commonParent = getCommonParent(job.getVosPaths());
// support directory used to generate folder inside tar files (path is redefined each time by TarEntry class)
File supportDir = Files.createTempDirectory("dir").toFile();
try ( TarOutputStream tos = new TarOutputStream(
new BufferedOutputStream(new FileOutputStream(archiveFile)))) {
// it will be initialized only when necessary
Map<Integer, String> portalLocationUrls = null;
try ( ArchiveHandler<O, E> handler = getArchiveHandler(archiveFile, job.getType())) {
for (FileInfo fileInfo : fileDAO.getArchiveFileInfos(job.getVosPaths())) {
String relPath = fileInfo.getVirtualPath().substring(commonParent.length());
if (fileInfo.isDirectory()) {
tos.putNextEntry(new TarEntry(supportDir, relPath));
handler.putNextEntry(supportDir, relPath);
continue;
}
// TODO: handle different locations
if (!authorizationService.isDownloadable(fileInfo, job.getPrincipal())) {
// TODO: proper exception type
throw new RuntimeException("Unauthorized");
if (fileInfo.getLocationId() != null && fileInfo.getLocationId() != uploadLocationId) {
// remote file
if (portalLocationUrls == null) {
portalLocationUrls = locationDAO.getPortalLocationUrls();
}
String url = portalLocationUrls.get(fileInfo.getLocationId());
downloadFileIntoArchive(fileInfo, relPath, job.getPrincipal(), handler, url);
} else {
// local file or virtual directory
writeFileIntoArchive(fileInfo, relPath, job.getPrincipal(), handler);
}
File file = new File(fileInfo.getOsPath());
LOG.trace("Adding file " + file.getAbsolutePath() + " to tar archive");
writeFileIntoTarArchive(file, relPath, tos);
}
} finally {
FileSystemUtils.deleteRecursively(supportDir);
}
// TODO: update job status
jobDAO.updateJobPhase(ExecutionPhase.COMPLETED, job.getJobId());
} catch (Throwable t) {
LOG.error("Error happened creating archive", t);
JobException jobException;
if (t instanceof JobException) {
jobException = (JobException) t;
} else {
LOG.error("Unexpected error happened creating archive", t);
jobException = new JobException(Type.FATAL).setErrorMessage("Internal Fault")
.setErrorDetail("InternalFault: Unexpected error happened creating archive");
}
jobDAO.setJobError(job.getJobId(), jobException);
}
}
/**
 * Creates the empty archive file for the job inside the per-user directory,
 * creating that directory first if necessary.
 *
 * @param job the archive job providing principal, job id and archive type
 * @return the newly created (empty) archive file
 * @throws JobException (FATAL) if the directory or the file cannot be created
 * @throws IOException  on unexpected I/O failures while creating the file
 */
private File getArchiveFile(ArchiveJob job) throws IOException {
    File parentDir = getArchiveParentDir(job.getPrincipal());
    try {
        // Files.createDirectories is a no-op when the directory already exists
        // and is race-free, unlike the exists()+mkdirs() pair it replaces.
        Files.createDirectories(parentDir.toPath());
    } catch (IOException ex) {
        LOG.error("Unable to create directory " + parentDir.getAbsolutePath(), ex);
        throw new JobException(Type.FATAL).setErrorMessage("Internal Fault")
                .setErrorDetail("InternalFault: Unable to create temporary directory for job");
    }
    File archiveFile = parentDir.toPath().resolve(job.getJobId() + "." + job.getType().getExtension()).toFile();
    // createNewFile() returns false if the file already exists (e.g. a
    // previous run of the same job id), which we treat as a fatal error.
    if (!archiveFile.createNewFile()) {
        LOG.error("Unable to create file " + archiveFile.getAbsolutePath());
        throw new JobException(Type.FATAL).setErrorMessage("Internal Fault")
                .setErrorDetail("InternalFault: Unable to create archive file");
    }
    return archiveFile;
}
// Per-user directory under generated.dir where this principal's archives are
// written. NOTE(review): principal.getName() is used as a path segment —
// assumes user names never contain separators or traversal sequences; confirm.
public File getArchiveParentDir(Principal principal) {
return generatedDir.toPath().resolve(principal.getName()).toFile();
}
......@@ -131,21 +182,125 @@ public class ArchiveService {
return commonParent;
}
private void writeFileIntoTarArchive(File file, String path, TarOutputStream tos) throws IOException {
TarEntry tarEntry = new TarEntry(file, path);
private static abstract class ArchiveHandler<O extends OutputStream, E> implements AutoCloseable {
try ( InputStream is = new FileInputStream(file)) {
tos.putNextEntry(tarEntry);
try ( BufferedInputStream origin = new BufferedInputStream(is)) {
int count;
byte data[] = new byte[BUFFER_SIZE];
private final O os;
ArchiveHandler(O os) {
this.os = os;
}
public abstract E getEntry(File file, String path);
public abstract void putNextEntry(E entry) throws IOException;
public void putNextEntry(File file, String path) throws IOException {
putNextEntry(getEntry(file, path));
}
public final O getOutputStream() {
return os;
}
@Override
public void close() throws Exception {
os.close();
}
}
// ArchiveHandler variant producing uncompressed tar output via the jtar
// library (TarOutputStream/TarEntry).
private class TarArchiveHandler extends ArchiveHandler<TarOutputStream, TarEntry> {
TarArchiveHandler(File archiveFile) throws IOException {
super(new TarOutputStream(new BufferedOutputStream(new FileOutputStream(archiveFile))));
}
@Override
public TarEntry getEntry(File file, String path) {
// TarEntry reads the entry metadata (size, flags) from the real file.
return new TarEntry(file, path);
}
@Override
public void putNextEntry(TarEntry entry) throws IOException {
getOutputStream().putNextEntry(entry);
}
}
/**
 * ArchiveHandler variant producing zip output via java.util.zip.
 */
private class ZipArchiveHandler extends ArchiveHandler<ZipOutputStream, ZipEntry> {

    ZipArchiveHandler(File archiveFile) throws IOException {
        super(new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(archiveFile))));
    }

    @Override
    public ZipEntry getEntry(File file, String path) {
        // ZipEntry treats names ending with '/' as folders.
        return new ZipEntry(file.isDirectory() ? path + "/" : path);
    }

    @Override
    public void putNextEntry(ZipEntry entry) throws IOException {
        getOutputStream().putNextEntry(entry);
    }
}
// Factory selecting the concrete handler for the requested archive type.
// The raw return type is deliberate: the caller binds the generic parameters
// at the call site via an unchecked assignment.
// NOTE(review): ArchiveJob.Type also declares TGZ, which falls through to the
// default branch here — confirm whether gzip'd tar support is still pending.
private ArchiveHandler getArchiveHandler(File archiveFile, ArchiveJob.Type type) throws IOException {
switch (type) {
case TAR:
return new TarArchiveHandler(archiveFile);
case ZIP:
return new ZipArchiveHandler(archiveFile);
default:
throw new UnsupportedOperationException("Type " + type + " not supported yet");
}
}
private <O extends OutputStream, E> void downloadFileIntoArchive(FileInfo fileInfo, String relPath, TokenPrincipal tokenPrincipal, ArchiveHandler<O, E> handler, String baseUrl) {
if (baseUrl == null) {
LOG.error("Location URL not found for location " + fileInfo.getLocationId());
throw new JobException(Type.FATAL).setErrorMessage("Internal Fault")
.setErrorDetail("InternalFault: Unable to retrieve location of file " + fileInfo.getVirtualPath());
}
while ((count = origin.read(data)) != -1) {
tos.write(data, 0, count);
String url = baseUrl + "/" + fileInfo.getVirtualName();
LOG.trace("Downloading file from " + url);
restTemplate.execute(url, HttpMethod.GET, req -> {
HttpHeaders headers = req.getHeaders();
if (tokenPrincipal.getToken() != null) {
headers.setBearerAuth(tokenPrincipal.getToken());
}
}, res -> {
File tmpFile = Files.createTempFile("download", null).toFile();
try ( FileOutputStream os = new FileOutputStream(tmpFile)) {
res.getBody().transferTo(os);
handler.putNextEntry(tmpFile, relPath);
try ( FileInputStream is = new FileInputStream(tmpFile)) {
is.transferTo(handler.getOutputStream());
}
} finally {
tmpFile.delete();
}
return null;
}, new Object[]{});
}
tos.flush();
/**
 * Streams a locally stored file into the archive, after verifying that the
 * principal is allowed to download it.
 *
 * @param fileInfo       catalog metadata of the file (osPath must be set)
 * @param relPath        entry path of the file inside the archive
 * @param tokenPrincipal principal whose download permission is checked
 * @param handler        archive writer (tar or zip) receiving the entry
 * @throws JobException (FATAL, "Permission Denied") if not downloadable
 * @throws IOException  on read/write failures
 */
private <O extends OutputStream, E> void writeFileIntoArchive(FileInfo fileInfo, String relPath, TokenPrincipal tokenPrincipal, ArchiveHandler<O, E> handler) throws IOException {
    if (!authorizationService.isDownloadable(fileInfo, tokenPrincipal)) {
        throw new JobException(Type.FATAL).setErrorMessage("Permission Denied")
                .setErrorDetail("PermissionDenied: " + fileInfo.getVirtualPath());
    }
    File file = new File(fileInfo.getOsPath());
    // Log text fixed: this path serves both tar and zip generation.
    LOG.trace("Adding file " + file.getAbsolutePath() + " to archive");
    try ( InputStream is = new FileInputStream(file)) {
        handler.putNextEntry(file, relPath);
        is.transferTo(handler.getOutputStream());
    }
}
}
......@@ -47,17 +47,23 @@ public class FileDAOTest {
assertEquals("/home/username1/retrieve/file1.txt", fileInfo.getOsPath());
}
@Test
public void testGetUnexistingFile() {
assertTrue(dao.getFileInfo("/doesnt/exist").isEmpty());
}
@Test
public void testGetArchiveFileInfos() {
List<FileInfo> fileInfos = dao.getArchiveFileInfos(Arrays.asList("/public/file1", "/public/file2", "/public/subdir1"));
assertEquals(5, fileInfos.size());
assertEquals(6, fileInfos.size());
assertEquals("/home/vospace/upload/user1/file1", fileInfos.get(0).getOsPath());
assertEquals("/home/vospace/upload/user1/file2", fileInfos.get(1).getOsPath());
assertTrue(fileInfos.get(2).isDirectory());
assertEquals("/home/username1/retrieve/subdir1/file3", fileInfos.get(3).getOsPath());
assertEquals("/home/username1/retrieve/subdir1/file4", fileInfos.get(4).getOsPath());
assertEquals("portal-file", fileInfos.get(5).getVirtualName());
}
}
......@@ -5,6 +5,8 @@
*/
package it.inaf.ia2.transfer.persistence;
import it.inaf.ia2.transfer.persistence.model.JobException;
import it.inaf.ia2.transfer.persistence.model.JobException.Type;
import javax.sql.DataSource;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.extension.ExtendWith;
......@@ -34,8 +36,7 @@ public class JobDAOTest {
}
@Test
public void testNodeDAO()
{
public void testJobPhase() {
assertTrue(dao.isJobExisting("pippo5"));
assertFalse(dao.isJobExisting("pippo22"));
......@@ -46,6 +47,17 @@ public class JobDAOTest {
assertEquals(ExecutionPhase.COMPLETED, phase);
}
// Verifies that setJobError flips the persisted phase to ERROR.
// Relies on fixture job "pippo3" starting in the QUEUED phase.
@Test
public void testSetJobError() {
assertEquals(ExecutionPhase.QUEUED, dao.getJobPhase("pippo3"));
JobException jobError = new JobException(Type.FATAL)
.setErrorMessage("Error message")
.setErrorDetail("Error detail");
dao.setJobError("pippo3", jobError);
assertEquals(ExecutionPhase.ERROR, dao.getJobPhase("pippo3"));
}
}
/*
* This file is part of vospace-file-service
* Copyright (C) 2021 Istituto Nazionale di Astrofisica
* SPDX-License-Identifier: GPL-3.0-or-later
*/
package it.inaf.ia2.transfer.persistence;
import java.util.Map;
import javax.sql.DataSource;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit.jupiter.SpringExtension;
// Integration test for LocationDAO against the test datasource configured by
// DataSourceConfig and test.properties.
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = {DataSourceConfig.class})
@TestPropertySource(locations = "classpath:test.properties")
public class LocationDAOTest {
@Autowired
private DataSource dataSource;
private LocationDAO dao;
@BeforeEach
public void init() {
dao = new LocationDAO(dataSource);
}
// Fixture data defines exactly one 'portal' location (location_id 4), whose
// URL is assembled from the storage hostname and base_url columns.
@Test
public void testGetPortalLocationUrls() {
Map<Integer, String> map = dao.getPortalLocationUrls();
assertEquals(1, map.size());
assertEquals("http://archive.lbto.org/files/lbt", map.get(4));
}
}
......@@ -7,13 +7,24 @@ package it.inaf.ia2.transfer.service;
import it.inaf.ia2.transfer.auth.TokenPrincipal;
import it.inaf.ia2.transfer.persistence.FileDAO;
import it.inaf.ia2.transfer.persistence.JobDAO;
import it.inaf.ia2.transfer.persistence.LocationDAO;
import it.inaf.ia2.transfer.persistence.model.FileInfo;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import net.ivoa.xml.uws.v1.ExecutionPhase;
import org.junit.jupiter.api.AfterAll;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
......@@ -25,21 +36,42 @@ import org.junit.jupiter.api.extension.ExtendWith;
import org.kamranzafar.jtar.TarEntry;
import org.kamranzafar.jtar.TarInputStream;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.http.HttpMethod;
import org.springframework.http.client.ClientHttpResponse;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.util.FileSystemUtils;
import org.springframework.web.client.RequestCallback;
import org.springframework.web.client.ResponseExtractor;
import org.springframework.web.client.RestTemplate;
@ExtendWith(MockitoExtension.class)
public class ArchiveServiceTest {
@Mock
private JobDAO jobDAO;
@Mock
private FileDAO fileDAO;
@Mock
private LocationDAO locationDAO;
@Mock
private RestTemplate restTemplate;
@Mock
private AuthorizationService authorizationService;
@InjectMocks
private ArchiveService archiveService;
private static File tmpDir;
......@@ -56,14 +88,72 @@ public class ArchiveServiceTest {
@BeforeEach
public void setUp() {
archiveService = new ArchiveService(tmpDir.getAbsolutePath());
ReflectionTestUtils.setField(archiveService, "fileDAO", fileDAO);
ReflectionTestUtils.setField(archiveService, "authorizationService", authorizationService);
ReflectionTestUtils.setField(archiveService, "generatedDir", tmpDir);
}
// Runs the shared archive-generation scenario for the TAR type, adapting
// jtar's TarInputStream/TarEntry to the generic TestArchiveHandler reader.
@Test
public void testTarGeneration() throws Exception {
testArchiveGeneration(ArchiveJob.Type.TAR, "tar", is -> new TestArchiveHandler<TarInputStream, TarEntry>(new TarInputStream(is)) {
@Override
TarEntry getNextEntry() throws IOException {
return getInputStream().getNextEntry();
}
@Override
String getName(TarEntry entry) {
return entry.getName();
}
@Override
boolean isDirectory(TarEntry entry) {
return entry.isDirectory();
}
});
}
// Runs the shared archive-generation scenario for the ZIP type, adapting
// java.util.zip's ZipInputStream/ZipEntry to the generic reader.
@Test
public void testZipGeneration() throws Exception {
testArchiveGeneration(ArchiveJob.Type.ZIP, "zip", is -> new TestArchiveHandler<ZipInputStream, ZipEntry>(new ZipInputStream(is)) {
@Override
ZipEntry getNextEntry() throws IOException {
return getInputStream().getNextEntry();
}
@Override
String getName(ZipEntry entry) {
return entry.getName();
}
@Override
boolean isDirectory(ZipEntry entry) {
return entry.isDirectory();
}
});
}
// Format-agnostic reader used by the tests to iterate archive entries:
// I is the concrete input stream type, E the concrete entry type.
private static abstract class TestArchiveHandler<I extends InputStream, E> {
private final I is;
TestArchiveHandler(I is) {
this.is = is;
}
I getInputStream() {
return is;
}
// Next entry in the archive, or null when exhausted.
abstract E getNextEntry() throws IOException;
// Entry path inside the archive.
abstract String getName(E entry);
// Whether the entry represents a folder.
abstract boolean isDirectory(E entry);
}
private <I extends InputStream, E> void testArchiveGeneration(ArchiveJob.Type type, String extension, Function<FileInputStream, TestArchiveHandler<I, E>> testArchiveGetter) throws Exception {
String parent = "/path/to";
File tmpParent = tmpDir.toPath().resolve("test1").toFile();
......@@ -73,11 +163,12 @@ public class ArchiveServiceTest {
File file4 = createFile(tmpParent, "dir2/c/file4");
File file5 = createFile(tmpParent, "dir2/c/d/file5");
File file6 = createFile(tmpParent, "file6");
File file7 = createFile(tmpParent, "portal-file");
ArchiveJob job = new ArchiveJob();
job.setPrincipal(new TokenPrincipal("user123", "token123"));
job.setJobId("abcdef");
job.setType(ArchiveJob.Type.TAR);
job.setType(type);
job.setVosPaths(Arrays.asList(parent + "/dir1", parent + "/dir2", parent + "/file6"));
when(authorizationService.isDownloadable(any(), any())).thenReturn(true);
......@@ -95,48 +186,70 @@ public class ArchiveServiceTest {
addFileInfo(fileInfos, parent + "/dir2/c/file4", file4);
addDirInfo(fileInfos, parent + "/dir2/c/d");
addFileInfo(fileInfos, parent + "/dir2/c/d/file5", file5);
addFileInfo(fileInfos, parent + "/portal-file", file7).setLocationId(1);
when(fileDAO.getArchiveFileInfos(any())).thenReturn(fileInfos);
when(locationDAO.getPortalLocationUrls()).thenReturn(Map.of(1, "http://portal/base/url"));
doAnswer(invocation -> {
ResponseExtractor responseExtractor = invocation.getArgument(3);
ClientHttpResponse mockedResponse = mock(ClientHttpResponse.class);
when(mockedResponse.getBody()).thenReturn(new ByteArrayInputStream("some data".getBytes()));
responseExtractor.extractData(mockedResponse);
return null;
}).when(restTemplate).execute(eq("http://portal/base/url/portal-file"), eq(HttpMethod.GET),
any(RequestCallback.class), any(ResponseExtractor.class), any(Object[].class));
archiveService.createArchive(job);
File result = tmpDir.toPath().resolve("user123").resolve("abcdef.tar").toFile();
verify(jobDAO, times(1)).updateJobPhase(eq(ExecutionPhase.COMPLETED), eq(job.getJobId()));
File result = tmpDir.toPath().resolve("user123").resolve("abcdef." + extension).toFile();
assertTrue(result.exists());
// verify result structure
List<String> expectedSequence = Arrays.asList("file6", "dir1/", "dir1/a/", "dir1/a/b/",
"dir1/a/b/file1", "dir1/a/b/file2", "dir2/", "dir2/c/", "dir2/c/file3", "dir2/c/file4",
"dir2/c/d/", "dir2/c/d/file5");
"dir2/c/d/", "dir2/c/d/file5", "portal-file");
int i = 0;
try ( TarInputStream tis = new TarInputStream(new FileInputStream(result))) {
TarEntry entry;
while ((entry = tis.getNextEntry()) != null) {
TestArchiveHandler<I, E> testArchiveHandler = testArchiveGetter.apply(new FileInputStream(result));
try ( InputStream is = testArchiveHandler.getInputStream()) {
E entry;
while ((entry = testArchiveHandler.getNextEntry()) != null) {
assertFalse(i >= expectedSequence.size(), "Found more entries than in expected sequence");
assertEquals(expectedSequence.get(i), entry.getName());
if (!entry.isDirectory()) {
assertEquals("some data", new String(tis.readAllBytes()));
assertEquals(expectedSequence.get(i), testArchiveHandler.getName(entry));
if (!testArchiveHandler.isDirectory(entry)) {
assertEquals("some data", new String(is.readAllBytes()));
}
i++;
}
} catch (IOException ex) {
throw new UncheckedIOException(ex);
}
assertFalse(i < expectedSequence.size(), "Found less entries than in expected sequence");
}
private void addFileInfo(List<FileInfo> fileInfos, String vosPath, File file) {
private FileInfo addFileInfo(List<FileInfo> fileInfos, String vosPath, File file) {
FileInfo fileInfo = new FileInfo();
fileInfo.setOsPath(file.getAbsolutePath());
fileInfo.setVirtualPath(vosPath);
fileInfo.setVirtualName(vosPath.substring(vosPath.lastIndexOf("/") + 1));
fileInfos.add(fileInfo);
return fileInfo;
}
private void addDirInfo(List<FileInfo> fileInfos, String vosPath) {
private FileInfo addDirInfo(List<FileInfo> fileInfos, String vosPath) {
FileInfo fileInfo = new FileInfo();
fileInfo.setVirtualPath(vosPath);
fileInfo.setDirectory(true);
fileInfos.add(fileInfo);
return fileInfo;
}
private File createFile(File parent, String path) throws Exception {
......
......@@ -28,7 +28,8 @@ INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, loc
('5', '', 'file2', 'data', 'user1', 3, true),
('5', '', 'subdir1', 'container', 'user1', NULL, true),
('5.8', '8', 'file3', 'data', 'user1', 1, true),
('5.8', '8', 'file4', 'data', 'user1', 1, true);
('5.8', '8', 'file4', 'data', 'user1', 1, true),
('5.8', '8', 'portal-file', 'data', 'user1', 4, true);
DELETE FROM job;
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment