Skip to content
Snippets Groups Projects
Commit 29e892ef authored by nfcalabria's avatar nfcalabria
Browse files

Bugfixes from local copy testing

parent 02c3a41b
No related branches found
No related tags found
No related merge requests found
......@@ -17,6 +17,7 @@ import it.inaf.oats.vospace.exception.InvalidArgumentException;
import java.util.concurrent.CompletableFuture;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import it.inaf.ia2.transfer.auth.TokenPrincipal;
@RestController
public class CopyController extends AuthenticatedFileController {
......@@ -42,13 +43,15 @@ public class CopyController extends AuthenticatedFileController {
LOG.debug("copyFiles called from jobId {}", jobId);
TokenPrincipal principal = getPrincipal();
// need to make a completable future start
CompletableFuture.runAsync(() -> {
handleFileJob(() -> {
try {
copyService.copyFiles(copyRequest.getSourceRootVosPath(),
copyRequest.getDestinationRootVosPath(), copyRequest.getJobId(),
getPrincipal());
principal);
} finally {
// TODO: cleanup code to remove unpopulated nodes in case
// of failure?
......
......@@ -228,8 +228,8 @@ public class FileDAO {
+ "JOIN node p ON p.path @> n.path\n"
+ "LEFT JOIN location l ON l.location_id = n.location_id\n"
+ "LEFT JOIN storage s ON s.storage_id = l.storage_dest_id\n"
+ "WHERE (p.node_id = id_from_vos_path(?) AND n.job_id = ?)"
+ "\nORDER BY vos_path ASC";
+ "WHERE (p.node_id = id_from_vos_path(?) AND n.job_id = ?)\n"
+ "ORDER BY vos_path ASC\n";
return jdbcTemplate.query(conn -> {
PreparedStatement ps = conn.prepareStatement(sql);
......@@ -248,16 +248,26 @@ public class FileDAO {
/**
 * Assigns the given location id to every node of the branch rooted at
 * {@code rootVosPath} that was created by the job {@code jobId}.
 *
 * Note: the stripped diff residue left two interleaved versions of this
 * method (duplicate {@code sql} declarations and stale bind order); this is
 * the post-commit version only.
 *
 * @param rootVosPath VOSpace path of the branch root (e.g. "/test100")
 * @param jobId       id of the job whose nodes are updated
 * @param locationId  location_id value to set on all matching nodes
 */
public void setBranchLocationId(String rootVosPath, String jobId, int locationId) {
    // TODO: validate rootVosPath as a vos_path
    // CTE collects the branch: p is the root node (resolved from the vos
    // path), n is every node whose ltree path descends from p
    // (p.path @> n.path), restricted to nodes tagged with this job id.
    String cte = "SELECT n.node_id AS id\n"
            + "FROM node n\n"
            + "JOIN node p ON p.path @> n.path\n"
            + "WHERE (p.node_id = id_from_vos_path(?) AND n.job_id = ?)\n";
    String update = "UPDATE node\n"
            + " SET location_id = ?\n"
            + "FROM cte\n"
            + "WHERE node_id = cte.id\n";
    String sql = "WITH cte AS (\n"
            + cte
            + ")\n"
            + update;
    jdbcTemplate.update(conn -> {
        PreparedStatement ps = conn.prepareStatement(sql);
        // Bind order follows the textual order of the '?' placeholders in
        // the assembled statement: the two CTE parameters come first, the
        // SET parameter last.
        ps.setString(1, rootVosPath);
        ps.setString(2, jobId);
        ps.setInt(3, locationId);
        return ps;
    });
}
......
......@@ -52,16 +52,22 @@ public class FileCopyService {
public void copyFiles(String sourceRootVosPath,
String destinationRootVosPath, String jobId, TokenPrincipal principal) {
LOG.trace("copyFiles called for source {}, destination {}, from jobId \"{}\"",
sourceRootVosPath, destinationRootVosPath, jobId);
// We use jobId to identify nodes created by the REST part of CopyNode
// We expect them to be locked
List<FileInfo> sources
= fileDAO.getBranchFileInfos(sourceRootVosPath, jobId);
LOG.debug("found {} sources", sources.size());
if (sources.isEmpty()) {
throw new NodeNotFoundException(sourceRootVosPath);
}
// Set location of destinations to this file service update location
// before retrieving file infos
fileDAO.setBranchLocationId(destinationRootVosPath, jobId, uploadLocationId);
......@@ -69,6 +75,8 @@ public class FileCopyService {
List<FileInfo> destinations
= fileDAO.getBranchFileInfos(destinationRootVosPath, jobId);
LOG.debug("found {} destinations", destinations.size());
if (destinations.isEmpty()) {
throw new NodeNotFoundException(destinationRootVosPath);
}
......@@ -97,6 +105,7 @@ public class FileCopyService {
Map<Integer, String> portalLocationUrls = null;
for (FileInfo destinationFileInfo : destinationFileInfos) {
LOG.trace("Processing {} destination", destinationFileInfo.getVirtualPath());
// Cycle on files only
if (!destinationFileInfo.isDirectory()) {
// Calculate source file vos path
......@@ -169,7 +178,7 @@ public class FileCopyService {
String url = baseUrl + "/" + sourceFileInfo.getVirtualName();
LOG.trace("Downloading file from " + url);
LOG.trace("Downloading file from {}", url);
restTemplate.execute(url, HttpMethod.GET, req -> {
HttpHeaders headers = req.getHeaders();
......@@ -198,8 +207,7 @@ public class FileCopyService {
}
File file = new File(sourceFileInfo.getOsPath());
LOG.trace("Copying file: " + file.getAbsolutePath() + " to: "
+ destinationFileInfo.getOsPath());
LOG.trace("Copying file: {} to {}",file.getAbsolutePath(), destinationFileInfo.getOsPath());
putFileService.copyLocalFile(sourceFileInfo, destinationFileInfo, remainingQuota);
......
......@@ -123,15 +123,17 @@ public class PutFileService {
// TODO: discuss if mismatches lead to taking actions
if (sourceFileInfo != null) {
if (!Objects.equals(sourceFileInfo.getContentLength(), fileSize)) {
LOG.warn("Destination file size mismatch with source");
LOG.warn("Destination file {} size mismatch with source", destinationFile.toPath().toString());
}
if (!sourceFileInfo.getContentType().equals(destinationFileInfo.getContentType())) {
LOG.warn("Destination file content type mismatch with source");
LOG.warn("Destination file {} content type mismatch with source {} {}", destinationFile.toPath().toString(),
destinationFileInfo.getContentType(), sourceFileInfo.getContentType());
}
if (!sourceFileInfo.getContentMd5().equals(destinationFileInfo.getContentMd5())) {
LOG.warn("Destination file md5 mismatch with source");
if (!sourceFileInfo.getContentMd5().equals(md5Checksum)) {
LOG.warn("Destination file {} md5 mismatch with source {} {}", destinationFile.toPath().toString(),
destinationFileInfo.getContentMd5(), sourceFileInfo.getContentMd5() );
}
}
......
......@@ -42,6 +42,26 @@ public class FileDAOTest {
ReflectionTestUtils.setField(dao, "uploadLocationId", uploadLocationId);
}
@Test
public void testGetBranchFileInfo() {
    // Looking up the container root must return the whole branch created
    // by job "pippo" (root container plus its two job-tagged children).
    List<FileInfo> branchInfos = dao.getBranchFileInfos("/test100", "pippo");
    assertEquals(3, branchInfos.size());
    // Looking up a single leaf node must return exactly that node.
    List<FileInfo> leafInfos = dao.getBranchFileInfos("/test100/test1001.txt", "pippo");
    assertEquals(1, leafInfos.size());
}
@Test
public void testSetBranchLocationId() {
    // After updating the branch, every node in it must carry the new
    // location id (3), and the branch size must be unchanged.
    dao.setBranchLocationId("/test100", "pippo", 3);
    List<FileInfo> branchInfos = dao.getBranchFileInfos("/test100", "pippo");
    assertEquals(3, branchInfos.size());
    branchInfos.forEach(info -> assertEquals(3, info.getLocationId()));
}
@Test
public void testGetFileInfo() {
......
......@@ -37,6 +37,12 @@ INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, con
('12.13', NULL, 'file1', 'data', 'user1', 100000, 500000),
('12.13', NULL, 'file2', 'data', 'user1', 200000, 500000);
-- test data for get branch file info
-- Branch rooted at /test100, nodes tagged with job_id 'pippo' (parent '16'
-- is the ltree id of the /test100 container row inserted first).
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('', NULL, 'test100', 'container', 'user1', '{"group1","group2"}','{"group2"}', 'pippo'); -- /test100
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('16', '', 'test1001.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 'pippo'); -- /test100/test1001.txt
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('16', '', 'test1002.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', 'pippo'); -- /test100/test1002.txt
INSERT INTO node (parent_path, parent_relative_path, name, type, creator_id, group_read, group_write, job_id) VALUES ('16', '', 'test1003.txt', 'data', 'user1', '{"group1","group2"}','{"group2"}', NULL); -- /test100/test1003.txt (NULL job_id: presumably excluded from job 'pippo' branch queries; the tests expect 3 matches)
DELETE FROM job;
INSERT INTO job (job_id, owner_id, job_type, phase, start_time, end_time, creation_time, job_info, results) VALUES ('pippo1', 'user1', 'pullFromVoSpace', 'ARCHIVED', NULL, NULL, '2011-06-22 19:10:25', NULL, NULL); -- unrelated archived job ('pippo1', not 'pippo')
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment