package com.perforce.spark;
import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.FileUploadException;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.apache.commons.io.FilenameUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import spark.Request;
/**
 * A Maven artifact upload parsed from a {@code multipart/form-data} request.
 *
 * <p>The request is expected to carry five form fields identifying the Maven
 * coordinates ({@code assetDepot}, {@code repositoryID}, {@code groupID},
 * {@code assetID}, {@code version}) plus one or more file parts containing the
 * artifact payload.
 */
public class MavenAsset extends AbstractAsset {

    private static final Logger logger = LoggerFactory.getLogger(MavenAsset.class);

    // Maven coordinates locating the artifact within the depot; all required.
    private final String assetDepot;
    private final String repositoryID;
    private final String groupID;
    private final String assetID;
    private final String version;

    // Uploaded file parts (every non-form-field item in the multipart body).
    private final List<FileItem> fileItems = new ArrayList<>();

    /**
     * Parses the multipart request, capturing the Maven coordinate form fields
     * and collecting the uploaded file items.
     *
     * @param request Spark request wrapping the raw multipart servlet request
     * @throws FileUploadException if the multipart body cannot be parsed, or a
     *         required coordinate field is missing from the request
     */
    public MavenAsset(Request request) throws FileUploadException {
        DiskFileItemFactory factory = new DiskFileItemFactory();
        ServletFileUpload upload = new ServletFileUpload(factory);
        List<FileItem> items = upload.parseRequest(request.raw());

        String depot = null;
        String repo = null;
        String group = null;
        String asset = null;
        String ver = null;

        for (FileItem item : items) {
            if (item.isFormField()) {
                String fieldName = item.getFieldName();
                String fieldValue = item.getString();
                switch (fieldName) {
                    case "assetDepot":
                        depot = fieldValue;
                        break;
                    case "repositoryID":
                        repo = fieldValue;
                        break;
                    case "groupID":
                        group = fieldValue;
                        break;
                    case "assetID":
                        asset = fieldValue;
                        break;
                    case "version":
                        ver = fieldValue;
                        break;
                    default:
                        // Unrecognized form fields are deliberately ignored.
                        break;
                }
            } else {
                fileItems.add(item);
            }
        }

        // Fail fast with a descriptive error rather than surfacing a bare
        // NullPointerException later in getPath() when a field is absent.
        this.assetDepot = require("assetDepot", depot);
        this.repositoryID = require("repositoryID", repo);
        this.groupID = require("groupID", group);
        this.assetID = require("assetID", asset);
        this.version = require("version", ver);
    }

    /** Returns {@code value}, or throws if the named required form field was missing. */
    private static String require(String field, String value) throws FileUploadException {
        if (value == null) {
            throw new FileUploadException("missing required form field: " + field);
        }
        return value;
    }

    /**
     * Builds the depot path for this artifact:
     * {@code assetDepot/repositoryID/<group as path>/assetID/version}, with the
     * dotted group ID expanded into directory segments (standard Maven layout).
     *
     * @return the depot-relative path for this artifact
     */
    @Override
    Path getPath() {
        String groupPath = groupID.replace('.', '/');
        String[] segments = { repositoryID, groupPath, assetID, version };
        return Paths.get(assetDepot, segments);
    }

    /**
     * Writes each uploaded file part into {@code root}. The client-supplied
     * name is reduced to its base name via {@link FilenameUtils#getName(String)},
     * which also guards against path-traversal names in the upload.
     *
     * @param root workspace directory that receives the uploaded files
     * @throws Exception if writing an item fails (propagated from {@link FileItem#write})
     */
    @Override
    void populateWorkspace(File root) throws Exception {
        for (FileItem item : fileItems) {
            String fileName = FilenameUtils.getName(item.getName());
            item.write(new File(root, fileName));
            logger.info("uploading: {}", fileName);
        }
    }
}