getSampleTypes() {
+ return sampleTypes;
+ }
+}
diff --git a/src/main/java/life/qbic/model/download/AuthenticationException.java b/src/main/java/life/qbic/model/download/AuthenticationException.java
new file mode 100644
index 0000000..6738853
--- /dev/null
+++ b/src/main/java/life/qbic/model/download/AuthenticationException.java
@@ -0,0 +1,23 @@
+package life.qbic.model.download;
+
+/**
+ * Exception to indicate failed authentication against openBIS.
+ *
+ * This exception shall be thrown when the session token returned by openBIS is empty after the
+ * client tried to authenticate against the openBIS application server via its Java API.
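+ *
+ * A minimal usage sketch (the login call shown is illustrative only, not part of this class):
+ * <pre>{@code
+ * String sessionToken = openBIS.login(user, password);
+ * if (sessionToken == null || sessionToken.isEmpty()) {
+ *   throw new AuthenticationException("Authentication against openBIS failed for user " + user);
+ * }
+ * }</pre>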
+ */
+public class AuthenticationException extends RuntimeException {
+
+ AuthenticationException() {
+ super();
+ }
+
+ AuthenticationException(String msg) {
+ super(msg);
+ }
+
+ AuthenticationException(String msg, Throwable t) {
+ super(msg, t);
+ }
+
+}
diff --git a/src/main/java/life/qbic/model/download/ConnectionException.java b/src/main/java/life/qbic/model/download/ConnectionException.java
new file mode 100644
index 0000000..73bf9af
--- /dev/null
+++ b/src/main/java/life/qbic/model/download/ConnectionException.java
@@ -0,0 +1,20 @@
+package life.qbic.model.download;
+
+/**
+ * ConnectionException indicates issues that occur when a client tries to connect to the openBIS API.
+ */
+public class ConnectionException extends RuntimeException {
+
+ ConnectionException() {
+ super();
+ }
+
+ ConnectionException(String msg) {
+ super(msg);
+ }
+
+ ConnectionException(String msg, Throwable t) {
+ super(msg, t);
+ }
+
+}
diff --git a/src/main/java/life/qbic/model/download/FileSystemWriter.java b/src/main/java/life/qbic/model/download/FileSystemWriter.java
new file mode 100644
index 0000000..59b6419
--- /dev/null
+++ b/src/main/java/life/qbic/model/download/FileSystemWriter.java
@@ -0,0 +1,53 @@
+package life.qbic.model.download;
+
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.nio.file.Path;
+import java.util.List;
+
+/**
+ * File system implementation of the SummaryWriter interface.
+ *
+ * Provides methods to write the queried openBIS model to a file on the local file system.
+ *
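+ * A minimal usage sketch (the file name is only an example):
+ * <pre>{@code
+ * SummaryWriter summaryWriter = new FileSystemWriter(Paths.get("summary.txt"));
+ * summaryWriter.reportSummary(List.of("first line", "second line"));
+ * }</pre>
+ *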
+ * @author Sven Fillinger, Andreas Friedrich
+ */
+public class FileSystemWriter implements SummaryWriter {
+
+ /**
+ * File that stores the summary report content for valid checksums.
+ */
+ final private File summaryFile;
+
+
+ /**
+ * FileSystemWriter constructor with the paths for the summary files.
+ *
+ * @param summaryFile The path where to write the summary
+ */
+ public FileSystemWriter(Path summaryFile) {
+ this.summaryFile = new File(summaryFile.toString());
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void reportSummary(List<String> summary) throws IOException {
+ if (!summaryFile.exists() && !summaryFile.createNewFile()) {
+ // the file did not exist and could not be created
+ throw new IOException("The file " + summaryFile.getAbsoluteFile() + " could not be created.");
+ }
+ try (BufferedWriter writer = new BufferedWriter(new FileWriter(summaryFile, true))) {
+ for (String line : summary) {
+ writer.append(line).append("\n");
+ }
+ }
+ }
+
+}
diff --git a/src/main/java/life/qbic/model/download/OpenbisConnector.java b/src/main/java/life/qbic/model/download/OpenbisConnector.java
new file mode 100644
index 0000000..7d872eb
--- /dev/null
+++ b/src/main/java/life/qbic/model/download/OpenbisConnector.java
@@ -0,0 +1,729 @@
+package life.qbic.model.download;
+
+import ch.ethz.sis.openbis.generic.OpenBIS;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.common.search.SearchResult;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.dataset.DataSet;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.dataset.DataSetType;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.dataset.fetchoptions.DataSetFetchOptions;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.dataset.fetchoptions.DataSetTypeFetchOptions;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.dataset.id.DataSetPermId;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.dataset.search.DataSetSearchCriteria;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.dataset.search.DataSetTypeSearchCriteria;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.dataset.update.DataSetUpdate;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.entitytype.EntityKind;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.entitytype.id.EntityTypePermId;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.experiment.Experiment;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.experiment.fetchoptions.ExperimentFetchOptions;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.experiment.id.ExperimentIdentifier;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.experiment.search.ExperimentSearchCriteria;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.experiment.update.ExperimentUpdate;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.Sample;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.SampleType;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.create.SampleCreation;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.fetchoptions.SampleFetchOptions;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.fetchoptions.SampleTypeFetchOptions;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.id.SampleIdentifier;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.search.SampleSearchCriteria;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.search.SampleTypeSearchCriteria;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.sample.update.SampleUpdate;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.space.Space;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.space.fetchoptions.SpaceFetchOptions;
+import ch.ethz.sis.openbis.generic.asapi.v3.dto.space.search.SpaceSearchCriteria;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.dataset.create.UploadedDataSetCreation;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.DataSetFile;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.download.DataSetFileDownload;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.download.DataSetFileDownloadOptions;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.download.DataSetFileDownloadReader;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.fetchoptions.DataSetFileFetchOptions;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.id.DataSetFilePermId;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.id.IDataSetFileId;
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.search.DataSetFileSearchCriteria;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+import life.qbic.model.DatasetWithProperties;
+import life.qbic.model.OpenbisExperimentWithDescendants;
+import life.qbic.model.SampleTypeConnection;
+import life.qbic.model.SampleTypesAndMaterials;
+import life.qbic.model.download.SEEKConnector.SeekStructurePostRegistrationInformation;
+import org.apache.commons.lang3.tuple.Pair;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+public class OpenbisConnector {
+
+ private static final Logger LOG = LogManager.getLogger(OpenbisConnector.class);
+ private final OpenBIS openBIS;
+
+ public static Pattern datasetCodePattern = Pattern.compile("[0-9]{17}-[0-9]+");
+ public final String EXPERIMENT_LINK_PROPERTY = "EXPERIMENT_NAME";
+ public final String SAMPLE_LINK_PROPERTY = "experimentLink";
+ public final String DATASET_LINK_PROPERTY = "experimentLink";
+
+ public OpenbisConnector(OpenBIS authentication) {
+ this.openBIS = authentication;
+ }
+
+ public List<String> getSpaces() {
+ SpaceSearchCriteria criteria = new SpaceSearchCriteria();
+ SpaceFetchOptions options = new SpaceFetchOptions();
+ return openBIS.searchSpaces(criteria, options).getObjects()
+ .stream().map(Space::getCode).collect(Collectors.toList());
+ }
+
+ public DataSetPermId registerDatasetForExperiment(Path uploadPath, String experimentID,
+ String datasetType, List<String> parentCodes) {
+ UploadedDataSetCreation creation = prepareDataSetCreation(uploadPath, datasetType, parentCodes);
+ creation.setExperimentId(new ExperimentIdentifier(experimentID));
+
+ try {
+ return openBIS.createUploadedDataSet(creation);
+ } catch (final Exception e) {
+ LOG.error(e.getMessage());
+ }
+ return null;
+ }
+
+ public DataSetPermId registerDatasetForSample(Path uploadPath, String sampleID,
+ String datasetType, List<String> parentCodes) {
+ UploadedDataSetCreation creation = prepareDataSetCreation(uploadPath, datasetType, parentCodes);
+ creation.setSampleId(new SampleIdentifier(sampleID));
+
+ try {
+ return openBIS.createUploadedDataSet(creation);
+ } catch (final Exception e) {
+ LOG.error(e.getMessage());
+ }
+ return null;
+ }
+
+ private UploadedDataSetCreation prepareDataSetCreation(Path uploadPath, String datasetType,
+ List<String> parentCodes) {
+ if(listDatasetTypes().stream().map(DataSetType::getCode).noneMatch(x -> x.equals(datasetType))) {
+ throw new RuntimeException("Dataset type " + datasetType +
+ " is not supported by this instance of openBIS.");
+ }
+ final String uploadId = openBIS.uploadFileWorkspaceDSS(uploadPath);
+
+ final UploadedDataSetCreation creation = new UploadedDataSetCreation();
+ creation.setUploadId(uploadId);
+ creation.setParentIds(parentCodes.stream().map(DataSetPermId::new).collect(
+ Collectors.toList()));
+ creation.setTypeId(new EntityTypePermId(datasetType, EntityKind.DATA_SET));
+ return creation;
+ }
+
+ private static void copyInputStreamToFile(InputStream inputStream, File file)
+ throws IOException {
+ try (FileOutputStream outputStream = new FileOutputStream(file, false)) {
+ int read;
+ byte[] bytes = new byte[8192];
+ while ((read = inputStream.read(bytes)) != -1) {
+ outputStream.write(bytes, 0, read);
+ }
+ }
+ }
+
+ public List<DataSet> listDatasetsOfExperiment(List<String> spaces, String experiment) {
+ DataSetSearchCriteria criteria = new DataSetSearchCriteria();
+ criteria.withExperiment().withCode().thatEquals(experiment);
+ if (!spaces.isEmpty()) {
+ criteria.withAndOperator();
+ criteria.withExperiment().withProject().withSpace().withCodes().thatIn(spaces);
+ }
+ DataSetFetchOptions options = new DataSetFetchOptions();
+ options.withType();
+ options.withRegistrator();
+ options.withExperiment().withProject().withSpace();
+ return openBIS.searchDataSets(criteria, options).getObjects();
+ }
+
+ public List<DataSet> listDatasetsOfSample(List<String> spaces, String sample) {
+ DataSetSearchCriteria criteria = new DataSetSearchCriteria();
+ criteria.withSample().withCode().thatEquals(sample);
+ if (!spaces.isEmpty()) {
+ criteria.withAndOperator();
+ criteria.withExperiment().withProject().withSpace().withCodes().thatIn(spaces);
+ }
+ DataSetFetchOptions options = new DataSetFetchOptions();
+ options.withType();
+ options.withRegistrator();
+ options.withExperiment().withProject().withSpace();
+ return openBIS.searchDataSets(criteria, options).getObjects();
+ }
+
+ public File downloadDataset(String targetPath, String datasetID, String filePath) {
+ DataSetFileDownloadOptions options = new DataSetFileDownloadOptions();
+ IDataSetFileId fileToDownload = new DataSetFilePermId(new DataSetPermId(datasetID),
+ filePath);
+
+ // Setting recursive flag to true will return both subfolders and files
+ options.setRecursive(true);
+
+ // Read the contents and write them to the target path
+ InputStream stream = openBIS.downloadFiles(new ArrayList<>(List.of(fileToDownload)),
+ options);
+ DataSetFileDownloadReader reader = new DataSetFileDownloadReader(stream);
+ DataSetFileDownload file;
+ while ((file = reader.read()) != null) {
+ DataSetFile df = file.getDataSetFile();
+ String currentPath = df.getPath().replace("original", "");
+ if (df.isDirectory()) {
+ File newDir = new File(targetPath, currentPath);
+ if (!newDir.exists()) {
+ if(!newDir.mkdirs()) {
+ throw new RuntimeException("Could not create folders for downloaded dataset.");
+ }
+ }
+ } else {
+ File toWrite = new File(targetPath, currentPath);
+ try {
+ copyInputStreamToFile(file.getInputStream(), toWrite);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ }
+ return new File(targetPath, filePath.replace("original/",""));
+ }
+
+ public InputStream streamDataset(String datasetCode, String filePath) {
+ DataSetFileDownloadOptions options = new DataSetFileDownloadOptions();
+ IDataSetFileId fileToDownload = new DataSetFilePermId(new DataSetPermId(datasetCode),
+ filePath);
+
+ // Setting recursive flag to true will return both subfolders and files
+ options.setRecursive(true);
+
+ // Read the contents and return a stream to the first file
+ InputStream stream = openBIS.downloadFiles(new ArrayList<>(List.of(fileToDownload)),
+ options);
+
+ DataSetFileDownloadReader reader = new DataSetFileDownloadReader(stream);
+ return reader.read().getInputStream();
+ }
+
+ public Map<SampleTypeConnection, Integer> queryFullSampleHierarchy(List<String> spaces) {
+ Map<SampleTypeConnection, Integer> hierarchy = new HashMap<>();
+ if (spaces.isEmpty()) {
+ spaces = getSpaces();
+ }
+ for (String space : spaces) {
+ SampleFetchOptions fetchType = new SampleFetchOptions();
+ fetchType.withType();
+ SampleFetchOptions withDescendants = new SampleFetchOptions();
+ withDescendants.withChildrenUsing(fetchType);
+ withDescendants.withType();
+ SampleSearchCriteria criteria = new SampleSearchCriteria();
+ criteria.withSpace().withCode().thatEquals(space.toUpperCase());
+ SearchResult<Sample> result = openBIS.searchSamples(criteria, withDescendants);
+ for (Sample s : result.getObjects()) {
+ SampleType parentType = s.getType();
+ List<Sample> children = s.getChildren();
+ if (children.isEmpty()) {
+ SampleTypeConnection leaf = new SampleTypeConnection(parentType);
+ if (hierarchy.containsKey(leaf)) {
+ int count = hierarchy.get(leaf) + 1;
+ hierarchy.put(leaf, count);
+ } else {
+ hierarchy.put(leaf, 1);
+ }
+ } else {
+ for (Sample c : children) {
+ SampleType childType = c.getType();
+ SampleTypeConnection connection = new SampleTypeConnection(parentType, childType);
+ if (hierarchy.containsKey(connection)) {
+ int count = hierarchy.get(connection) + 1;
+ hierarchy.put(connection, count);
+ } else {
+ hierarchy.put(connection, 1);
+ }
+ }
+ }
+ }
+ }
+ return hierarchy;
+ }
+
+ private Set<String> getPropertiesFromSampleHierarchy(String propertyName, List<Sample> samples,
+ Set<String> foundProperties) {
+ if(samples.isEmpty()) {
+ return foundProperties;
+ }
+ for(Sample s : samples) {
+ if(s.getProperties().containsKey(propertyName)) {
+ foundProperties.add(s.getProperties().get(propertyName));
+ }
+ }
+ return getPropertiesFromSampleHierarchy(propertyName,
+ samples.stream().map(Sample::getParents).flatMap(List::stream).collect(Collectors.toList()),
+ foundProperties);
+ }
+
+ public Set<String> findPropertiesInSampleHierarchy(String propertyName,
+ ExperimentIdentifier experimentId) {
+ return getPropertiesFromSampleHierarchy(propertyName,
+ getSamplesWithAncestorsOfExperiment(experimentId), new HashSet<>());
+ }
+
+ public Map<String, List<Experiment>> getExperimentsBySpace(List<String> spaces) {
+ Map<String, List<Experiment>> result = new HashMap<>();
+ ExperimentFetchOptions options = new ExperimentFetchOptions();
+ options.withProject().withSpace();
+ ExperimentSearchCriteria criteria = new ExperimentSearchCriteria();
+ criteria.withProject().withSpace().withCodes().thatIn(spaces);
+ for (Experiment e : openBIS.searchExperiments(criteria, options).getObjects()) {
+ String space = e.getProject().getSpace().getCode();
+ if(result.containsKey(space)) {
+ result.get(space).add(e);
+ } else {
+ result.put(space, new ArrayList<>(Arrays.asList(e)));
+ }
+ }
+ return result;
+ }
+
+ public Map<String, List<Sample>> getSamplesBySpace(List<String> spaces) {
+ Map<String, List<Sample>> result = new HashMap<>();
+ SampleFetchOptions options = new SampleFetchOptions();
+ options.withSpace();
+ SampleSearchCriteria criteria = new SampleSearchCriteria();
+ criteria.withSpace().withCodes().thatIn(spaces);
+ for (Sample s : openBIS.searchSamples(criteria, options).getObjects()) {
+ String space = s.getSpace().getCode();
+ if(!result.containsKey(space)) {
+ result.put(space, new ArrayList<>());
+ }
+ result.get(space).add(s);
+ }
+ return result;
+ }
+
+ public Map<String, Map<String, List<Experiment>>> getExperimentsByTypeAndSpace(List<String> spaces) {
+ Map<String, Map<String, List<Experiment>>> result = new HashMap<>();
+ ExperimentFetchOptions options = new ExperimentFetchOptions();
+ options.withProject().withSpace();
+ options.withType();
+
+ ExperimentSearchCriteria criteria = new ExperimentSearchCriteria();
+ criteria.withProject().withSpace().withCodes().thatIn(spaces);
+ for (Experiment exp : openBIS.searchExperiments(criteria, options).getObjects()) {
+ String space = exp.getProject().getSpace().getCode();
+ String type = exp.getType().getCode();
+ if(!result.containsKey(space)) {
+ Map<String, List<Experiment>> typeMap = new HashMap<>();
+ typeMap.put(type, new ArrayList<>(Arrays.asList(exp)));
+ result.put(space, typeMap);
+ } else {
+ Map<String, List<Experiment>> typeMap = result.get(space);
+ if(!typeMap.containsKey(type)) {
+ typeMap.put(type, new ArrayList<>());
+ }
+ typeMap.get(type).add(exp);
+ }
+ }
+ return result;
+ }
+
+ public Map<String, Map<String, List<Sample>>> getSamplesByTypeAndSpace(List<String> spaces) {
+ Map<String, Map<String, List<Sample>>> result = new HashMap<>();
+ SampleFetchOptions options = new SampleFetchOptions();
+ options.withSpace();
+ options.withType();
+
+ SampleSearchCriteria criteria = new SampleSearchCriteria();
+ criteria.withSpace().withCodes().thatIn(spaces);
+ for (Sample s : openBIS.searchSamples(criteria, options).getObjects()) {
+ String space = s.getSpace().getCode();
+ String type = s.getType().getCode();
+ if(!result.containsKey(space)) {
+ Map<String, List<Sample>> typeMap = new HashMap<>();
+ typeMap.put(type, new ArrayList<>(Arrays.asList(s)));
+ result.put(space, typeMap);
+ } else {
+ Map<String, List<Sample>> typeMap = result.get(space);
+ if(!typeMap.containsKey(type)) {
+ typeMap.put(type, new ArrayList<>());
+ }
+ typeMap.get(type).add(s);
+ }
+ }
+ return result;
+ }
+
+ public Map<String, Map<String, List<DataSet>>> getDatasetsByTypeAndSpace(List<String> spaces) {
+ Map<String, Map<String, List<DataSet>>> result = new HashMap<>();
+ DataSetFetchOptions options = new DataSetFetchOptions();
+ options.withSample().withSpace();
+ options.withExperiment().withProject().withSpace();
+ options.withType();
+ DataSetSearchCriteria criteria = new DataSetSearchCriteria();
+ criteria.withOrOperator();
+ criteria.withSample().withSpace().withCodes().thatIn(spaces);
+ criteria.withExperiment().withProject().withSpace().withCodes().thatIn(spaces);
+ for (DataSet d : openBIS.searchDataSets(criteria, options).getObjects()) {
+ String space = getSpaceFromSampleOrExperiment(d);
+ String type = d.getType().getCode();
+ if(!result.containsKey(space)) {
+ Map<String, List<DataSet>> typeMap = new HashMap<>();
+ typeMap.put(type, new ArrayList<>(Arrays.asList(d)));
+ result.put(space, typeMap);
+ } else {
+ Map<String, List<DataSet>> typeMap = result.get(space);
+ if(!typeMap.containsKey(type)) {
+ typeMap.put(type, new ArrayList<>());
+ }
+ typeMap.get(type).add(d);
+ }
+ }
+ return result;
+ }
+
+ private String getSpaceFromSampleOrExperiment(DataSet d) {
+ try {
+ if (d.getSample() != null) {
+ return d.getSample().getSpace().getCode();
+ }
+ if (d.getExperiment() != null) {
+ return d.getExperiment().getProject().getSpace().getCode();
+ }
+ } catch (NullPointerException e) {
+ // ignore and report the missing space below
+ }
+ System.out.println("Dataset " + d + " does not seem to be attached to a space");
+ return "NO SPACE";
+ }
+
+ private List<Sample> getSamplesWithAncestorsOfExperiment(ExperimentIdentifier experimentId) {
+ int numberOfFetchedLevels = 10;
+ SampleFetchOptions previousLevel = null;
+ for(int i = 0; i < numberOfFetchedLevels; i++) {
+ SampleFetchOptions withAncestors = new SampleFetchOptions();
+ withAncestors.withProperties();
+ withAncestors.withType();
+ if (previousLevel != null) {
+ withAncestors.withParentsUsing(previousLevel);
+ }
+ previousLevel = withAncestors;
+ }
+
+ SampleSearchCriteria criteria = new SampleSearchCriteria();
+ criteria.withExperiment().withId().thatEquals(experimentId);
+
+ return openBIS.searchSamples(criteria, previousLevel).getObjects();
+ }
+
+ public List<DataSet> findDataSets(List<String> codes) {
+ if (codes.isEmpty()) {
+ return new ArrayList<>();
+ }
+ DataSetSearchCriteria criteria = new DataSetSearchCriteria();
+ criteria.withCodes().thatIn(codes);
+ DataSetFetchOptions options = new DataSetFetchOptions();
+ options.withExperiment();
+ options.withType();
+ return openBIS.searchDataSets(criteria, options).getObjects();
+ }
+
+ public boolean datasetExists(String code) {
+ return !findDataSets(new ArrayList<>(Arrays.asList(code))).isEmpty();
+ }
+
+ public boolean experimentExists(String experimentID) {
+ ExperimentSearchCriteria criteria = new ExperimentSearchCriteria();
+ criteria.withIdentifier().thatEquals(experimentID);
+
+ return !openBIS.searchExperiments(criteria, new ExperimentFetchOptions()).getObjects()
+ .isEmpty();
+ }
+
+ public boolean sampleExists(String objectID) {
+ SampleSearchCriteria criteria = new SampleSearchCriteria();
+ criteria.withIdentifier().thatEquals(objectID);
+
+ return !openBIS.searchSamples(criteria, new SampleFetchOptions()).getObjects()
+ .isEmpty();
+ }
+
+ public OpenbisExperimentWithDescendants getExperimentWithDescendants(String experimentID) {
+ ExperimentSearchCriteria criteria = new ExperimentSearchCriteria();
+ criteria.withIdentifier().thatEquals(experimentID);
+
+ ExperimentFetchOptions fetchOptions = new ExperimentFetchOptions();
+ fetchOptions.withType();
+ fetchOptions.withProject();
+ fetchOptions.withProperties();
+ DataSetFetchOptions dataSetFetchOptions = new DataSetFetchOptions();
+ dataSetFetchOptions.withType();
+ dataSetFetchOptions.withRegistrator();
+ dataSetFetchOptions.withExperiment();
+ dataSetFetchOptions.withSample();
+ SampleFetchOptions sampleFetchOptions = new SampleFetchOptions();
+ sampleFetchOptions.withProperties();
+ sampleFetchOptions.withType().withPropertyAssignments().withPropertyType();
+ sampleFetchOptions.withDataSetsUsing(dataSetFetchOptions);
+ fetchOptions.withDataSetsUsing(dataSetFetchOptions);
+ fetchOptions.withSamplesUsing(sampleFetchOptions);
+
+ Experiment experiment = openBIS.searchExperiments(criteria, fetchOptions).getObjects().get(0);
+
+ Map<String, List<DataSetFile>> datasetCodeToFiles = new HashMap<>();
+ for(DataSet dataset : experiment.getDataSets()) {
+ datasetCodeToFiles.put(dataset.getPermId().getPermId(), getDatasetFiles(dataset));
+ }
+
+ return new OpenbisExperimentWithDescendants(experiment, experiment.getSamples(),
+ experiment.getDataSets()
+ .stream().map(DatasetWithProperties::new)
+ .collect(Collectors.toList()), datasetCodeToFiles);
+ }
+
+ public List<DataSetFile> getDatasetFiles(DataSet dataset) {
+ DataSetFileSearchCriteria criteria = new DataSetFileSearchCriteria();
+
+ DataSetSearchCriteria dataSetCriteria = criteria.withDataSet().withOrOperator();
+ dataSetCriteria.withCode().thatEquals(dataset.getCode());
+
+ SearchResult<DataSetFile> result = openBIS.searchFiles(criteria, new DataSetFileFetchOptions());
+
+ return result.getObjects();
+ }
+
+ public List<DataSetType> listDatasetTypes() {
+ DataSetTypeSearchCriteria criteria = new DataSetTypeSearchCriteria();
+ DataSetTypeFetchOptions fetchOptions = new DataSetTypeFetchOptions();
+ fetchOptions.withPropertyAssignments().withPropertyType();
+ fetchOptions.withPropertyAssignments().withEntityType();
+ return openBIS.searchDataSetTypes(criteria, fetchOptions).getObjects();
+ }
+
+ public SampleTypesAndMaterials getSampleTypesWithMaterials() {
+ SampleTypeSearchCriteria criteria = new SampleTypeSearchCriteria();
+ SampleTypeFetchOptions typeOptions = new SampleTypeFetchOptions();
+ typeOptions.withPropertyAssignments().withPropertyType();
+ typeOptions.withPropertyAssignments().withEntityType();
+ Set<SampleType> sampleTypes = new HashSet<>();
+ Set<SampleType> sampleTypesAsMaterials = new HashSet<>();
+ for(SampleType type : openBIS.searchSampleTypes(criteria, typeOptions).getObjects()) {
+ /*
+ System.err.println("sample type: "+type.getCode());
+ for(PropertyAssignment assignment : type.getPropertyAssignments()) {
+ if (assignment.getPropertyType().getDataType().name().equals("SAMPLE")) {
+ System.err.println(assignment.getPropertyType().getLabel());
+ System.err.println(assignment.getPropertyType().getDataType().name());
+ System.err.println(assignment.getPropertyType().getCode());
+ }
+ }
+ */
+ if(type.getCode().startsWith("MATERIAL.")) {
+ sampleTypesAsMaterials.add(type);
+ } else {
+ sampleTypes.add(type);
+ }
+ }
+ return new SampleTypesAndMaterials(sampleTypes, sampleTypesAsMaterials);
+ }
+
+ public void createSeekLinks(SeekStructurePostRegistrationInformation postRegInformation) {
+ Optional<Pair<String, String>> experimentInfo = postRegInformation.getExperimentIDWithEndpoint();
+ //TODO link sample type not implemented?
+ final String SAMPLE_TYPE = "EXTERNAL_LINK";
+
+ SampleTypeSearchCriteria criteria = new SampleTypeSearchCriteria();
+ criteria.withCode().thatEquals(SAMPLE_TYPE);
+ SampleTypeFetchOptions typeOptions = new SampleTypeFetchOptions();
+ typeOptions.withPropertyAssignments().withPropertyType();
+ typeOptions.withPropertyAssignments().withEntityType();
+ if(openBIS.searchSampleTypes(criteria, typeOptions).getObjects().isEmpty()) {
+ System.out.printf(
+ "This is where links would be put into openBIS, but EXTERNAL_LINK sample was "
+ + "not yet added to openBIS instance.%n");
+ return;
+ }
+
+ if(experimentInfo.isPresent()) {
+ ExperimentIdentifier id = new ExperimentIdentifier(experimentInfo.get().getLeft());
+ String endpoint = experimentInfo.get().getRight();
+ SampleCreation sample = createNewLinkSample(endpoint);
+ sample.setExperimentId(id);
+ openBIS.createSamples(Arrays.asList(sample));
+ }
+ Map<String, String> sampleInfos = postRegInformation.getSampleIDsWithEndpoints();
+ for(String sampleID : sampleInfos.keySet()) {
+ SampleIdentifier id = new SampleIdentifier(sampleID);
+ String endpoint = sampleInfos.get(sampleID);
+ SampleCreation sample = createNewLinkSample(endpoint);
+ sample.setParentIds(Arrays.asList(id));
+ openBIS.createSamples(Arrays.asList(sample));
+ }
+ }
+
+ private SampleCreation createNewLinkSample(String endpoint) {
+ final String SAMPLE_TYPE = "EXTERNAL_LINK";
+ SampleCreation sample = new SampleCreation();
+ sample.setTypeId(new EntityTypePermId(SAMPLE_TYPE, EntityKind.SAMPLE));
+
+ Map<String, String> properties = new HashMap<>();
+ properties.put("LINK_TYPE", "SEEK");
+ properties.put("URL", endpoint);
+
+ sample.setProperties(properties);
+ return sample;
+ }
+
+ public void updateSeekLinks(SeekStructurePostRegistrationInformation postRegistrationInformation) {
+ }
+
+ private void updateExperimentProperties(ExperimentIdentifier id, Map<String, String> properties,
+ boolean overwrite) {
+ ExperimentUpdate update = new ExperimentUpdate();
+ update.setExperimentId(id);
+ if(overwrite) {
+ update.setProperties(properties);
+ } else {
+ ExperimentFetchOptions options = new ExperimentFetchOptions();
+ options.withProperties();
+ Experiment oldExp = openBIS.getExperiments(Arrays.asList(id), options).get(id);
+ for(String property : properties.keySet()) {
+ String newValue = properties.get(property);
+ String oldValue = oldExp.getProperty(property);
+ if(oldValue == null || oldValue.isEmpty() || oldValue.equals(newValue)) {
+ update.setProperty(property, newValue);
+ } else if(!newValue.isBlank()) {
+ update.setProperty(property, oldValue+", "+newValue);//TODO this can be changed to any other strategy
+ }
+ }
+ }
+ openBIS.updateExperiments(Arrays.asList(update));
+ }
+
+ private void updateSampleProperties(SampleIdentifier id, Map<String, String> properties,
+ boolean overwrite) {
+ SampleUpdate update = new SampleUpdate();
+ update.setSampleId(id);
+ if(overwrite) {
+ update.setProperties(properties);
+ } else {
+ SampleFetchOptions options = new SampleFetchOptions();
+ options.withProperties();
+ Sample oldSample = openBIS.getSamples(Arrays.asList(id), options).get(id);
+ for(String property : properties.keySet()) {
+ String newValue = properties.get(property);
+ String oldValue = oldSample.getProperty(property);
+ if(oldValue == null || oldValue.isEmpty() || oldValue.equals(newValue)) {
+ update.setProperty(property, newValue);
+ } else {
+ update.setProperty(property, oldValue+", "+newValue);//TODO this can be changed to any other strategy
+ }
+ }
+ }
+ openBIS.updateSamples(Arrays.asList(update));
+ }
+
+ private void updateDatasetProperties(DataSetPermId id, Map<String, String> properties,
+ boolean overwrite) {
+ DataSetUpdate update = new DataSetUpdate();
+ update.setDataSetId(id);
+ if (overwrite) {
+ update.setProperties(properties);
+ } else {
+ DataSetFetchOptions options = new DataSetFetchOptions();
+ options.withProperties();
+ DataSet oldDataset = openBIS.getDataSets(Arrays.asList(id), options).get(id);
+ for (String property : properties.keySet()) {
+ String newValue = properties.get(property);
+ String oldValue = oldDataset.getProperty(property);
+ if (oldValue == null || oldValue.isEmpty() || oldValue.equals(newValue)) {
+ update.setProperty(property, newValue);
+ } else {
+ update.setProperty(property,
+ oldValue + ", " + newValue);//TODO this can be changed to any other strategy
+ }
+ }
+ }
+ openBIS.updateDataSets(Arrays.asList(update));
+ }
+
+ public OpenbisExperimentWithDescendants getExperimentAndDataFromSample(String sampleID) {
+ SampleSearchCriteria criteria = new SampleSearchCriteria();
+ criteria.withIdentifier().thatEquals(sampleID);
+
+ DataSetFetchOptions dataSetFetchOptions = new DataSetFetchOptions();
+ dataSetFetchOptions.withType();
+ dataSetFetchOptions.withRegistrator();
+ dataSetFetchOptions.withExperiment();
+ dataSetFetchOptions.withSample();
+ SampleFetchOptions fetchOptions = new SampleFetchOptions();
+ fetchOptions.withProperties();
+ fetchOptions.withType().withPropertyAssignments().withPropertyType();
+ fetchOptions.withDataSetsUsing(dataSetFetchOptions);
+
+ ExperimentFetchOptions expFetchOptions = new ExperimentFetchOptions();
+ expFetchOptions.withType();
+ expFetchOptions.withProject();
+ expFetchOptions.withProperties();
+ fetchOptions.withExperimentUsing(expFetchOptions);
+
+ List<Sample> samples = openBIS.searchSamples(criteria, fetchOptions).getObjects();
+ Sample sample = samples.get(0);
+
+ List<DatasetWithProperties> datasets = new ArrayList<>();
+ Map<String, List<DataSetFile>> datasetCodeToFiles = new HashMap<>();
+ for (DataSet dataset : sample.getDataSets()) {
+ datasets.add(new DatasetWithProperties(dataset));
+ datasetCodeToFiles.put(dataset.getPermId().getPermId(), getDatasetFiles(dataset));
+ }
+ return new OpenbisExperimentWithDescendants(sample.getExperiment(), samples, datasets,
+ datasetCodeToFiles);
+ }
+
+ public OpenbisExperimentWithDescendants getExperimentStructureFromDataset(String datasetID) {
+ DataSetSearchCriteria criteria = new DataSetSearchCriteria();
+ criteria.withPermId().thatEquals(datasetID);
+
+ SampleFetchOptions sampleFetchOptions = new SampleFetchOptions();
+ sampleFetchOptions.withProperties();
+ sampleFetchOptions.withType().withPropertyAssignments().withPropertyType();
+
+ ExperimentFetchOptions expFetchOptions = new ExperimentFetchOptions();
+ expFetchOptions.withType();
+ expFetchOptions.withProject();
+ expFetchOptions.withProperties();
+
+ DataSetFetchOptions dataSetFetchOptions = new DataSetFetchOptions();
+ dataSetFetchOptions.withType();
+ dataSetFetchOptions.withRegistrator();
+ dataSetFetchOptions.withSampleUsing(sampleFetchOptions);
+ dataSetFetchOptions.withExperimentUsing(expFetchOptions);
+
+ DataSet dataset = openBIS.searchDataSets(criteria, dataSetFetchOptions).getObjects().get(0);
+
+ List<Sample> samples = new ArrayList<>();
+ if(dataset.getSample() != null) {
+ samples.add(dataset.getSample());
+ }
+
+ List<DatasetWithProperties> datasets = new ArrayList<>();
+ Map<String, List<DataSetFile>> datasetCodeToFiles = new HashMap<>();
+ datasets.add(new DatasetWithProperties(dataset));
+ datasetCodeToFiles.put(dataset.getPermId().getPermId(), getDatasetFiles(dataset));
+
+ if(dataset.getExperiment() == null) {
+ System.err.println("No experiment found for dataset "+datasetID);
+ }
+ return new OpenbisExperimentWithDescendants(dataset.getExperiment(), samples, datasets,
+ datasetCodeToFiles);
+ }
+}
diff --git a/src/main/java/life/qbic/model/download/OutputPathFinder.java b/src/main/java/life/qbic/model/download/OutputPathFinder.java
new file mode 100644
index 0000000..2dbb977
--- /dev/null
+++ b/src/main/java/life/qbic/model/download/OutputPathFinder.java
@@ -0,0 +1,83 @@
+package life.qbic.model.download;
+
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.DataSetFile;
+import java.io.File;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+/**
+ * Methods to determine the final path for the output directory.
+ * The requested data will be downloaded into this directory.
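+ *
+ * Example (paths and codes are illustrative): for a dataset file "original/results/table.tsv"
+ * belonging to sample Q0000, {@code determineOutputDirectory("/tmp/out", Paths.get("Q0000"), file, true)}
+ * resolves to "/tmp/out/Q0000/results/table.tsv", while {@code conservePaths = false} drops the
+ * directory structure and resolves to "/tmp/out/Q0000/table.tsv".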
+ */
+public class OutputPathFinder {
+
+ private static final Logger LOG = LogManager.getLogger(OutputPathFinder.class);
+
+ /**
+ * @param path to be shortened
+ * @return path that has no parents (top directory)
+ */
+ private static Path getTopDirectory(Path path) {
+ Path currentPath = Paths.get(path.toString());
+ Path parentPath;
+ while (currentPath.getParent() != null) {
+ parentPath = currentPath.getParent();
+ currentPath = parentPath;
+ }
+ return currentPath;
+ }
+
+ /**
+ * @param possiblePath string that could be an existing path to a directory
+ * @return true if path exists, false otherwise
+ */
+ private static boolean isPathValid(String possiblePath){
+ Path path = Paths.get(possiblePath);
+ return Files.isDirectory(path);
+ }
+
+ /**
+ * @param file to download
+ * @param conservePaths if true, directory structure will be conserved
+ * @return final path to file itself
+ */
+ private static Path determineFinalPathFromDataset(DataSetFile file, boolean conservePaths) {
+ Path finalPath;
+ if (conservePaths) {
+ finalPath = Paths.get(file.getPath());
+ // drop the top parent directory name used by the openBIS DSS (usually "original")
+ Path topDirectory = getTopDirectory(finalPath);
+ finalPath = topDirectory.relativize(finalPath);
+ } else {
+ finalPath = Paths.get(file.getPath()).getFileName();
+ }
+ return finalPath;
+ }
+
+ /**
+ * @param outputPath provided by user
+ * @param prefix sample code
+ * @param file to download
+ * @param conservePaths provided by user
+ * @return output directory path
+ */
+ public static Path determineOutputDirectory(String outputPath, Path prefix, DataSetFile file, boolean conservePaths){
+ Path filePath = determineFinalPathFromDataset(file, conservePaths);
+ String path = File.separator + prefix.toString() + File.separator + filePath.toString();
+ Path finalPath = Paths.get("");
+ if (outputPath != null && !outputPath.isEmpty()) {
+ if(isPathValid(outputPath)) {
+ finalPath = Paths.get(outputPath + path);
+ } else{
+ LOG.error("The path you provided does not exist.");
+ System.exit(1);
+ }
+ } else {
+ finalPath = Paths.get(System.getProperty("user.dir") + path);
+ }
+ return finalPath;
+ }
+}
diff --git a/src/main/java/life/qbic/model/download/SEEKConnector.java b/src/main/java/life/qbic/model/download/SEEKConnector.java
new file mode 100644
index 0000000..157fbf6
--- /dev/null
+++ b/src/main/java/life/qbic/model/download/SEEKConnector.java
@@ -0,0 +1,1026 @@
+package life.qbic.model.download;
+
+import ch.ethz.sis.openbis.generic.dssapi.v3.dto.datasetfile.DataSetFile;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.http.HttpClient;
+import java.net.http.HttpRequest;
+import java.net.http.HttpRequest.BodyPublishers;
+import java.net.http.HttpResponse;
+import java.net.http.HttpResponse.BodyHandlers;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.function.Supplier;
+import java.util.regex.Matcher;
+import javax.xml.parsers.ParserConfigurationException;
+import life.qbic.model.AssetInformation;
+import life.qbic.model.OpenbisSeekTranslator;
+import life.qbic.model.SampleInformation;
+import life.qbic.model.isa.SeekStructure;
+import life.qbic.model.isa.GenericSeekAsset;
+import life.qbic.model.isa.ISAAssay;
+import life.qbic.model.isa.ISASample;
+import life.qbic.model.isa.ISASampleType;
+import life.qbic.model.isa.ISAStudy;
+import org.apache.commons.lang3.tuple.ImmutablePair;
+import org.apache.commons.lang3.tuple.Pair;
+import org.apache.http.client.utils.URIBuilder;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.xml.sax.SAXException;
+
+public class SEEKConnector {
+
+ private static final Logger LOG = LogManager.getLogger(SEEKConnector.class);
+ private String apiURL;
+ private byte[] credentials;
+ private OpenbisSeekTranslator translator;
+ private final String DEFAULT_PROJECT_ID;
+ private String currentStudy;
+ private final List<String> ASSET_TYPES = new ArrayList<>(Arrays.asList("data_files", "models",
+ "sops", "documents", "publications"));
+
+ public SEEKConnector(String seekURL, byte[] httpCredentials, String openBISBaseURL,
+ String defaultProjectTitle) throws URISyntaxException, IOException,
+ InterruptedException, ParserConfigurationException, SAXException {
+ this.apiURL = seekURL;
+ this.credentials = httpCredentials;
+ Optional<String> projectID = getProjectWithTitle(defaultProjectTitle);
+ if (projectID.isEmpty()) {
+ throw new RuntimeException("Failed to find project with title: " + defaultProjectTitle + ". "
+ + "Please provide an existing default project.");
+ }
+ DEFAULT_PROJECT_ID = projectID.get();
+ translator = new OpenbisSeekTranslator(openBISBaseURL, DEFAULT_PROJECT_ID);
+ }
+
+ public void setDefaultInvestigation(String investigationTitle)
+ throws URISyntaxException, IOException, InterruptedException {
+ translator.setDefaultInvestigation(searchNodeWithTitle("investigations",
+ investigationTitle));
+ }
+
+ public void setDefaultStudy(String studyTitle)
+ throws URISyntaxException, IOException, InterruptedException {
+ this.currentStudy = searchNodeWithTitle("studies", studyTitle);
+ translator.setDefaultStudy(currentStudy);
+ }
+
+ /**
+ * Lists all projects and returns the identifier of the one matching the provided title, if any.
+ * Necessary because project search does not seem to work.
+ * @param projectTitle the title to search for
+ * @return the optional id of the project with this title, empty if no match was found
+ */
+ private Optional<String> getProjectWithTitle(String projectTitle)
+ throws IOException, InterruptedException, URISyntaxException {
+ String endpoint = apiURL+"/projects/";
+ HttpRequest request = HttpRequest.newBuilder()
+ .uri(new URI(endpoint))
+ .headers("Content-Type", "application/json")
+ .headers("Accept", "application/json")
+ .headers("Authorization", "Basic " + new String(credentials))
+ .GET().build();
+ HttpResponse<String> response = HttpClient.newBuilder().build()
+ .send(request, BodyHandlers.ofString());
+ if(response.statusCode() == 200) {
+ JsonNode rootNode = new ObjectMapper().readTree(response.body());
+ JsonNode hits = rootNode.path("data");
+ for (Iterator<JsonNode> it = hits.elements(); it.hasNext(); ) {
+ JsonNode hit = it.next();
+ String id = hit.get("id").asText();
+ String title = hit.get("attributes").get("title").asText();
+ if(title.equals(projectTitle)) {
+ return Optional.of(id);
+ }
+ }
+ } else {
+ throw new RuntimeException("Failed : HTTP error code : " + response.statusCode());
+ }
+ return Optional.empty();
+ }
+
+ public String addStudy(ISAStudy study)
+ throws URISyntaxException, IOException, InterruptedException {
+ String endpoint = apiURL+"/studies";
+
+ HttpResponse<String> response = HttpClient.newBuilder().build()
+ .send(buildAuthorizedPOSTRequest(endpoint, study.toJson()),
+ BodyHandlers.ofString());
+
+ if(response.statusCode()!=200) {
+ throw new RuntimeException("Failed : HTTP error code : " + response.statusCode());
+ }
+ JsonNode rootNode = new ObjectMapper().readTree(response.body());
+ JsonNode idNode = rootNode.path("data").path("id");
+
+ return idNode.asText();
+ }
+
+ public String addAssay(ISAAssay assay)
+ throws URISyntaxException, IOException, InterruptedException {
+ String endpoint = apiURL+"/assays";
+
+ HttpResponse<String> response = HttpClient.newBuilder().build()
+ .send(buildAuthorizedPOSTRequest(endpoint, assay.toJson()),
+ BodyHandlers.ofString());
+
+ if(response.statusCode()!=200) {
+ throw new RuntimeException("Failed : HTTP error code : " + response.statusCode());
+ }
+ JsonNode rootNode = new ObjectMapper().readTree(response.body());
+ JsonNode idNode = rootNode.path("data").path("id");
+
+ return idNode.asText();
+ }
+
+ public String createStudy(ISAStudy study)
+ throws IOException, URISyntaxException, InterruptedException {
+ String endpoint = apiURL+"/studies";
+
+ HttpResponse<String> response = HttpClient.newBuilder().build()
+ .send(buildAuthorizedPOSTRequest(endpoint, study.toJson()),
+ BodyHandlers.ofString());
+
+ if(response.statusCode()!=200) {
+ throw new RuntimeException("Failed : HTTP error code : " + response.statusCode());
+ }
+ JsonNode rootNode = new ObjectMapper().readTree(response.body());
+ JsonNode idNode = rootNode.path("data").path("id");
+
+ return idNode.asText();
+ }
+
+ private HttpRequest buildAuthorizedPATCHRequest(String endpoint, String body) throws URISyntaxException {
+ return HttpRequest.newBuilder()
+ .uri(new URI(endpoint))
+ .headers("Content-Type", "application/json")
+ .headers("Accept", "application/json")
+ .headers("Authorization", "Basic " + new String(credentials))
+ .method("PATCH", HttpRequest.BodyPublishers.ofString(body)).build();
+ }
+
+ private HttpRequest buildAuthorizedPOSTRequest(String endpoint, String body) throws URISyntaxException {
+ return HttpRequest.newBuilder()
+ .uri(new URI(endpoint))
+ .headers("Content-Type", "application/json")
+ .headers("Accept", "application/json")
+ .headers("Authorization", "Basic " + new String(credentials))
+ .POST(HttpRequest.BodyPublishers.ofString(body)).build();
+ }
+
+ public boolean studyExists(String id) throws URISyntaxException, IOException, InterruptedException {
+ String endpoint = apiURL+"/studies/"+id;
+ HttpRequest request = HttpRequest.newBuilder()
+ .uri(new URI(endpoint))
+ .headers("Content-Type", "application/json")
+ .headers("Accept", "application/json")
+ .headers("Authorization", "Basic " + new String(credentials))
+ .GET().build();
+ HttpResponse<String> response = HttpClient.newBuilder().build()
+ .send(request, BodyHandlers.ofString());
+ return response.statusCode() == 200;
+ }
+
+ public void printAttributeTypes() throws URISyntaxException, IOException, InterruptedException {
+ String endpoint = apiURL+"/sample_attribute_types";
+ HttpRequest request = HttpRequest.newBuilder()
+ .uri(new URI(endpoint))
+ .headers("Content-Type", "application/json")
+ .headers("Accept", "application/json")
+ .headers("Authorization", "Basic " + new String(credentials))
+ .GET().build();
+ HttpResponse<String> response = HttpClient.newBuilder().build()
+ .send(request, BodyHandlers.ofString());
+ System.err.println(response.body());
+ }
+
+ /*
+-patient id should be linked somehow, maybe gender?
+ */
+
+ public void deleteSampleType(String id) throws URISyntaxException, IOException,
+ InterruptedException {
+ String endpoint = apiURL+"/sample_types";
+ URIBuilder builder = new URIBuilder(endpoint);
+ builder.setParameter("id", id);
+
+ HttpResponse<String> response = HttpClient.newBuilder().build()
+ .send(HttpRequest.newBuilder().uri(builder.build())
+ .headers("Content-Type", "application/json")
+ .headers("Accept", "application/json")
+ .headers("Authorization", "Basic " + new String(credentials))
+ .DELETE().build(), BodyHandlers.ofString());
+
+ if(response.statusCode()!=201) {
+ System.err.println(response.body());
+ throw new RuntimeException("Failed : HTTP error code : " + response.statusCode());
+ }
+ }
+
+ public String createSampleType(ISASampleType sampleType)
+ throws URISyntaxException, IOException, InterruptedException {
+ String endpoint = apiURL+"/sample_types";
+
+ HttpResponse<String> response = HttpClient.newBuilder().build()
+ .send(buildAuthorizedPOSTRequest(endpoint, sampleType.toJson()),
+ BodyHandlers.ofString());
+
+ if(response.statusCode()!=201) {
+ System.err.println(response.body());
+ throw new RuntimeException("Failed : HTTP error code : " + response.statusCode());
+ }
+ JsonNode rootNode = new ObjectMapper().readTree(response.body());
+ JsonNode idNode = rootNode.path("data").path("id");
+
+ return idNode.asText();
+ }
+
+ public String updateSample(ISASample isaSample, String sampleID) throws URISyntaxException, IOException,
+ InterruptedException {
+ String endpoint = apiURL+"/samples/"+sampleID;
+ isaSample.setSampleID(sampleID);
+
+ HttpResponse<String> response = HttpClient.newBuilder().build()
+ .send(buildAuthorizedPATCHRequest(endpoint, isaSample.toJson()),
+ BodyHandlers.ofString());
+
+ if(response.statusCode()!=200) {
+ System.err.println(response.body());
+ throw new RuntimeException("Failed : HTTP error code : " + response.statusCode());
+ }
+ JsonNode rootNode = new ObjectMapper().readTree(response.body());
+ JsonNode idNode = rootNode.path("data").path("id");
+
+ return endpoint+"/"+idNode.asText();
+ }
+
+ public String createSample(ISASample isaSample) throws URISyntaxException, IOException,
+ InterruptedException {
+ String endpoint = apiURL+"/samples";
+
+ HttpResponse<String> response = HttpClient.newBuilder().build()
+ .send(buildAuthorizedPOSTRequest(endpoint, isaSample.toJson()),
+ BodyHandlers.ofString());
+
+ if(response.statusCode()!=200) {
+ System.err.println(response.body());
+ throw new RuntimeException("Failed : HTTP error code : " + response.statusCode());
+ }
+ JsonNode rootNode = new ObjectMapper().readTree(response.body());
+ JsonNode idNode = rootNode.path("data").path("id");
+
+ return endpoint+"/"+idNode.asText();
+ }
+
+ private AssetToUpload createAsset(String datasetCode, GenericSeekAsset data)
+ throws IOException, URISyntaxException, InterruptedException {
+ String endpoint = apiURL+"/"+data.getType();
+
+ HttpResponse<String> response = HttpClient.newBuilder().build()
+ .send(buildAuthorizedPOSTRequest(endpoint, data.toJson()),
+ BodyHandlers.ofString());
+
+ if(response.statusCode()!=201 && response.statusCode()!=200) {
+ System.err.println(response.body());
+ throw new RuntimeException("Failed : HTTP error code : " + response.statusCode());
+ }
+
+ JsonNode rootNode = new ObjectMapper().readTree(response.body());
+ JsonNode idNode = rootNode.path("data")
+ .path("attributes")
+ .path("content_blobs")
+ .path(0).path("link");
+ return new AssetToUpload(idNode.asText(), data.getFileName(), datasetCode, data.fileSizeInBytes());
+ }
+
+ public String uploadFileContent(String blobEndpoint, String file)
+ throws URISyntaxException, IOException, InterruptedException {
+
+ HttpRequest request = HttpRequest.newBuilder()
+ .uri(new URI(blobEndpoint))
+ .headers("Content-Type", "application/octet-stream")
+ .headers("Accept", "application/octet-stream")
+ .headers("Authorization", "Basic " + new String(credentials))
+ .PUT(BodyPublishers.ofFile(new File(file).toPath())).build();
+
+ HttpResponse<String> response = HttpClient.newBuilder().build()
+ .send(request, BodyHandlers.ofString());
+
+ if(response.statusCode()!=200) {
+ System.err.println(response.body());
+ throw new RuntimeException("Failed : HTTP error code : " + response.statusCode());
+ }
+ return blobEndpointToAssetURL(blobEndpoint);
+ }
+
+ public String uploadStreamContent(String blobEndpoint,
+ Supplier<InputStream> streamSupplier)
+ throws URISyntaxException, IOException, InterruptedException {
+
+ HttpRequest request = HttpRequest.newBuilder()
+ .uri(new URI(blobEndpoint))
+ .headers("Content-Type", "application/octet-stream")
+ .headers("Accept", "*/*")
+ .headers("Authorization", "Basic " + new String(credentials))
+ .PUT(BodyPublishers.ofInputStream(streamSupplier)).build();
+
+ HttpResponse<String> response = HttpClient.newBuilder().build()
+ .send(request, BodyHandlers.ofString());
+
+ System.err.println("response was: "+response);
+ System.err.println("response body: "+response.body());
+
+ if(response.statusCode()!=200) {
+ System.err.println(response.body());
+ throw new RuntimeException("Failed : HTTP error code : " + response.statusCode());
+ } else {
+ return blobEndpointToAssetURL(blobEndpoint);
+ }
+ }
+
+ private String blobEndpointToAssetURL(String blobEndpoint) {
+ return blobEndpoint.split("content_blobs")[0];
+ }
+
+ public boolean endPointExists(String endpoint)
+ throws URISyntaxException, IOException, InterruptedException {
+ HttpRequest request = HttpRequest.newBuilder()
+ .uri(new URI(endpoint))
+ .headers("Content-Type", "application/json")
+ .headers("Accept", "application/json")
+ .headers("Authorization", "Basic " + new String(credentials))
+ .GET().build();
+ HttpResponse<String> response = HttpClient.newBuilder().build()
+ .send(request, BodyHandlers.ofString());
+ return response.statusCode() == 200;
+ }
+
+ /**
+ * Creates assets in SEEK for the provided files and attaches them to the given assays.
+ * @param isaToOpenBISFile mapping of the assets to create to the openBIS files they represent
+ * @param assays ids of the assays the new assets should be linked to
+ * @return upload information for the created assets, needed to upload the actual file content
+ * @throws IOException
+ * @throws URISyntaxException
+ * @throws InterruptedException
+ */
+ public List<AssetToUpload> createAssetsForAssays(Map<GenericSeekAsset, DataSetFile> isaToOpenBISFile, List<String> assays)
+ throws IOException, URISyntaxException, InterruptedException {
+ List<AssetToUpload> result = new ArrayList<>();
+ for (GenericSeekAsset isaFile : isaToOpenBISFile.keySet()) {
+ if(!assays.isEmpty()) {
+ isaFile.withAssays(assays);
+ }
+ result.add(createAsset(isaToOpenBISFile.get(isaFile).getDataSetPermId().getPermId(),
+ isaFile));
+ }
+ return result;
+ }
+
+ public String listAssays() throws URISyntaxException, IOException, InterruptedException {
+ String endpoint = apiURL+"/assays/";
+ HttpRequest request = HttpRequest.newBuilder()
+ .uri(new URI(endpoint))
+ .headers("Content-Type", "application/json")
+ .headers("Accept", "application/json")
+ .headers("Authorization", "Basic " + new String(credentials))
+ .GET().build();
+ HttpResponse<String> response = HttpClient.newBuilder().build()
+ .send(request, BodyHandlers.ofString());
+ if(response.statusCode() == 200) {
+ return response.body();
+ } else {
+ throw new RuntimeException("Failed : HTTP error code : " + response.statusCode());
+ }
+ }
+
+ public Map<String, String> getSampleTypeNamesToIDs()
+ throws URISyntaxException, IOException, InterruptedException {
+ String endpoint = apiURL+"/sample_types/";
+ HttpRequest request = HttpRequest.newBuilder()
+ .uri(new URI(endpoint))
+ .headers("Content-Type", "application/json")
+ .headers("Accept", "application/json")
+ .headers("Authorization", "Basic " + new String(credentials))
+ .GET().build();
+ HttpResponse<String> response = HttpClient.newBuilder().build()
+ .send(request, BodyHandlers.ofString());
+ if(response.statusCode() == 200) {
+ return parseSampleTypesJSON(response.body());
+ } else {
+ throw new RuntimeException("Failed : HTTP error code : " + response.statusCode());
+ }
+ }
+
+ private Map<String, String> parseSampleTypesJSON(String json) throws JsonProcessingException {
+ Map<String, String> typesToIDs = new HashMap<>();
+ JsonNode rootNode = new ObjectMapper().readTree(json);
+ JsonNode hits = rootNode.path("data");
+ for (Iterator<JsonNode> it = hits.elements(); it.hasNext(); ) {
+ JsonNode hit = it.next();
+ String id = hit.get("id").asText();
+ String title = hit.get("attributes").get("title").asText();
+ typesToIDs.put(title, id);
+ }
+ return typesToIDs;
+ }
+
+ public boolean sampleTypeExists(String typeCode)
+ throws URISyntaxException, IOException, InterruptedException {
+ JsonNode result = genericSearch("sample_types", typeCode);
+ JsonNode hits = result.path("data");
+ for (Iterator<JsonNode> it = hits.elements(); it.hasNext(); ) {
+ JsonNode hit = it.next();
+ if (hit.get("attributes").get("title").asText().equals(typeCode)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Performs a generic search and returns the response in JSON format
+ * @param nodeType the type of SEEK node to search for
+ * @param searchTerm the term to search for
+ * @return JsonNode of the server's response
+ */
+ private JsonNode genericSearch(String nodeType, String searchTerm)
+ throws URISyntaxException, IOException, InterruptedException {
+ String endpoint = apiURL+"/search";
+ URIBuilder builder = new URIBuilder(endpoint);
+ builder.setParameter("q", searchTerm).setParameter("search_type", nodeType);
+
+ HttpRequest request = HttpRequest.newBuilder()
+ .uri(builder.build())
+ .headers("Content-Type", "application/json")
+ .headers("Accept", "application/json")
+ .headers("Authorization", "Basic " + new String(credentials))
+ .GET().build();
+ HttpResponse<String> response = HttpClient.newBuilder().build()
+ .send(request, BodyHandlers.ofString());
+ if(response.statusCode() == 200) {
+ return new ObjectMapper().readTree(response.body());
+ } else {
+ throw new RuntimeException("Failed : HTTP error code : " + response.statusCode());
+ }
+ }
+
+ private String searchNodeWithTitle(String nodeType, String title)
+ throws URISyntaxException, IOException, InterruptedException {
+ JsonNode result = genericSearch(nodeType, title);
+ JsonNode hits = result.path("data");
+ for (Iterator<JsonNode> it = hits.elements(); it.hasNext(); ) {
+ JsonNode hit = it.next();
+ if (hit.get("attributes").get("title").asText().equals(title)) {
+ return hit.get("id").asText();
+ }
+ }
+ throw new RuntimeException("Matching " + nodeType + " title was not found : " + title);
+ }
+
+ /**
+ * Searches for assays containing a search term and returns a list of found assay ids
+ * @param searchTerm the search term that should be in the assay properties - e.g. an openBIS id
+ * @return ids of the matching assays that belong to the current default study
+ * @throws URISyntaxException
+ * @throws IOException
+ * @throws InterruptedException
+ */
+ public List<String> searchAssaysInStudyContainingKeyword(String searchTerm)
+ throws URISyntaxException, IOException, InterruptedException {
+
+ JsonNode result = genericSearch("assays", "*"+searchTerm+"*");
+
+ JsonNode hits = result.path("data");
+ List<String> assayIDsInStudy = new ArrayList<>();
+ for (Iterator<JsonNode> it = hits.elements(); it.hasNext(); ) {
+ JsonNode hit = it.next();
+ String assayID = hit.get("id").asText();
+ JsonNode assayData = fetchAssayData(assayID).get("data");
+ JsonNode relationships = assayData.get("relationships");
+ String studyID = relationships.get("study").get("data").get("id").asText();
+ if(studyID.equals(currentStudy)) {
+ assayIDsInStudy.add(assayID);
+ }
+ }
+ return assayIDsInStudy;
+ }
+
+ /**
+ * Searches for samples containing a search term and returns a list of found sample ids
+ * @param searchTerm the search term that should be in the sample properties - e.g. an openBIS id
+ * @return ids of the matching samples
+ * @throws URISyntaxException
+ * @throws IOException
+ * @throws InterruptedException
+ */
+ public List<String> searchSamplesContainingKeyword(String searchTerm)
+ throws URISyntaxException, IOException, InterruptedException {
+
+ JsonNode result = genericSearch("samples", "*"+searchTerm+"*");
+
+ JsonNode hits = result.path("data");
+ List<String> sampleIDs = new ArrayList<>();
+ for (Iterator<JsonNode> it = hits.elements(); it.hasNext(); ) {
+ JsonNode hit = it.next();
+ sampleIDs.add(hit.get("id").asText());
+ }
+ return sampleIDs;
+ }
+
+
+ public List<String> searchAssetsContainingKeyword(String searchTerm)
+ throws URISyntaxException, IOException, InterruptedException {
+ List<String> assetIDs = new ArrayList<>();
+ for(String type : ASSET_TYPES) {
+ JsonNode result = genericSearch(type, "*"+searchTerm+"*");
+
+ JsonNode hits = result.path("data");
+ for (Iterator<JsonNode> it = hits.elements(); it.hasNext(); ) {
+ JsonNode hit = it.next();
+ assetIDs.add(hit.get("id").asText());
+ }
+ }
+ return assetIDs;
+ }
+
+
+ /**
+ * Updates information of an existing assay, its samples and attached assets. Missing samples and
+ * assets are created, but nothing missing from the new structure is deleted from SEEK.
+ *
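+ * A typical call (the connector variable, structure and assay id are placeholders) could look like:
+ * <pre>{@code
+ * SeekStructurePostRegistrationInformation info = seek.updateAssayNode(seekStructure, "42");
+ * }</pre>
+ *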
+ * @param nodeWithChildren the translated Seek structure as it should be once the update is done
+ * @param assayID the assay id of the existing assay, that should be compared to the new
+ * structure
+ * @return information necessary to make post registration updates in openBIS and upload missing
+ * data to newly created assets. In the case of the update use case, only newly created objects
+ * will be contained in the return object.
+ */
+ public SeekStructurePostRegistrationInformation updateAssayNode(SeekStructure nodeWithChildren,
+ String assayID) throws URISyntaxException, IOException, InterruptedException {
+ JsonNode assayData = fetchAssayData(assayID).get("data");
+ Map<String, SampleInformation> sampleInfos = collectSampleInformation(assayData);
+
+ // compare samples
+ Map<ISASample, String> newSamplesWithReferences = nodeWithChildren.getSamplesWithOpenBISReference();
+
+ List<ISASample> samplesToCreate = new ArrayList<>();
+ for (ISASample newSample : newSamplesWithReferences.keySet()) {
+ String openBisID = newSamplesWithReferences.get(newSample);
+ SampleInformation existingSample = sampleInfos.get(openBisID);
+ if (existingSample == null) {
+ samplesToCreate.add(newSample);
+ System.out.printf("%s not found in SEEK. It will be created.%n", openBisID);
+ } else {
+ Map<String, Object> newAttributes = newSample.fetchCopyOfAttributeMap();
+ for (String key : newAttributes.keySet()) {
+ Object newValue = newAttributes.get(key);
+ Object oldValue = existingSample.getAttributes().get(key);
+
+ boolean oldEmpty = oldValue == null || oldValue.toString().isEmpty();
+ boolean newEmpty = newValue == null || newValue.toString().isEmpty();
+ if ((!oldEmpty && !newEmpty) && !newValue.toString().equals(oldValue.toString())) {
+ System.out.printf("Mismatch found in %s attribute of %s. Sample will be updated.%n",
+ key, openBisID);
+ newSample.setAssayIDs(List.of(assayID));
+ updateSample(newSample, existingSample.getSeekID());
+ }
+ }
+ }
+ }
+
+ // compare assets
+ Map<String, AssetInformation> assetInfos = collectAssetInformation(assayData);
+ Map<GenericSeekAsset, DataSetFile> newAssetsToFiles = nodeWithChildren.getISAFileToDatasetFiles();
+
+ List<GenericSeekAsset> assetsToCreate = new ArrayList<>();
+ for (GenericSeekAsset newAsset : newAssetsToFiles.keySet()) {
+ DataSetFile file = newAssetsToFiles.get(newAsset);
+ String newPermId = file.getDataSetPermId().getPermId();
+ if (!assetInfos.containsKey(newPermId)) {
+ assetsToCreate.add(newAsset);
+ System.out.printf("Assets with Dataset PermId %s not found in SEEK. File %s from this "
+ + "Dataset will be created.%n", newPermId, newAsset.getFileName());
+ }
+ }
+ Map<String, String> sampleIDsWithEndpoints = new HashMap<>();
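+ // create the samples that were not found in SEEK and attach them to the existing assay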
+ for (ISASample sample : samplesToCreate) {
+ sample.setAssayIDs(Collections.singletonList(assayID));
+ String sampleEndpoint = createSample(sample);
+ sampleIDsWithEndpoints.put(newSamplesWithReferences.get(sample), sampleEndpoint);
+ }
+ List<AssetToUpload> assetsToUpload = new ArrayList<>();
+ for (GenericSeekAsset asset : assetsToCreate) {
+ asset.withAssays(Collections.singletonList(assayID));
+ assetsToUpload.add(createAsset(newAssetsToFiles.get(asset).getDataSetPermId().getPermId(),
+ asset));
+ }
+ Map<String, Set<String>> datasetIDsWithEndpoints = new HashMap<>();
+
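+ // group the new asset view URLs by their openBIS dataset code for the post-registration information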
+ for (AssetToUpload asset : assetsToUpload) {
+ String endpointWithoutBlob = blobEndpointToAssetURL(asset.getBlobEndpoint());
+ String dsCode = asset.getDataSetCode();
+ if (datasetIDsWithEndpoints.containsKey(dsCode)) {
+ datasetIDsWithEndpoints.get(dsCode).add(endpointWithoutBlob);
+ } else {
+ datasetIDsWithEndpoints.put(dsCode, new HashSet<>(
+ List.of(endpointWithoutBlob)));
+ }
+ }
+
+ String assayEndpoint = apiURL + "/assays/" + assayID;
+
+ String expID = nodeWithChildren.getAssayWithOpenBISReference().getRight();
+ Pair<String, String> experimentIDWithEndpoint = new ImmutablePair<>(expID, assayEndpoint);
+
+ SeekStructurePostRegistrationInformation postRegInfo =
+ new SeekStructurePostRegistrationInformation(assetsToUpload, sampleIDsWithEndpoints,
+ datasetIDsWithEndpoints);
+ postRegInfo.setExperimentIDWithEndpoint(experimentIDWithEndpoint);
+ return postRegInfo;
+ }
+
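+ /**
+ * Collects information about all assets attached to the given assay and maps them by their
+ * openBIS dataset PermId. Assets for which no PermId can be parsed from title or description
+ * are skipped and treated as missing.
+ */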
+ private Map<String, AssetInformation> collectAssetInformation(JsonNode assayData)
+ throws URISyntaxException, IOException, InterruptedException {
+ Map<String, AssetInformation> assets = new HashMap<>();
+ JsonNode relationships = assayData.get("relationships");
+ for(String type : ASSET_TYPES) {
+ for (Iterator<JsonNode> it = relationships.get(type).get("data").elements(); it.hasNext(); ) {
+ String assetID = it.next().get("id").asText();
+ AssetInformation assetInfo = fetchAssetInformation(assetID, type);
+ if(assetInfo.getOpenbisPermId()!=null) {
+ assets.put(assetInfo.getOpenbisPermId(), assetInfo);
+ } else {
+ System.out.printf("No Dataset permID found for existing %s %s (id: %s)%n"
+ + "This asset will be treated as if it would not exist in the update.%n",
+ type, assetInfo.getTitle(), assetID);
+ }
+ }
+ }
+ return assets;
+ }
+
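+ /**
+ * Collects information about all samples attached to the given assay and maps them by their
+ * openBIS identifier (stored as the sample title in SEEK).
+ */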
+ private Map<String, SampleInformation> collectSampleInformation(JsonNode assayData)
+ throws URISyntaxException, IOException, InterruptedException {
+ Map<String, SampleInformation> samples = new HashMap<>();
+ JsonNode relationships = assayData.get("relationships");
+ for (Iterator it = relationships.get("samples").get("data").elements(); it.hasNext(); ) {
+ String sampleID = it.next().get("id").asText();
+ SampleInformation info = fetchSampleInformation(sampleID);
+ samples.put(info.getOpenBisIdentifier(), info);
+ }
+ return samples;
+ }
+
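+ /**
+ * Fetches title and description of an asset from SEEK and tries to parse an openBIS dataset
+ * PermId from either of them.
+ */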
+ private AssetInformation fetchAssetInformation(String assetID, String assetType)
+ throws URISyntaxException, IOException, InterruptedException {
+ String endpoint = apiURL+"/"+assetType+"/"+assetID;
+ URIBuilder builder = new URIBuilder(endpoint);
+
+ HttpRequest request = HttpRequest.newBuilder()
+ .uri(builder.build())
+ .headers("Content-Type", "application/json")
+ .headers("Accept", "application/json")
+ .headers("Authorization", "Basic " + new String(credentials))
+ .GET().build();
+ HttpResponse<String> response = HttpClient.newBuilder().build()
+ .send(request, BodyHandlers.ofString());
+ if(response.statusCode() == 200) {
+ JsonNode attributes = new ObjectMapper().readTree(response.body()).get("data").get("attributes");
+ String title = attributes.get("title").asText();
+ String description = attributes.get("description").asText();
+ AssetInformation result = new AssetInformation(assetID, assetType, title, description);
+ Optional<String> permID = tryParseDatasetPermID(title);
+ if(permID.isPresent()) {
+ result.setOpenbisPermId(permID.get());
+ } else {
+ tryParseDatasetPermID(description).ifPresent(result::setOpenbisPermId);
+ }
+ return result;
+ } else {
+ throw new RuntimeException("Failed : HTTP error code : " + response.statusCode());
+ }
+ }
+
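+ /**
+ * Tries to parse an openBIS dataset PermId from the given input using the dataset code pattern
+ * of the OpenbisConnector.
+ */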
+ private Optional<String> tryParseDatasetPermID(String input) {
+ Matcher titleMatcher = OpenbisConnector.datasetCodePattern.matcher(input);
+ if(titleMatcher.find()) {
+ return Optional.of(titleMatcher.group());
+ }
+ return Optional.empty();
+ }
+
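+ /**
+ * Updates information of an existing sample. Samples with other openBIS identifiers found in
+ * the provided structure are created. Assets are currently not created here, since they cannot
+ * be reliably attached without an assay (see TODOs below).
+ */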
+ public SeekStructurePostRegistrationInformation updateSampleNode(SeekStructure nodeWithChildren,
+ String sampleID) throws URISyntaxException, IOException, InterruptedException {
+ SampleInformation existingSampleInfo = fetchSampleInformation(sampleID);
+ //TODO to be able to connect samples with assets, we need to create a new assay, here
+
+ // compare samples
+ Map<ISASample, String> newSamplesWithReferences = nodeWithChildren.getSamplesWithOpenBISReference();
+
+ List<ISASample> samplesToCreate = new ArrayList<>();
+ for (ISASample newSample : newSamplesWithReferences.keySet()) {
+ String openBisID = newSamplesWithReferences.get(newSample);
+ if (!existingSampleInfo.getOpenBisIdentifier().equals(openBisID)) {
+ samplesToCreate.add(newSample);
+ System.out.printf("%s not found in SEEK. It will be created.%n", openBisID);
+ } else {
+ Map<String, Object> newAttributes = newSample.fetchCopyOfAttributeMap();
+ for (String key : newAttributes.keySet()) {
+ Object newValue = newAttributes.get(key);
+ Object oldValue = existingSampleInfo.getAttributes().get(key);
+
+ boolean oldEmpty = oldValue == null || oldValue.toString().isEmpty();
+ boolean newEmpty = newValue == null || newValue.toString().isEmpty();
+ if ((!oldEmpty && !newEmpty) && !newValue.equals(oldValue)) {
+ System.out.printf("Mismatch found in attributes of %s. Sample will be updated.%n",
+ openBisID);
+ updateSample(newSample, sampleID);
+ }
+ }
+ }
+ }
+
+ // compare assets
+ Map<GenericSeekAsset, DataSetFile> newAssetsToFiles = nodeWithChildren.getISAFileToDatasetFiles();
+
+ //TODO follow creation of assets for assay, no way to be sure these are attached to similar samples
+ List<GenericSeekAsset> assetsToCreate = new ArrayList<>();
+
+ Map<String, String> sampleIDsWithEndpoints = new HashMap<>();
+ for (ISASample sample : samplesToCreate) {
+ String sampleEndpoint = createSample(sample);
+ sampleIDsWithEndpoints.put(newSamplesWithReferences.get(sample), sampleEndpoint);
+ }
+ List<AssetToUpload> assetsToUpload = new ArrayList<>();
+
+ for (GenericSeekAsset asset : assetsToCreate) {
+ assetsToUpload.add(createAsset(newAssetsToFiles.get(asset).getDataSetPermId().getPermId(),
+ asset));
+ }
+ Map<String, Set<String>> datasetIDsWithEndpoints = new HashMap<>();
+
+ for (AssetToUpload asset : assetsToUpload) {
+ String endpointWithoutBlob = blobEndpointToAssetURL(asset.getBlobEndpoint());
+ String dsCode = asset.getDataSetCode();
+ if (datasetIDsWithEndpoints.containsKey(dsCode)) {
+ datasetIDsWithEndpoints.get(dsCode).add(endpointWithoutBlob);
+ } else {
+ datasetIDsWithEndpoints.put(dsCode, new HashSet<>(
+ List.of(endpointWithoutBlob)));
+ }
+ }
+
+ return new SeekStructurePostRegistrationInformation(assetsToUpload, sampleIDsWithEndpoints,
+ datasetIDsWithEndpoints);
+ }
+
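+ /**
+ * Fetches a sample from SEEK and returns its SEEK id, openBIS identifier (the sample title)
+ * and attribute map.
+ */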
+ private SampleInformation fetchSampleInformation(String sampleID) throws URISyntaxException,
+ IOException, InterruptedException {
+ String endpoint = apiURL+"/samples/"+sampleID;
+ URIBuilder builder = new URIBuilder(endpoint);
+
+ HttpRequest request = HttpRequest.newBuilder()
+ .uri(builder.build())
+ .headers("Content-Type", "application/json")
+ .headers("Accept", "application/json")
+ .headers("Authorization", "Basic " + new String(credentials))
+ .GET().build();
+ HttpResponse<String> response = HttpClient.newBuilder().build()
+ .send(request, BodyHandlers.ofString());
+ if(response.statusCode() == 200) {
+ JsonNode attributeNode = new ObjectMapper().readTree(response.body()).get("data").get("attributes");
+ //title is openbis identifier - this is also added to attribute_map under the name:
+ //App.configProperties.get("seek_openbis_sample_title");
+ String openBisId = attributeNode.get("title").asText();
+ Map<String, Object> attributesMap = new ObjectMapper()
+ .convertValue(attributeNode.get("attribute_map"), Map.class);
+ return new SampleInformation(sampleID, openBisId, attributesMap);
+ } else {
+ throw new RuntimeException("Failed : HTTP error code : " + response.statusCode());
+ }
+ }
+
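+ /**
+ * Fetches the full JSON representation of an assay from the SEEK API.
+ */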
+ private JsonNode fetchAssayData(String assayID)
+ throws URISyntaxException, IOException, InterruptedException {
+ String endpoint = apiURL+"/assays/"+assayID;
+ URIBuilder builder = new URIBuilder(endpoint);
+
+ HttpRequest request = HttpRequest.newBuilder()
+ .uri(builder.build())
+ .headers("Content-Type", "application/json")
+ .headers("Accept", "application/json")
+ .headers("Authorization", "Basic " + new String(credentials))
+ .GET().build();
+ HttpResponse<String> response = HttpClient.newBuilder().build()
+ .send(request, BodyHandlers.ofString());
+ if(response.statusCode() == 200) {
+ return new ObjectMapper().readTree(response.body());
+ } else {
+ throw new RuntimeException("Failed : HTTP error code : " + response.statusCode());
+ }
+ }
+
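+ /**
+ * Creates a new assay, its samples and attached assets in SEEK, based on the translated
+ * structure.
+ * @return information necessary to make post registration updates in openBIS and upload data
+ * to the newly created assets
+ */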
+ public SeekStructurePostRegistrationInformation createNode(SeekStructure nodeWithChildren)
+ throws URISyntaxException, IOException, InterruptedException {
+ Pair<ISAAssay, String> assayIDPair = nodeWithChildren.getAssayWithOpenBISReference();
+
+ System.out.println("Creating assay...");
+ String assayID = addAssay(assayIDPair.getKey());
+ String assayEndpoint = apiURL+"/assays/"+assayID;
+ Pair<String, String> experimentIDWithEndpoint =
+ new ImmutablePair<>(assayIDPair.getValue(), assayEndpoint);
+
+ //wait for a bit, so we can be sure the assay that will be referenced by the samples has been created
+ Thread.sleep(3000);
+
+ Map<String, String> sampleIDsWithEndpoints = new HashMap<>();
+ Map<ISASample, String> samplesWithReferences = nodeWithChildren.getSamplesWithOpenBISReference();
+ if(!samplesWithReferences.isEmpty()) {
+ System.out.println("Creating samples...");
+ }
+ for(ISASample sample : samplesWithReferences.keySet()) {
+ sample.setAssayIDs(Collections.singletonList(assayID));
+ String sampleEndpoint = createSample(sample);
+ sampleIDsWithEndpoints.put(samplesWithReferences.get(sample), sampleEndpoint);
+ }
+
+ Map<GenericSeekAsset, DataSetFile> isaToFileMap = nodeWithChildren.getISAFileToDatasetFiles();
+
+ if(!isaToFileMap.isEmpty()) {
+ System.out.println("Creating assets...");
+ }
+
+ List<AssetToUpload> assetsToUpload = createAssetsForAssays(isaToFileMap,
+ Collections.singletonList(assayID));
+
+ Map<String, Set<String>> datasetIDsWithEndpoints = new HashMap<>();
+
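+ // group the created asset view URLs by their openBIS dataset code, analogous to the update case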
+ for(AssetToUpload asset : assetsToUpload) {
+ String endpointWithoutBlob = blobEndpointToAssetURL(asset.getBlobEndpoint());
+ String dsCode = asset.getDataSetCode();
+ if(datasetIDsWithEndpoints.containsKey(dsCode)) {
+ datasetIDsWithEndpoints.get(dsCode).add(endpointWithoutBlob);
+ } else {
+ datasetIDsWithEndpoints.put(dsCode, new HashSet<>(
+ List.of(endpointWithoutBlob)));
+ }
+ }
+ SeekStructurePostRegistrationInformation postRegInfo =
+ new SeekStructurePostRegistrationInformation(assetsToUpload, sampleIDsWithEndpoints,
+ datasetIDsWithEndpoints);
+ postRegInfo.setExperimentIDWithEndpoint(experimentIDWithEndpoint);
+ return postRegInfo;
+ }
+
+ /*
+ public SeekStructurePostRegistrationInformation createSampleWithAssets(SeekStructure nodeWithChildren)
+ throws URISyntaxException, IOException, InterruptedException {
+ Map<String, String> sampleIDsWithEndpoints = new HashMap<>();
+ Map<ISASample, String> samplesWithReferences = nodeWithChildren.getSamplesWithOpenBISReference();
+ for(ISASample sample : samplesWithReferences.keySet()) {
+ String sampleEndpoint = createSample(sample);
+ sampleIDsWithEndpoints.put(samplesWithReferences.get(sample), sampleEndpoint);
+ }
+
+ Map<GenericSeekAsset, DataSetFile> isaToFileMap = nodeWithChildren.getISAFileToDatasetFiles();
+
+ List<AssetToUpload> assetsToUpload = createAssetsForAssays(isaToFileMap, new ArrayList<>());
+
+ Map<String, Set<String>> datasetIDsWithEndpoints = new HashMap<>();
+
+ for(AssetToUpload asset : assetsToUpload) {
+ String endpointWithoutBlob = blobEndpointToAssetURL(asset.getBlobEndpoint());
+ String dsCode = asset.getDataSetCode();
+ if(datasetIDsWithEndpoints.containsKey(dsCode)) {
+ datasetIDsWithEndpoints.get(dsCode).add(endpointWithoutBlob);
+ } else {
+ datasetIDsWithEndpoints.put(dsCode, new HashSet<>(
+ List.of(endpointWithoutBlob)));
+ }
+ }
+ return new SeekStructurePostRegistrationInformation(assetsToUpload, sampleIDsWithEndpoints,
+ datasetIDsWithEndpoints);
+ }
+
+ public SeekStructurePostRegistrationInformation createStandaloneAssets(
+ Map