diff --git a/.github/workflows/master-build.yml b/.github/workflows/master-build.yml
index f737eca922e..70e69121d2a 100644
--- a/.github/workflows/master-build.yml
+++ b/.github/workflows/master-build.yml
@@ -52,6 +52,13 @@ jobs:
- 9300:9300
options: -e="discovery.type=single-node" -e="xpack.security.enabled=false" --health-cmd="curl http://localhost:9200/_cluster/health" --health-interval=10s --health-timeout=5s --health-retries=10
+ minio:
+ image: docker.io/bitnami/minio:2022
+ ports:
+ - 9000:9000
+ - 9001:9001
+ options: -e="MINIO_ROOT_USER=root" -e="MINIO_ROOT_PASSWORD=password" -e="MINIO_DEFAULT_BUCKETS=default"
+
steps:
- name: Test Database
env:
diff --git a/.github/workflows/pr-build.yml b/.github/workflows/pr-build.yml
index 13a23c48952..153708dea80 100644
--- a/.github/workflows/pr-build.yml
+++ b/.github/workflows/pr-build.yml
@@ -51,6 +51,13 @@ jobs:
- 9300:9300
options: -e="discovery.type=single-node" -e="xpack.security.enabled=false" --health-cmd="curl http://localhost:9200/_cluster/health" --health-interval=10s --health-timeout=5s --health-retries=10
+ minio:
+ image: docker.io/bitnami/minio:2022
+ ports:
+ - 9000:9000
+ - 9001:9001
+ options: -e="MINIO_ROOT_USER=root" -e="MINIO_ROOT_PASSWORD=password" -e="MINIO_DEFAULT_BUCKETS=default"
+
steps:
- name: Test Database
env:
diff --git a/.github/workflows/release-build.yml b/.github/workflows/release-build.yml
index df166eb38eb..b9fb8ff7502 100644
--- a/.github/workflows/release-build.yml
+++ b/.github/workflows/release-build.yml
@@ -63,6 +63,13 @@ jobs:
- 9300:9300
options: -e="discovery.type=single-node" -e="xpack.security.enabled=false" --health-cmd="curl http://localhost:9200/_cluster/health" --health-interval=10s --health-timeout=5s --health-retries=10
+ minio:
+ image: docker.io/bitnami/minio:2022
+ ports:
+ - 9000:9000
+ - 9001:9001
+ options: -e="MINIO_ROOT_USER=root" -e="MINIO_ROOT_PASSWORD=password" -e="MINIO_DEFAULT_BUCKETS=default"
+
steps:
- name: Test Database
env:
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 453d1653a2e..d9394acd03e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,7 +7,7 @@ to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
Full Changelog: [https://github.com/netgrif/application-engine/commits/v6.3.2](https://github.com/netgrif/application-engine/commits/v6.3.2)
-## [6.4.0](https://github.com/netgrif/application-engine/releases/tag/v6.4.0) (2024-04-19)
+## [6.4.0](https://github.com/netgrif/application-engine/releases/tag/v6.4.0) (2024-09-26)
### Fixed
- [NAE-1908] NAE-1906 Improvements
@@ -17,6 +17,10 @@ Full Changelog: [https://github.com/netgrif/application-engine/commits/v6.3.2](h
- [NAE-1959] Indexing enumerationMap field fails when no options exist
- [NAE-1960] Enumeration Map does not propagate changes when selecting
- [NAE-1967] Elasticsearch disable dynamic field mapping
+- [NAE-2006] WorkflowService.deleteInstancesOfPetriNet does not remove all cases
+- [NAE-1983] Public view file handling
+- [NAE-2007] Vulnerabilities fix
+- [NAE-1952] Fix Loading Issue for Duplicate TaskRef Entries
### Added
- [NAE-1901] Taskref list rendering update
@@ -35,9 +39,13 @@ Full Changelog: [https://github.com/netgrif/application-engine/commits/v6.3.2](h
- [NAE-1955] Update setData to handle options / choices
- [NAE-1958] Make component properties changeable
- [NAE-1962] Event properties
+- [NAE-1946] Remote file connector to S3
+- [NAE-1927] Shared Roles
+- [NAE-1945] External resource loader
### Changed
- [NAE-1947] HistoryService findAllSetDataEventLogs is not working
+- [NAE-1979] Optimize Maven Resources Plugin Configuration for Correct File Filtering and Copying
## [6.3.3](https://github.com/netgrif/application-engine/releases/tag/v6.3.3) (2024-01-19)
diff --git a/docker-compose.yml b/docker-compose.yml
index fd4b0a5eb29..3e275c95c1a 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -2,7 +2,7 @@ version: "3.3"
services:
docker-mongo:
- image: mongo:7.0.9
+ image: mongo:8.0.3
ports:
- "27017:27017"
deploy:
@@ -15,7 +15,7 @@ services:
memory: "512M"
docker-elastic:
- image: elasticsearch:8.10.4
+ image: elasticsearch:8.15.3
environment:
- cluster.name=elasticsearch
- discovery.type=single-node
@@ -35,9 +35,29 @@ services:
memory: "512M"
docker-redis:
- image: redis:7.2.5
+ image: redis:7.4.1
ports:
- "6379:6379"
+ minio:
+ image: docker.io/bitnami/minio:2022
+ ports:
+ - '9000:9000'
+ - '9001:9001'
+ networks:
+ - minionetwork
+ volumes:
+ - 'minio_data:/data'
+ environment:
+ - MINIO_ROOT_USER=root
+ - MINIO_ROOT_PASSWORD=password
+ - MINIO_DEFAULT_BUCKETS=default
+networks:
+ minionetwork:
+ driver: bridge
+
+volumes:
+ minio_data:
+ driver: local
# kibana:
# image: docker.elastic.co/kibana/kibana:8.10.4
diff --git a/docs/resources/resources_loading.md b/docs/resources/resources_loading.md
new file mode 100644
index 00000000000..963776b5293
--- /dev/null
+++ b/docs/resources/resources_loading.md
@@ -0,0 +1,35 @@
+# Resource Loading
+
+If you want to load resources that are not included in the JAR (for example large files), you can use the resource loader.
+ResourceLoader returns an InputStreamResource. You can turn it into an InputStream and load resources from the directory **resource/** in the working directory of the app.
+The prefix for ExternalResourceLoader is
+
+```
+resource:
+```
+
+You can use code like this in your runner:
+```java
+ @Autowired
+ private ResourceLoader resourceLoader;
+
+ @Value("resource:nameOfFile.txt")
+ private Resource customResource;
+
+ @Override
+ void run(String... strings) throws Exception {
+ loadResources("resource:nameOfFile.txt");
+ }
+
+ void loadResources(String resourceUrl) {
+ var resource = resourceLoader.getResource(resourceUrl);
+ var txt = new String(resource.getInputStream().readAllBytes());
+ System.out.println("File content: " + txt);
+ }
+
+ void getCustomResource() throws IOException {
+ var txt = new String(customResource.getInputStream().readAllBytes());
+ System.out.println("Resource from property: " + txt);
+ }
+```
+
diff --git a/docs/roles/shared_roles.md b/docs/roles/shared_roles.md
new file mode 100644
index 00000000000..3ef64b026a0
--- /dev/null
+++ b/docs/roles/shared_roles.md
@@ -0,0 +1,36 @@
+# Shared roles
+Shared roles or global roles are roles that are only created once and can be used and referenced across Petri nets.
+To use a shared role in a Petri net, we must first declare it. We can declare it like any other role, with the ``global``
+attribute set to ``true``:
+```xml
+
+ nae_1927
+ ...
+
+ admin_global
+ Global Administrator
+
+ ...
+
+```
+Then we can reference it as usual:
+```xml
+...
+
+ t1
+ 460
+ 180
+
+
+ admin_global
+
+ true
+ true
+
+
+
+...
+```
+When importing a Petri net, the importer checks whether the global role already exists.
+If not, the importer creates one. If it already exists, the importer passes it to the newly created net.
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index 7cdc21b159f..28b238f6c07 100644
--- a/pom.xml
+++ b/pom.xml
@@ -106,7 +106,7 @@
org.apache.commons
commons-compress
- 1.21
+ 1.26.0
com.beust
@@ -194,14 +194,14 @@
org.bouncycastle
- bcmail-jdk15on
- 1.70
+ bcmail-jdk18on
+ 1.78.1
org.bouncycastle
- bcprov-jdk15on
- 1.70
+ bcprov-jdk18on
+ 1.78.1
io.jsonwebtoken
@@ -320,8 +320,7 @@
org.apache.xmlgraphics
batik-all
- 1.14
- pom
+ 1.17
commons-io
@@ -349,7 +348,7 @@
org.jsoup
jsoup
- 1.14.3
+ 1.15.4
@@ -444,7 +443,7 @@
org.apache.commons
commons-lang3
- 3.11
+ 3.17.0
org.springframework.boot
@@ -518,7 +517,7 @@
com.google.guava
guava
- 31.1-jre
+ 32.0.0-jre
@@ -564,12 +563,27 @@
jackson-module-jsonSchema
${jackson.version}
+
+ io.minio
+ minio
+ 8.5.12
+
src/main/resources
+
+ **/*.*
+
+ false
+
+
+ src/main/resources
+
+ **/*.properties
+
true
@@ -612,6 +626,9 @@
repackage
repackage
+
+ exec
+
@@ -793,7 +810,7 @@
org.jacoco
jacoco-maven-plugin
- 0.8.5
+ 0.8.10
default-prepare-agent
@@ -819,7 +836,7 @@
org.apache.maven.plugins
maven-resources-plugin
- 3.1.0
+ 3.2.0
@
diff --git a/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/FileField.groovy b/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/FileField.groovy
index 30b6ae8d85a..3fa0eaed1c5 100644
--- a/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/FileField.groovy
+++ b/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/FileField.groovy
@@ -4,9 +4,7 @@ package com.netgrif.application.engine.petrinet.domain.dataset
import org.springframework.data.mongodb.core.mapping.Document
@Document
-class FileField extends Field {
-
- private Boolean remote
+class FileField extends StorageField {
FileField() {
super()
@@ -42,39 +40,11 @@ class FileField extends Field {
this.setDefaultValue(FileFieldValue.fromString(defaultValue))
}
- /**
- * Get complete file path to the file
- * Path is generated as follow:
- * - if file is remote, path is field value / remote URI
- * - if file is local
- * - saved file name consists of Case id, field import id and original file name separated by dash
- * @param caseId
- * @return path to the saved file
- */
- String getFilePath(String caseId) {
- if (this.remote)
- return this.getValue().getPath()
- return this.getValue().getPath(caseId, getStringId())
- }
-
- String getFilePreviewPath(String caseId) {
- return this.getValue().getPreviewPath(caseId, getStringId())
- }
-
- boolean isRemote() {
- return this.remote
- }
-
- void setRemote(boolean remote) {
- this.remote = remote
- }
-
@Override
Field clone() {
FileField clone = new FileField()
super.clone(clone)
- clone.remote = this.remote
-
+ clone.storage = this.storage
return clone
}
}
\ No newline at end of file
diff --git a/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/FileFieldDataType.groovy b/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/FileFieldDataType.groovy
index df0cf5bc7b7..20c9460c147 100644
--- a/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/FileFieldDataType.groovy
+++ b/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/FileFieldDataType.groovy
@@ -18,5 +18,11 @@ enum FileFieldDataType {
return item
}
}
+ return null
+ }
+
+ static FileFieldDataType resolveTypeFromName(String name) {
+ int dot = name.lastIndexOf(".")
+ return resolveType((dot == -1) ? "" : name.substring(dot + 1))
}
}
diff --git a/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/FileFieldValue.groovy b/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/FileFieldValue.groovy
index d8df88e0320..4773e920c7e 100644
--- a/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/FileFieldValue.groovy
+++ b/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/FileFieldValue.groovy
@@ -1,16 +1,15 @@
package com.netgrif.application.engine.petrinet.domain.dataset
-import com.netgrif.application.engine.configuration.ApplicationContextProvider
-import com.netgrif.application.engine.workflow.domain.FileStorageConfiguration
-
class FileFieldValue implements Serializable {
- private static final long serialVersionUID = 1299918326436821185L;
+ private static final long serialVersionUID = 1299918326436821185L
private String name
private String path
+ private String previewPath
+
FileFieldValue() {
}
@@ -19,6 +18,12 @@ class FileFieldValue implements Serializable {
this.path = path
}
+ FileFieldValue(String name, String path, String previewPath) {
+ this.name = name
+ this.path = path
+ this.previewPath = previewPath
+ }
+
static FileFieldValue fromString(String value) {
if (!value.contains(":"))
return new FileFieldValue(value, null)
@@ -39,21 +44,18 @@ class FileFieldValue implements Serializable {
return path
}
- String getPath(String caseId, String fieldId) {
- FileStorageConfiguration fileStorageConfiguration = ApplicationContextProvider.getBean("fileStorageConfiguration") as FileStorageConfiguration
- return "${fileStorageConfiguration.getStoragePath()}/${caseId}-${fieldId}-${name}"
+ void setPath(String path) {
+ this.path = path
}
- String getPreviewPath(String caseId, String fieldId) {
- FileStorageConfiguration fileStorageConfiguration = ApplicationContextProvider.getBean("fileStorageConfiguration") as FileStorageConfiguration
- return "${fileStorageConfiguration.getStoragePath()}/file_preview/${caseId}-${fieldId}-${name}"
+ String getPreviewPath() {
+ return previewPath
}
- void setPath(String path) {
- this.path = path
+ void setPreviewPath(String previewPath) {
+ this.previewPath = previewPath
}
-
@Override
String toString() {
return path
diff --git a/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/FileListField.groovy b/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/FileListField.groovy
index 4e4c13b4145..0a8a432a382 100644
--- a/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/FileListField.groovy
+++ b/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/FileListField.groovy
@@ -1,7 +1,6 @@
package com.netgrif.application.engine.petrinet.domain.dataset
-class FileListField extends Field {
- private Boolean remote
+class FileListField extends StorageField {
FileListField() {
super()
@@ -48,38 +47,11 @@ class FileListField extends Field {
this.getValue().getNamesPaths().add(new FileFieldValue(fileName, path))
}
- /**
- * Get complete file path to the file
- * Path is generated as follow:
- * - if file is remote, path is field value / remote URI
- * - if file is local
- * - saved file path consists of Case id, slash field import id, slash original file name
- * @param caseId
- * @param name
- * @return path to the saved file
- */
- String getFilePath(String caseId, String name) {
- if (this.remote) {
- FileFieldValue first = this.getValue().getNamesPaths().find({ namePath -> namePath.name == name })
- return first != null ? first.path : null
- }
- return FileListFieldValue.getPath(caseId, getStringId(), name)
- }
-
- boolean isRemote() {
- return this.remote
- }
-
- void setRemote(boolean remote) {
- this.remote = remote
- }
-
@Override
Field clone() {
FileListField clone = new FileListField()
super.clone(clone)
- clone.remote = this.remote
-
+ clone.storage = this.storage
return clone
}
}
diff --git a/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/FileListFieldValue.groovy b/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/FileListFieldValue.groovy
index a69f2b33afc..7775e6f4403 100644
--- a/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/FileListFieldValue.groovy
+++ b/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/FileListFieldValue.groovy
@@ -1,8 +1,5 @@
package com.netgrif.application.engine.petrinet.domain.dataset
-import com.netgrif.application.engine.configuration.ApplicationContextProvider
-import com.netgrif.application.engine.workflow.domain.FileStorageConfiguration
-
class FileListFieldValue implements Serializable {
private static final long serialVersionUID = 5299918326436821185L;
@@ -39,11 +36,6 @@ class FileListFieldValue implements Serializable {
return newVal
}
- static String getPath(String caseId, String fieldId, String name) {
- FileStorageConfiguration fileStorageConfiguration = ApplicationContextProvider.getBean("fileStorageConfiguration")
- return "${fileStorageConfiguration.getStoragePath()}/${caseId}/${fieldId}/${name}"
- }
-
@Override
String toString() {
return namesPaths.toString()
diff --git a/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/MinIoStorage.groovy b/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/MinIoStorage.groovy
new file mode 100644
index 00000000000..3d252fea907
--- /dev/null
+++ b/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/MinIoStorage.groovy
@@ -0,0 +1,17 @@
+package com.netgrif.application.engine.petrinet.domain.dataset;
+
+class MinIoStorage extends Storage {
+ private String bucket
+
+ MinIoStorage() {
+ super(StorageType.MINIO)
+ }
+
+ String getBucket() {
+ return bucket
+ }
+
+ void setBucket(String bucket) {
+ this.bucket = bucket
+ }
+}
diff --git a/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/Storage.groovy b/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/Storage.groovy
new file mode 100644
index 00000000000..e846623e454
--- /dev/null
+++ b/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/Storage.groovy
@@ -0,0 +1,45 @@
+package com.netgrif.application.engine.petrinet.domain.dataset
+
+import com.querydsl.core.annotations.PropertyType
+import com.querydsl.core.annotations.QueryType
+
+class Storage {
+ private StorageType type
+ private String host
+
+ Storage() {
+ this.type = StorageType.LOCAL
+ }
+
+ Storage(StorageType type) {
+ this()
+ this.type = type
+ }
+
+ Storage(StorageType type, String host) {
+ this(type)
+ this.host = host
+ }
+
+ StorageType getType() {
+ return type
+ }
+
+ void setType(StorageType type) {
+ this.type = type
+ }
+
+ String getHost() {
+ return host
+ }
+
+ void setHost(String host) {
+ this.host = host
+ }
+
+ @Override
+ @QueryType(PropertyType.NONE)
+ MetaClass getMetaClass() {
+ return this.metaClass
+ }
+}
diff --git a/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/StorageField.groovy b/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/StorageField.groovy
new file mode 100644
index 00000000000..2eeebb95a79
--- /dev/null
+++ b/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/StorageField.groovy
@@ -0,0 +1,29 @@
+package com.netgrif.application.engine.petrinet.domain.dataset
+
+abstract class StorageField extends Field {
+ static final long serialVersionUID = -9172755427378929924L
+ private Storage storage
+
+ StorageField() {
+ super()
+ }
+
+ StorageType getStorageType() {
+ if (storage == null) {
+ return StorageType.LOCAL
+ }
+ return storage.getType()
+ }
+
+ void setStorageType(StorageType storageType) {
+ this.storage.setType(storageType)
+ }
+
+ Storage getStorage() {
+ return storage
+ }
+
+ void setStorage(Storage storage) {
+ this.storage = storage
+ }
+}
diff --git a/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/StorageType.groovy b/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/StorageType.groovy
new file mode 100644
index 00000000000..9068fbfa198
--- /dev/null
+++ b/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/StorageType.groovy
@@ -0,0 +1,6 @@
+package com.netgrif.application.engine.petrinet.domain.dataset
+
+enum StorageType {
+ LOCAL,
+ MINIO;
+}
diff --git a/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/logic/action/ActionDelegate.groovy b/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/logic/action/ActionDelegate.groovy
index 7138f71aeed..acee051de68 100644
--- a/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/logic/action/ActionDelegate.groovy
+++ b/src/main/groovy/com/netgrif/application/engine/petrinet/domain/dataset/logic/action/ActionDelegate.groovy
@@ -756,6 +756,9 @@ class ActionDelegate {
value.each { id -> users.add(new UserFieldValue(userService.findById(id as String, false))) }
value = new UserListFieldValue(users)
}
+ if (field instanceof TaskField && targetTask.isPresent()) {
+ dataService.validateTaskRefValue(value, targetTask.get().getStringId());
+ }
field.value = value
saveChangedValue(field, targetCase)
}
diff --git a/src/main/java/com/netgrif/application/engine/auth/web/responsebodies/ProcessRoleFactory.java b/src/main/java/com/netgrif/application/engine/auth/web/responsebodies/ProcessRoleFactory.java
index 95f6b1ad19d..e0a46844b8f 100644
--- a/src/main/java/com/netgrif/application/engine/auth/web/responsebodies/ProcessRoleFactory.java
+++ b/src/main/java/com/netgrif/application/engine/auth/web/responsebodies/ProcessRoleFactory.java
@@ -19,12 +19,15 @@ public ProcessRole getProcessRole(com.netgrif.application.engine.petrinet.domain
/*if (!role.getStringId().equals(userProcessRole.getRoleId())) {
throw new IllegalArgumentException(String.format("ProcessRole StringId (%s) and UserProcessRole roleId (%s) must match!", role.getStringId(), userProcessRole.getRoleId()));
}*/
- ProcessRole result = new ProcessRole(role, locale);
- PetriNet net = petriNetService.get(new ObjectId(role.getNetId()));
- result.setNetStringId(net.getStringId());
- result.setNetImportId(net.getImportId());
- result.setNetVersion(net.getVersion().toString());
- return result;
+ if (!role.isGlobal()) {
+ ProcessRole result = new ProcessRole(role, locale);
+ PetriNet net = petriNetService.get(new ObjectId(role.getNetId()));
+ result.setNetStringId(net.getStringId());
+ result.setNetImportId(net.getImportId());
+ result.setNetVersion(net.getVersion().toString());
+ return result;
+ }
+ return new ProcessRole(role, locale);
}
}
diff --git a/src/main/java/com/netgrif/application/engine/files/StorageResolverService.java b/src/main/java/com/netgrif/application/engine/files/StorageResolverService.java
new file mode 100644
index 00000000000..89fd7f774cf
--- /dev/null
+++ b/src/main/java/com/netgrif/application/engine/files/StorageResolverService.java
@@ -0,0 +1,37 @@
+package com.netgrif.application.engine.files;
+
+import com.netgrif.application.engine.files.interfaces.IStorageService;
+import com.netgrif.application.engine.files.throwable.StorageNotFoundException;
+import com.netgrif.application.engine.petrinet.domain.dataset.StorageType;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+@Slf4j
+@Service
+public class StorageResolverService {
+
+ private Map<StorageType, IStorageService> storageServices;
+
+ @Autowired
+ private void setStorageServices(List<IStorageService> storageServices) {
+ this.storageServices = storageServices.stream().collect(Collectors.toMap(IStorageService::getType, Function.identity()));
+ }
+
+ public IStorageService resolve(StorageType type) {
+ if (storageServices == null) {
+ log.error("Storage services with interface IStorageService not found.");
+ throw new StorageNotFoundException("Remote Storage not available.");
+ }
+ if (storageServices.containsKey(type)) {
+ return storageServices.get(type);
+ }
+ throw new StorageNotFoundException("Storage Service with type: " + type + " not available.");
+
+ }
+}
diff --git a/src/main/java/com/netgrif/application/engine/files/interfaces/IStorageService.java b/src/main/java/com/netgrif/application/engine/files/interfaces/IStorageService.java
new file mode 100644
index 00000000000..f7554a0e6db
--- /dev/null
+++ b/src/main/java/com/netgrif/application/engine/files/interfaces/IStorageService.java
@@ -0,0 +1,27 @@
+package com.netgrif.application.engine.files.interfaces;
+
+import com.netgrif.application.engine.files.throwable.BadRequestException;
+import com.netgrif.application.engine.files.throwable.ServiceErrorException;
+import com.netgrif.application.engine.files.throwable.StorageException;
+import com.netgrif.application.engine.petrinet.domain.dataset.StorageField;
+import com.netgrif.application.engine.petrinet.domain.dataset.StorageType;
+import org.springframework.web.multipart.MultipartFile;
+
+import java.io.FileNotFoundException;
+import java.io.InputStream;
+
+public interface IStorageService {
+ StorageType getType();
+
+ InputStream get(StorageField<?> field, String path) throws BadRequestException, ServiceErrorException, FileNotFoundException;
+
+ boolean save(StorageField<?> field, String path, MultipartFile file) throws StorageException;
+
+ boolean save(StorageField<?> field, String path, InputStream stream) throws StorageException;
+
+ void delete(StorageField<?> field, String path) throws StorageException;
+
+ String getPreviewPath(String caseId, String fieldId, String name);
+
+ String getPath(String caseId, String fieldId, String name);
+}
diff --git a/src/main/java/com/netgrif/application/engine/files/local/LocalStorageService.java b/src/main/java/com/netgrif/application/engine/files/local/LocalStorageService.java
new file mode 100644
index 00000000000..b7f43aa0d58
--- /dev/null
+++ b/src/main/java/com/netgrif/application/engine/files/local/LocalStorageService.java
@@ -0,0 +1,87 @@
+package com.netgrif.application.engine.files.local;
+
+import com.netgrif.application.engine.files.interfaces.IStorageService;
+import com.netgrif.application.engine.files.throwable.BadRequestException;
+import com.netgrif.application.engine.files.throwable.ServiceErrorException;
+import com.netgrif.application.engine.files.throwable.StorageException;
+import com.netgrif.application.engine.petrinet.domain.dataset.StorageField;
+import com.netgrif.application.engine.petrinet.domain.dataset.StorageType;
+import com.netgrif.application.engine.workflow.domain.FileStorageConfiguration;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+import org.springframework.web.multipart.MultipartFile;
+
+import java.io.*;
+
+@Slf4j
+@Service
+public class LocalStorageService implements IStorageService {
+ FileStorageConfiguration fileStorageConfiguration;
+
+ @Autowired
+ public void setFileStorageConfiguration(FileStorageConfiguration fileStorageConfiguration) {
+ this.fileStorageConfiguration = fileStorageConfiguration;
+ }
+
+ @Override
+ public StorageType getType() {
+ return StorageType.LOCAL;
+ }
+
+ @Override
+ public InputStream get(StorageField<?> field, String path) throws BadRequestException, ServiceErrorException, FileNotFoundException {
+ return new FileInputStream(path);
+ }
+
+ @Override
+ public boolean save(StorageField<?> field, String path, MultipartFile file) throws StorageException {
+ try (InputStream stream = file.getInputStream()) {
+ return this.save(field, path, stream);
+ } catch (StorageException | IOException e) {
+ throw new StorageException("File cannot be saved", e);
+ }
+ }
+
+ @Override
+ public boolean save(StorageField<?> field, String path, InputStream stream) throws StorageException {
+ File savedFile = createNewFile(path);
+ try (FileOutputStream fout = new FileOutputStream(savedFile)) {
+ stream.transferTo(fout);
+ } catch (IOException e) {
+ log.error(e.getMessage());
+ throw new StorageException("File " + path + " could not be saved", e);
+ }
+ return true;
+ }
+
+ private File createNewFile(String path) throws StorageException {
+ File savedFile = new File(path);
+ savedFile.getParentFile().mkdirs();
+ try {
+ if (!savedFile.createNewFile()) {
+ savedFile.delete();
+ savedFile.createNewFile();
+ }
+ } catch (IOException e) {
+ throw new StorageException("Empty file " + path + " could not be created", e);
+ }
+ return savedFile;
+ }
+
+ @Override
+ public void delete(StorageField<?> field, String path) throws StorageException {
+ new File(path).delete();
+ }
+
+ @Override
+ public String getPreviewPath(String caseId, String fieldId, String name) {
+ return fileStorageConfiguration.getStoragePath() + "/file_preview/" + caseId + "/" + fieldId + "-" + name;
+ }
+
+ @Override
+ public String getPath(String caseId, String fieldId, String name) {
+ return fileStorageConfiguration.getStoragePath() + "/" + caseId + "/" + fieldId + "-" + name;
+ }
+
+}
diff --git a/src/main/java/com/netgrif/application/engine/files/minio/MinIoHostInfo.java b/src/main/java/com/netgrif/application/engine/files/minio/MinIoHostInfo.java
new file mode 100644
index 00000000000..e2ad4164503
--- /dev/null
+++ b/src/main/java/com/netgrif/application/engine/files/minio/MinIoHostInfo.java
@@ -0,0 +1,12 @@
+package com.netgrif.application.engine.files.minio;
+
+import lombok.Data;
+
+import java.util.Map;
+
+@Data
+public class MinIoHostInfo {
+ private String host;
+ private String user;
+ private String password;
+}
diff --git a/src/main/java/com/netgrif/application/engine/files/minio/MinIoProperties.java b/src/main/java/com/netgrif/application/engine/files/minio/MinIoProperties.java
new file mode 100644
index 00000000000..8863c93308a
--- /dev/null
+++ b/src/main/java/com/netgrif/application/engine/files/minio/MinIoProperties.java
@@ -0,0 +1,26 @@
+package com.netgrif.application.engine.files.minio;
+
+import lombok.Data;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.stereotype.Component;
+
+import java.util.Map;
+
+@Data
+@Component
+@ConfigurationProperties(prefix = "nae.storage.minio")
+public class MinIoProperties {
+ public static final String HOST = "host";
+ public static final String DEFAULT_BUCKET = "default";
+ private boolean enabled = false;
+ private Map<String, MinIoHostInfo> hosts;
+ /**
+ * Minimal part size is 5MB=5242880
+ * */
+ private long partSize = 5242880L;
+
+ public MinIoHostInfo getHosts(String host) {
+ return hosts.get(host);
+ }
+}
+
diff --git a/src/main/java/com/netgrif/application/engine/files/minio/MinIoStorageService.java b/src/main/java/com/netgrif/application/engine/files/minio/MinIoStorageService.java
new file mode 100644
index 00000000000..d8ee3d88c6b
--- /dev/null
+++ b/src/main/java/com/netgrif/application/engine/files/minio/MinIoStorageService.java
@@ -0,0 +1,142 @@
+package com.netgrif.application.engine.files.minio;
+
+import com.netgrif.application.engine.files.interfaces.IStorageService;
+import com.netgrif.application.engine.files.throwable.BadRequestException;
+import com.netgrif.application.engine.files.throwable.ServiceErrorException;
+import com.netgrif.application.engine.files.throwable.StorageException;
+import com.netgrif.application.engine.petrinet.domain.dataset.MinIoStorage;
+import com.netgrif.application.engine.petrinet.domain.dataset.StorageField;
+import com.netgrif.application.engine.petrinet.domain.dataset.StorageType;
+import io.minio.GetObjectArgs;
+import io.minio.MinioClient;
+import io.minio.PutObjectArgs;
+import io.minio.RemoveObjectArgs;
+import io.minio.errors.*;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.stereotype.Service;
+import org.springframework.web.multipart.MultipartFile;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.InvalidKeyException;
+import java.security.NoSuchAlgorithmException;
+
+@Slf4j
+@Service
+@ConditionalOnProperty(
+ value = "nae.storage.minio.enabled",
+ havingValue = "true"
+)
+public class MinIoStorageService implements IStorageService {
+ private MinIoProperties properties;
+
+ @Autowired
+ public void setProperties(MinIoProperties properties) {
+ this.properties = properties;
+ }
+
+ @Override
+ public StorageType getType() {
+ return StorageType.MINIO;
+ }
+
+ @Override
+ public InputStream get(StorageField<?> field, String path) throws BadRequestException, ServiceErrorException, FileNotFoundException {
+ MinIoStorage storage = (MinIoStorage) field.getStorage();
+ try (MinioClient minioClient = client(storage.getHost())) {
+ return minioClient.getObject(
+ GetObjectArgs.builder()
+ .bucket(storage.getBucket() )
+ .object(path)
+ .build()
+ );
+ } catch (ErrorResponseException e) {
+ log.error(e.getMessage(), e);
+ if (e.response().code() == 404) {
+ throw new FileNotFoundException("File " + path + " not found.");
+ } else if (e.response().code() == 400) {
+ throw new BadRequestException("Getting file from minio failed.", e);
+ } else {
+ throw new ServiceErrorException("Some http error from minio", e);
+ }
+ } catch (InvalidKeyException e) {
+ log.error("Key " + path + " is corrupted.", e);
+ throw new BadRequestException("Key " + path + " is corrupted.", e);
+ } catch (Exception e) {
+ log.error("Some internal error from minio", e);
+ throw new ServiceErrorException("The file cannot be retrieved", e);
+ }
+ }
+
+ @Override
+ public boolean save(StorageField<?> field, String path, MultipartFile file) throws StorageException {
+ try (InputStream stream = file.getInputStream()) {
+ return this.save(field, path, stream);
+ } catch (StorageException | IOException e) {
+ throw new StorageException("File cannot be saved", e);
+ }
+ }
+
+ @Override
+ public boolean save(StorageField<?> field, String path, InputStream stream) throws StorageException {
+ MinIoStorage storage = (MinIoStorage) field.getStorage();
+ try (MinioClient minioClient = client(storage.getHost())) {
+ return minioClient.putObject(PutObjectArgs
+ .builder()
+ .bucket(storage.getBucket()).object(path)
+ .stream(stream, -1, properties.getPartSize())
+ .build()).etag() != null;
+ } catch (ErrorResponseException e) {
+ log.error(e.getMessage(), e);
+ throw new StorageException(e.getMessage(), e);
+ } catch (Exception e) {
+ log.error(e.getMessage(), e);
+ throw new ServiceErrorException(e.getMessage());
+ }
+ }
+
+ @Override
+ public void delete(StorageField<?> field, String path) throws StorageException {
+ MinIoStorage storage = (MinIoStorage) field.getStorage();
+ try (MinioClient minioClient = client(storage.getHost())) {
+ minioClient.removeObject(RemoveObjectArgs
+ .builder()
+ .bucket(storage.getBucket())
+ .object(path)
+ .build());
+ } catch (InsufficientDataException | InternalException | InvalidResponseException |
+ IOException | NoSuchAlgorithmException | ServerException | XmlParserException e) {
+ throw new ServiceErrorException(e.getMessage(), e);
+ } catch (InvalidKeyException e) {
+ throw new BadRequestException(e.getMessage());
+ } catch (Exception e) {
+ log.error("File cannot be deleted", e);
+ throw new StorageException("File cannot be deleted", e);
+ }
+ }
+
+
+ @Override
+ public String getPreviewPath(String caseId, String fieldId, String name) {
+ return caseId + "-" + fieldId + "-" + name + ".file_preview";
+ }
+
+ @Override
+ public String getPath(String caseId, String fieldId, String name) {
+ return caseId + "/" + fieldId + "-" + name;
+ }
+
+ public static String getBucketOrDefault(String bucket) {
+ return bucket != null ? bucket : MinIoProperties.DEFAULT_BUCKET;
+ }
+
+ protected MinioClient client(String host) {
+ return MinioClient.builder()
+ .endpoint(properties.getHosts(host).getHost())
+ .credentials(properties.getHosts(host).getUser(), properties.getHosts(host).getPassword())
+ .build();
+ }
+}
diff --git a/src/main/java/com/netgrif/application/engine/files/throwable/BadRequestException.java b/src/main/java/com/netgrif/application/engine/files/throwable/BadRequestException.java
new file mode 100644
index 00000000000..090e7456d2b
--- /dev/null
+++ b/src/main/java/com/netgrif/application/engine/files/throwable/BadRequestException.java
@@ -0,0 +1,11 @@
+package com.netgrif.application.engine.files.throwable;
+
+public class BadRequestException extends RuntimeException {
+ public BadRequestException(String message) {
+ super(message);
+ }
+
+ public BadRequestException(String message, Throwable cause) {
+ super(message, cause);
+ }
+}
diff --git a/src/main/java/com/netgrif/application/engine/files/throwable/ServiceErrorException.java b/src/main/java/com/netgrif/application/engine/files/throwable/ServiceErrorException.java
new file mode 100644
index 00000000000..b4b9493630a
--- /dev/null
+++ b/src/main/java/com/netgrif/application/engine/files/throwable/ServiceErrorException.java
@@ -0,0 +1,11 @@
+package com.netgrif.application.engine.files.throwable;
+
+public class ServiceErrorException extends RuntimeException {
+ public ServiceErrorException(String message, Throwable cause) {
+ super(message, cause);
+ }
+
+ public ServiceErrorException(String message) {
+ super(message);
+ }
+}
diff --git a/src/main/java/com/netgrif/application/engine/files/throwable/StorageException.java b/src/main/java/com/netgrif/application/engine/files/throwable/StorageException.java
new file mode 100644
index 00000000000..a03fa9dfe7b
--- /dev/null
+++ b/src/main/java/com/netgrif/application/engine/files/throwable/StorageException.java
@@ -0,0 +1,8 @@
+package com.netgrif.application.engine.files.throwable;
+
+public class StorageException extends Exception {
+ public StorageException(String message, Throwable cause) {
+ super(message, cause);
+ }
+
+}
diff --git a/src/main/java/com/netgrif/application/engine/files/throwable/StorageNotEnabledException.java b/src/main/java/com/netgrif/application/engine/files/throwable/StorageNotEnabledException.java
new file mode 100644
index 00000000000..3d1f1585870
--- /dev/null
+++ b/src/main/java/com/netgrif/application/engine/files/throwable/StorageNotEnabledException.java
@@ -0,0 +1,8 @@
+package com.netgrif.application.engine.files.throwable;
+
+public class StorageNotEnabledException extends RuntimeException {
+ private static final long serialVersionUID = 7462958789076658518L;
+ public StorageNotEnabledException(String message) {
+ super(message);
+ }
+}
diff --git a/src/main/java/com/netgrif/application/engine/files/throwable/StorageNotFoundException.java b/src/main/java/com/netgrif/application/engine/files/throwable/StorageNotFoundException.java
new file mode 100644
index 00000000000..e8e26b6a354
--- /dev/null
+++ b/src/main/java/com/netgrif/application/engine/files/throwable/StorageNotFoundException.java
@@ -0,0 +1,7 @@
+package com.netgrif.application.engine.files.throwable;
+
+public class StorageNotFoundException extends RuntimeException {
+ public StorageNotFoundException(String message) {
+ super(message);
+ }
+}
diff --git a/src/main/java/com/netgrif/application/engine/importer/service/FieldFactory.java b/src/main/java/com/netgrif/application/engine/importer/service/FieldFactory.java
index db4d45c931d..578ff8f0883 100644
--- a/src/main/java/com/netgrif/application/engine/importer/service/FieldFactory.java
+++ b/src/main/java/com/netgrif/application/engine/importer/service/FieldFactory.java
@@ -1,12 +1,14 @@
package com.netgrif.application.engine.importer.service;
-import com.netgrif.application.engine.auth.service.interfaces.IUserService;
+import com.netgrif.application.engine.files.minio.MinIoProperties;
+import com.netgrif.application.engine.files.throwable.StorageNotEnabledException;
import com.netgrif.application.engine.importer.model.*;
import com.netgrif.application.engine.importer.service.throwable.MissingIconKeyException;
import com.netgrif.application.engine.petrinet.domain.Component;
import com.netgrif.application.engine.petrinet.domain.Format;
import com.netgrif.application.engine.petrinet.domain.I18nString;
import com.netgrif.application.engine.petrinet.domain.dataset.*;
+import com.netgrif.application.engine.petrinet.domain.dataset.factory.StorageFactory;
import com.netgrif.application.engine.petrinet.domain.dataset.logic.action.runner.Expression;
import com.netgrif.application.engine.petrinet.domain.dataset.logic.validation.DynamicValidation;
import com.netgrif.application.engine.petrinet.domain.views.View;
@@ -15,6 +17,7 @@
import com.netgrif.application.engine.workflow.service.interfaces.IDataValidationExpressionEvaluator;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
import java.time.LocalDate;
import java.time.LocalDateTime;
@@ -26,10 +29,15 @@
import java.util.function.Consumer;
import java.util.stream.Collectors;
+import static com.netgrif.application.engine.files.minio.MinIoStorageService.getBucketOrDefault;
+
@org.springframework.stereotype.Component
@Slf4j
public final class FieldFactory {
+ @Value("${nae.storage.default-type}")
+ private String defaultStorageType;
+
@Autowired
private FormatFactory formatFactory;
@@ -43,10 +51,13 @@ public final class FieldFactory {
private IDataValidator dataValidator;
@Autowired
- private IUserService userService;
+ private IDataValidationExpressionEvaluator dataValidationExpressionEvaluator;
+ private MinIoProperties minIoProperties;
@Autowired
- private IDataValidationExpressionEvaluator dataValidationExpressionEvaluator;
+ public void setMinIoProperties(MinIoProperties minIoProperties) {
+ this.minIoProperties = minIoProperties;
+ }
public static Set<I18nString> parseMultichoiceValue(Case useCase, String fieldId) {
Object values = useCase.getFieldValue(fieldId);
@@ -514,7 +525,7 @@ private UserListField buildUserListField(Data data, Importer importer) {
private FileField buildFileField(Data data) {
FileField fileField = new FileField();
- fileField.setRemote(data.getRemote() != null);
+ resolveStorage(data, fileField);
setDefaultValue(fileField, data, defaultValue -> {
if (defaultValue != null) {
fileField.setDefaultValue(defaultValue);
@@ -525,7 +536,7 @@ private FileField buildFileField(Data data) {
private FileListField buildFileListField(Data data) {
FileListField fileListField = new FileListField();
- fileListField.setRemote(data.getRemote() != null);
+ resolveStorage(data, fileListField);
setDefaultValues(fileListField, data, defaultValues -> {
if (defaultValues != null && !defaultValues.isEmpty()) {
fileListField.setDefaultValue(defaultValues);
@@ -842,4 +853,7 @@ private Map getFieldOptions(MapOptionsField, ?> field, Cas
}
}
+ private void resolveStorage(Data data, StorageField> field) {
+ field.setStorage(StorageFactory.createStorage(data, defaultStorageType, minIoProperties));
+ }
}
\ No newline at end of file
diff --git a/src/main/java/com/netgrif/application/engine/importer/service/Importer.java b/src/main/java/com/netgrif/application/engine/importer/service/Importer.java
index 4954d077d88..7c6551bd47d 100644
--- a/src/main/java/com/netgrif/application/engine/importer/service/Importer.java
+++ b/src/main/java/com/netgrif/application/engine/importer/service/Importer.java
@@ -1055,22 +1055,39 @@ protected void createRole(Role importRole) {
throw new IllegalArgumentException("Role ID '" + ProcessRole.ANONYMOUS_ROLE + "' is a reserved identifier, roles with this ID cannot be defined!");
}
+ ProcessRole role;
+ if (shouldInitializeRole(importRole)) {
+ role = initRole(importRole);
+ } else {
+ role = new ArrayList<>(processRoleService.findAllByImportId(ProcessRole.GLOBAL + importRole.getId())).get(0);
+ }
+ role.set_id(new ProcessResourceId(new ObjectId(net.getStringId())));
+
+ net.addRole(role);
+ roles.put(importRole.getId(), role);
+ }
+
+ protected boolean shouldInitializeRole(Role importRole) {
+ return importRole.isGlobal() == null || !importRole.isGlobal() ||
+ (importRole.isGlobal() && processRoleService.findAllByImportId(ProcessRole.GLOBAL + importRole.getId()).isEmpty());
+ }
+
+ protected ProcessRole initRole(Role importRole) {
ProcessRole role = new ProcessRole();
Map events = createEventsMap(importRole.getEvent());
-
- role.setImportId(importRole.getId());
+ role.setImportId(importRole.isGlobal() != null && importRole.isGlobal() ? ProcessRole.GLOBAL + importRole.getId() : importRole.getId());
role.setEvents(events);
-
if (importRole.getName() == null) {
role.setName(toI18NString(importRole.getTitle()));
} else {
role.setName(toI18NString(importRole.getName()));
}
- role.set_id(new ProcessResourceId(new ObjectId(net.getStringId())));
-
- role.setNetId(net.getStringId());
- net.addRole(role);
- roles.put(importRole.getId(), role);
+ if (importRole.isGlobal() != null && importRole.isGlobal()) {
+ role.setGlobal(importRole.isGlobal());
+ } else {
+ role.setNetId(net.getStringId());
+ }
+ return role;
}
protected Map createEventsMap(List events) {
diff --git a/src/main/java/com/netgrif/application/engine/petrinet/domain/dataset/factory/StorageFactory.java b/src/main/java/com/netgrif/application/engine/petrinet/domain/dataset/factory/StorageFactory.java
new file mode 100644
index 00000000000..7de0932f468
--- /dev/null
+++ b/src/main/java/com/netgrif/application/engine/petrinet/domain/dataset/factory/StorageFactory.java
@@ -0,0 +1,34 @@
+package com.netgrif.application.engine.petrinet.domain.dataset.factory;
+
+import com.netgrif.application.engine.files.minio.MinIoProperties;
+import com.netgrif.application.engine.files.throwable.StorageNotEnabledException;
+import com.netgrif.application.engine.importer.model.Data;
+import com.netgrif.application.engine.petrinet.domain.dataset.*;
+
+import static com.netgrif.application.engine.files.minio.MinIoStorageService.getBucketOrDefault;
+
+public class StorageFactory {
+
+ public static Storage createStorage(Data data, String defaultStorageType, MinIoProperties minIoProperties) {
+ Storage storage;
+ StorageType storageType = StorageType.valueOf((data.getStorage() == null || data.getStorage().getType() == null) ? defaultStorageType : data.getStorage().getType().toUpperCase());
+ switch (storageType) {
+ case MINIO:
+ storage = new MinIoStorage();
+ if (!minIoProperties.isEnabled()) {
+ throw new StorageNotEnabledException("Storage of type [" + StorageType.MINIO + "] is not enabled.");
+ }
+ if (data.getStorage().getHost() != null) {
+ storage.setHost(data.getStorage().getHost());
+ }
+ if (data.getStorage().getBucket() != null) {
+ ((MinIoStorage) storage).setBucket(getBucketOrDefault(data.getStorage().getBucket()));
+ }
+ break;
+ default:
+ storage = new Storage(StorageType.valueOf(defaultStorageType));
+ break;
+ }
+ return storage;
+ }
+}
diff --git a/src/main/java/com/netgrif/application/engine/petrinet/domain/roles/ProcessRole.java b/src/main/java/com/netgrif/application/engine/petrinet/domain/roles/ProcessRole.java
index 91cdab861da..68b05280b53 100755
--- a/src/main/java/com/netgrif/application/engine/petrinet/domain/roles/ProcessRole.java
+++ b/src/main/java/com/netgrif/application/engine/petrinet/domain/roles/ProcessRole.java
@@ -27,20 +27,23 @@ public class ProcessRole extends Imported {
public static final String ANONYMOUS_ROLE = "anonymous";
+ public static final String GLOBAL = "global_";
+
+ @Setter
+ private boolean global;
+
@Id
@Setter
private ProcessResourceId _id;
private I18nString name;
- @Getter
@Setter
private String netId;
@Setter
private String description;
- @Getter
@Setter
private Map events;
diff --git a/src/main/java/com/netgrif/application/engine/petrinet/service/ProcessRoleService.java b/src/main/java/com/netgrif/application/engine/petrinet/service/ProcessRoleService.java
index 540228b1b5d..9fced9b665a 100644
--- a/src/main/java/com/netgrif/application/engine/petrinet/service/ProcessRoleService.java
+++ b/src/main/java/com/netgrif/application/engine/petrinet/service/ProcessRoleService.java
@@ -61,7 +61,12 @@ public ProcessRoleService(ProcessRoleRepository processRoleRepository,
@Override
public List<ProcessRole> saveAll(Iterable<ProcessRole> entities) {
- return processRoleRepository.saveAll(entities);
+ return StreamSupport.stream(entities.spliterator(), false).map(processRole -> {
+ if (!processRole.isGlobal() || processRoleRepository.findAllByImportId(processRole.getImportId()).isEmpty()) {
+ return processRoleRepository.save(processRole);
+ }
+ return null;
+ }).filter(Objects::nonNull).collect(Collectors.toList());
}
@Override
@@ -89,24 +94,27 @@ public void assignRolesToUser(String userId, Set requestedRolesIds, Logg
Set<ProcessRole> rolesRemovedFromUser = getRolesRemovedFromUser(userOldRoles, requestedRoles);
String idOfPetriNetContainingRole = getPetriNetIdRoleBelongsTo(rolesNewToUser, rolesRemovedFromUser);
-
- if (idOfPetriNetContainingRole == null)
+ if (!isGlobalFromFirstRole(rolesNewToUser) && !isGlobalFromFirstRole(rolesRemovedFromUser) && idOfPetriNetContainingRole == null) {
return;
-
- PetriNet petriNet = petriNetService.getPetriNet(idOfPetriNetContainingRole);
-
+ }
+ PetriNet petriNet = null;
+ if (idOfPetriNetContainingRole != null) {
+ petriNet = petriNetService.getPetriNet(idOfPetriNetContainingRole);
+ }
Set<String> rolesNewToUserIds = mapUserRolesToIds(rolesNewToUser);
Set<String> rolesRemovedFromUserIds = mapUserRolesToIds(rolesRemovedFromUser);
Set<ProcessRole> newRoles = this.findByIds(rolesNewToUserIds);
Set<ProcessRole> removedRoles = this.findByIds(rolesRemovedFromUserIds);
-
- runAllPreActions(newRoles, removedRoles, user, petriNet, params);
+ if (petriNet != null) {
+ runAllPreActions(newRoles, removedRoles, user, petriNet, params);
+ }
requestedRoles = updateRequestedRoles(user, rolesNewToUser, rolesRemovedFromUser);
replaceUserRolesAndPublishEvent(requestedRolesIds, user, requestedRoles);
- runAllPostActions(newRoles, removedRoles, user, petriNet, params);
-
+ if (petriNet != null) {
+ runAllPostActions(newRoles, removedRoles, user, petriNet, params);
+ }
securityContextService.saveToken(userId);
if (Objects.equals(userId, loggedUser.getId())) {
loggedUser.getProcessRoles().clear();
@@ -136,6 +144,14 @@ private String getPetriNetIdRoleBelongsTo(Set<ProcessRole> newRoles, Set<ProcessRole> removedRoles) {
+ private boolean isGlobalFromFirstRole(Set<ProcessRole> roles) {
+ if (roles.isEmpty()) {
+ return false;
+ }
+ ProcessRole role = roles.iterator().next();
+ return role.isGlobal();
+ }
+
private String getPetriNetIdFromFirstRole(Set<ProcessRole> newRoles) {
return newRoles.iterator().next().getNetId();
}
@@ -291,7 +307,7 @@ public ProcessRole findById(String id) {
@Override
public void deleteRolesOfNet(PetriNet net, LoggedUser loggedUser) {
log.info("[" + net.getStringId() + "]: Initiating deletion of all roles of Petri net " + net.getIdentifier() + " version " + net.getVersion().toString());
- List<ProcessResourceId> deletedRoleIds = this.findAll(net).stream().map(ProcessRole::get_id).collect(Collectors.toList());
+ List<ProcessResourceId> deletedRoleIds = this.findAll(net.getStringId()).stream().filter(processRole -> processRole.getNetId() != null).map(ProcessRole::get_id).collect(Collectors.toList());
Set<String> deletedRoleStringIds = deletedRoleIds.stream().map(ProcessResourceId::toString).collect(Collectors.toSet());
List usersWithRemovedRoles = this.userService.findAllByProcessRoles(deletedRoleStringIds, false);
diff --git a/src/main/java/com/netgrif/application/engine/resource/domain/ExternalResource.java b/src/main/java/com/netgrif/application/engine/resource/domain/ExternalResource.java
new file mode 100644
index 00000000000..60ef15b763b
--- /dev/null
+++ b/src/main/java/com/netgrif/application/engine/resource/domain/ExternalResource.java
@@ -0,0 +1,22 @@
+package com.netgrif.application.engine.resource.domain;
+
+import org.springframework.core.io.InputStreamResource;
+import org.springframework.core.io.Resource;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+
+public class ExternalResource {
+
+ public Resource getResource(String filePath) {
+ try {
+ File file = new File(filePath);
+ InputStream in = new FileInputStream(file);
+ return new InputStreamResource(in);
+ } catch (IOException ex) {
+ throw new RuntimeException(ex);
+ }
+ }
+}
diff --git a/src/main/java/com/netgrif/application/engine/resource/domain/ExternalResourceLoader.java b/src/main/java/com/netgrif/application/engine/resource/domain/ExternalResourceLoader.java
new file mode 100644
index 00000000000..5a22920a082
--- /dev/null
+++ b/src/main/java/com/netgrif/application/engine/resource/domain/ExternalResourceLoader.java
@@ -0,0 +1,62 @@
+package com.netgrif.application.engine.resource.domain;
+
+import org.springframework.core.io.Resource;
+import org.springframework.core.io.ResourceLoader;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.Optional;
+
+public class ExternalResourceLoader implements ResourceLoader {
+
+ public static final String RESOURCES_FOLDER = "resources";
+ public static final String[] RESOURCE_PREFIXES = new String[]{
+ "resource:",
+ "nae:",
+ "nae-resource:",
+ "nr:"
+ };
+
+ public static final String DEFAULT_RESOURCE_PREFIX = RESOURCE_PREFIXES[0];
+
+ public static final String NAE_RESOURCE_PREFIX = RESOURCE_PREFIXES[1];
+
+ public static final String NAE_RESOURCE_RESOURCE_PREFIX = RESOURCE_PREFIXES[2];
+
+ public static final String NR_RESOURCE_PREFIX = RESOURCE_PREFIXES[3];
+
+ private final ResourceLoader delegate;
+
+ public ExternalResourceLoader(ResourceLoader delegate) {
+ this.delegate = delegate;
+ }
+
+ @Override
+ public Resource getResource(String location) {
+ if (location.isBlank()) return delegate.getResource(location);
+ Optional<String> detectedPrefix = getExternalResourcePrefix(location);
+ return detectedPrefix.isPresent() ? getResourceWithPrefix(location, detectedPrefix.get()) : delegate.getResource(location);
+ }
+
+ @Override
+ public ClassLoader getClassLoader() {
+ return this.delegate.getClassLoader();
+ }
+
+ /**
+ * Get detected resource prefix if the location is an external resource.
+ *
+ * @param location Resource location
+ * @return Optional of resource prefix. If the location is not external resource, empty Optional is returned.
+ */
+ public static Optional<String> getExternalResourcePrefix(String location) {
+ if (location == null || location.isBlank()) return Optional.empty();
+ return Arrays.stream(RESOURCE_PREFIXES).filter(location.toLowerCase()::startsWith).findFirst();
+ }
+
+ private Resource getResourceWithPrefix(String location, String prefix) {
+ String path = location.substring(prefix.length());
+ ExternalResource resource = new ExternalResource();
+ return resource.getResource(RESOURCES_FOLDER + File.pathSeparator + path);
+ }
+}
diff --git a/src/main/java/com/netgrif/application/engine/resource/service/ExternalResourceLoaderProcessor.java b/src/main/java/com/netgrif/application/engine/resource/service/ExternalResourceLoaderProcessor.java
new file mode 100644
index 00000000000..330a1be8c69
--- /dev/null
+++ b/src/main/java/com/netgrif/application/engine/resource/service/ExternalResourceLoaderProcessor.java
@@ -0,0 +1,33 @@
+package com.netgrif.application.engine.resource.service;
+
+import com.netgrif.application.engine.resource.domain.ExternalResourceLoader;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.context.ResourceLoaderAware;
+import org.springframework.core.io.DefaultResourceLoader;
+import org.springframework.core.io.ProtocolResolver;
+import org.springframework.core.io.Resource;
+import org.springframework.core.io.ResourceLoader;
+import org.springframework.stereotype.Component;
+
+@Slf4j
+@Component
+public class ExternalResourceLoaderProcessor implements ResourceLoaderAware, ProtocolResolver {
+
+ @Override
+ public void setResourceLoader(ResourceLoader resourceLoader) {
+ if(DefaultResourceLoader.class.isAssignableFrom(resourceLoader.getClass())) {
+ ((DefaultResourceLoader)resourceLoader).addProtocolResolver(this);
+ } else {
+ log.error("Could not assign protocol for resource loader.");
+ }
+ }
+
+ @Override
+ public Resource resolve(String location, ResourceLoader resourceLoader) {
+ if(ExternalResourceLoader.getExternalResourcePrefix(location).isPresent()){
+ ExternalResourceLoader loader = new ExternalResourceLoader(resourceLoader);
+ return loader.getResource(location);
+ }
+ return null;
+ }
+}
diff --git a/src/main/java/com/netgrif/application/engine/startup/ApplicationRunnerExecutor.java b/src/main/java/com/netgrif/application/engine/startup/ApplicationRunnerExecutor.java
index 00b80dd20bf..bbc943771b3 100644
--- a/src/main/java/com/netgrif/application/engine/startup/ApplicationRunnerExecutor.java
+++ b/src/main/java/com/netgrif/application/engine/startup/ApplicationRunnerExecutor.java
@@ -73,7 +73,7 @@ protected void callRunner(T runner, ApplicationRunnerOrderResolver.SortedRunners
protected ApplicationRunnerOrderResolver.SortedRunners resolveRunners() {
Map customRunners = (Map) ApplicationContextProvider.getAppContext().getBeansOfType(GenericTypeResolver.resolveTypeArgument(getClass(), ApplicationRunnerExecutor.class));
ApplicationRunnerOrderResolver.SortedRunners runners = orderResolver.sortByRunnerOrderAnnotation(customRunners.values());
- runners.sortUnresolvedRunners();
+ runners.resolveAllRunners();
return runners;
}
diff --git a/src/main/java/com/netgrif/application/engine/startup/ApplicationRunnerOrderResolver.java b/src/main/java/com/netgrif/application/engine/startup/ApplicationRunnerOrderResolver.java
index aebbe9a678a..dcb78301817 100644
--- a/src/main/java/com/netgrif/application/engine/startup/ApplicationRunnerOrderResolver.java
+++ b/src/main/java/com/netgrif/application/engine/startup/ApplicationRunnerOrderResolver.java
@@ -27,7 +27,7 @@ public class ApplicationRunnerOrderResolver {
* @param <T> the type of the runners
* @param runners the collection of runners to be sorted
* @return a {@link SortedRunners} object containing two lists: one with the sorted runners and one with the unresolved runners.
- * To resolve order of the unresolved list call {@link SortedRunners#sortUnresolvedRunners()} method.
+ * To resolve order of the unresolved list call {@link SortedRunners#resolveAllRunners()} method.
*/
public SortedRunners sortByRunnerOrderAnnotation(Collection runners) {
if (runners == null) return null;
@@ -90,7 +90,20 @@ public SortedRunners(List sorted, List unresolved) {
* @return {@code true} if all unresolved runners have been successfully sorted and the unresolved list is empty;
* {@code false} otherwise.
*/
- public boolean sortUnresolvedRunners() {
+ public boolean resolveAllRunners() {
+ sortUnresolvedRunners();
+ replaced.values().forEach(this::removeRunner);
+ return unresolved.isEmpty();
+ }
+
+ protected void removeRunner(Class> runnerClass) {
+ int classIndex = indexOfClass(sorted, runnerClass);
+ if (classIndex == -1) return;
+ T runner = sorted.remove(classIndex);
+ if (runner != null) removeRunner(runnerClass);
+ }
+
+ protected boolean sortUnresolvedRunners() {
boolean changed = false;
changed = changed || resolveSortingAnnotation(BeforeRunner.class, this::insertBeforeRunner);
changed = changed || resolveSortingAnnotation(AfterRunner.class, this::insertAfterRunner);
diff --git a/src/main/java/com/netgrif/application/engine/workflow/domain/repositories/CaseRepository.java b/src/main/java/com/netgrif/application/engine/workflow/domain/repositories/CaseRepository.java
index 6f037a46fa0..78d24df4143 100644
--- a/src/main/java/com/netgrif/application/engine/workflow/domain/repositories/CaseRepository.java
+++ b/src/main/java/com/netgrif/application/engine/workflow/domain/repositories/CaseRepository.java
@@ -2,6 +2,7 @@
import com.netgrif.application.engine.workflow.domain.Case;
import com.netgrif.application.engine.workflow.domain.QCase;
+import com.netgrif.application.engine.workflow.domain.ProcessResourceId;
import org.bson.types.ObjectId;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
@@ -34,6 +35,20 @@ public interface CaseRepository extends MongoRepository, QuerydslP
@Query("{ '_id.objectId': ?0 }")
Optional findByIdObjectId(ObjectId objectId);
+ default Optional<Case> findById(String compositeId) {
+ String[] parts = compositeId.split(ProcessResourceId.ID_SEPARATOR);
+ if (parts.length == 2) {
+ String networkId = parts[0];
+ ObjectId objectId = new ObjectId(parts[1]);
+ return findByNetworkIdAndObjectId(networkId, objectId);
+ } else {
+ return findByIdObjectId(new ObjectId(compositeId));
+ }
+ }
+
+ @Query("{ '_id.shortProcessId': ?0, '_id.objectId': ?1 }")
+ Optional<Case> findByNetworkIdAndObjectId(String processId, ObjectId objectId);
+
@Override
default void customize(QuerydslBindings bindings, QCase qCase) {
}
diff --git a/src/main/java/com/netgrif/application/engine/workflow/service/DataService.java b/src/main/java/com/netgrif/application/engine/workflow/service/DataService.java
index f1b317c1755..c39f546d1f2 100644
--- a/src/main/java/com/netgrif/application/engine/workflow/service/DataService.java
+++ b/src/main/java/com/netgrif/application/engine/workflow/service/DataService.java
@@ -9,6 +9,9 @@
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.netgrif.application.engine.auth.domain.IUser;
import com.netgrif.application.engine.auth.service.interfaces.IUserService;
+import com.netgrif.application.engine.files.StorageResolverService;
+import com.netgrif.application.engine.files.interfaces.IStorageService;
+import com.netgrif.application.engine.files.throwable.StorageException;
import com.netgrif.application.engine.event.events.data.GetDataEvent;
import com.netgrif.application.engine.event.events.data.SetDataEvent;
import com.netgrif.application.engine.history.domain.dataevents.GetDataEventLog;
@@ -55,7 +58,6 @@
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.*;
-import java.net.URL;
import java.time.LocalDateTime;
import java.util.List;
import java.util.*;
@@ -98,6 +100,9 @@ public class DataService implements IDataService {
@Autowired
protected IValidationService validation;
+ @Autowired
+ private StorageResolverService storageResolverService;
+
@Value("${nae.image.preview.scaling.px:400}")
protected int imageScale;
@@ -106,7 +111,7 @@ public class DataService implements IDataService {
@Override
public GetDataEventOutcome getData(String taskId) {
- return getData(taskId, null);
+ return getData(taskId, new HashMap<>());
}
@Override
@@ -246,7 +251,7 @@ public SetDataEventOutcome setData(Task task, ObjectNode values, Map());
}
@Override
- public FileFieldInputStream getFileByTaskAndName(String taskId, String fieldId, String name, Map<String, String> params) {
+ public FileFieldInputStream getFileByTaskAndName(String taskId, String fieldId, String name, Map<String, String> params) throws FileNotFoundException {
Task task = taskService.findOne(taskId);
return getFileByCaseAndName(task.getCaseId(), fieldId, name, params);
}
@Override
- public FileFieldInputStream getFileByCase(String caseId, Task task, String fieldId, boolean forPreview) {
+ public FileFieldInputStream getFileByCase(String caseId, Task task, String fieldId, boolean forPreview) throws FileNotFoundException {
Case useCase = workflowService.findOne(caseId);
FileField field = (FileField) useCase.getPetriNet().getDataSet().get(fieldId);
return getFile(useCase, task, field, forPreview);
}
@Override
- public FileFieldInputStream getFileByCaseAndName(String caseId, String fieldId, String name) {
+ public FileFieldInputStream getFileByCaseAndName(String caseId, String fieldId, String name) throws FileNotFoundException {
return getFileByCaseAndName(caseId, fieldId, name, new HashMap<>());
}
@Override
- public FileFieldInputStream getFileByCaseAndName(String caseId, String fieldId, String name, Map params) {
+ public FileFieldInputStream getFileByCaseAndName(String caseId, String fieldId, String name, Map params) throws FileNotFoundException {
Case useCase = workflowService.findOne(caseId);
FileListField field = (FileListField) useCase.getPetriNet().getDataSet().get(fieldId);
return getFileByName(useCase, field, name, params);
}
@Override
- public FileFieldInputStream getFileByName(Case useCase, FileListField field, String name) {
+ public FileFieldInputStream getFileByName(Case useCase, FileListField field, String name) throws FileNotFoundException {
return getFileByName(useCase, field, name, new HashMap<>());
}
@Override
- public FileFieldInputStream getFileByName(Case useCase, FileListField field, String name, Map params) {
+ public FileFieldInputStream getFileByName(Case useCase, FileListField field, String name, Map params) throws FileNotFoundException {
runGetActionsFromFileField(field.getEvents(), useCase, params);
if (useCase.getFieldValue(field.getStringId()) == null)
return null;
@@ -480,43 +485,35 @@ public FileFieldInputStream getFileByName(Case useCase, FileListField field, Str
workflowService.save(useCase);
field.setValue((FileListFieldValue) useCase.getFieldValue(field.getStringId()));
- Optional fileField = field.getValue().getNamesPaths().stream().filter(namePath -> namePath.getName().equals(name)).findFirst();
- if (!fileField.isPresent() || fileField.get().getPath() == null) {
+ Optional fileFieldValue = field.getValue().getNamesPaths().stream().filter(namePath -> namePath.getName().equals(name)).findFirst();
+ if (fileFieldValue.isEmpty() || fileFieldValue.get().getPath() == null) {
log.error("File " + name + " not found!");
- return null;
- }
-
- try {
- return new FileFieldInputStream(field.isRemote() ? download(fileField.get().getPath()) :
- new FileInputStream(fileField.get().getPath()), name);
- } catch (IOException e) {
- log.error("Getting file failed: ", e);
- return null;
+ throw new FileNotFoundException("File " + name + " not found!");
}
+ return new FileFieldInputStream(storageResolverService.resolve(field.getStorageType()).get(field, fileFieldValue.get().getPath()), name);
}
@Override
- public FileFieldInputStream getFile(Case useCase, Task task, FileField field, boolean forPreview) {
+ public FileFieldInputStream getFile(Case useCase, Task task, FileField field, boolean forPreview) throws FileNotFoundException {
return getFile(useCase, task, field, forPreview, new HashMap<>());
}
@Override
- public FileFieldInputStream getFile(Case useCase, Task task, FileField field, boolean forPreview, Map params) {
+ public FileFieldInputStream getFile(Case useCase, Task task, FileField field, boolean forPreview, Map params) throws FileNotFoundException {
runGetActionsFromFileField(field.getEvents(), useCase, params);
- if (useCase.getFieldValue(field.getStringId()) == null)
- return null;
+ if (useCase.getFieldValue(field.getStringId()) == null) {
+ throw new FileNotFoundException("Field " + field.getStringId() + " not found on case " + useCase.getStringId());
+ }
workflowService.save(useCase);
field.setValue((FileFieldValue) useCase.getFieldValue(field.getStringId()));
-
try {
if (forPreview) {
return getFilePreview(field, useCase);
} else {
- return new FileFieldInputStream(field, field.isRemote() ? download(field.getValue().getPath()) :
- new FileInputStream(field.getValue().getPath()));
+ return new FileFieldInputStream(field, storageResolverService.resolve(field.getStorageType()).get(field, field.getValue().getPath()));
}
- } catch (IOException e) {
+ } catch (IOException | StorageException e) {
log.error("Getting file failed: ", e);
return null;
}
@@ -530,35 +527,37 @@ private void runGetActionsFromFileField(Map events, Ca
}
}
- private FileFieldInputStream getFilePreview(FileField field, Case useCase) throws IOException {
- File localPreview = new File(field.getFilePreviewPath(useCase.getStringId()));
- if (localPreview.exists()) {
- return new FileFieldInputStream(field, new FileInputStream(localPreview));
- }
- File file;
- if (field.isRemote()) {
- file = getRemoteFile(field);
- } else {
- file = new File(field.getValue().getPath());
+ private FileFieldInputStream getFilePreview(FileField field, Case useCase) throws IOException, StorageException {
+ IStorageService storageService = storageResolverService.resolve(field.getStorageType());
+ InputStream stream = storageService.get(field, field.getValue().getPath());
+ File file = File.createTempFile(field.getStringId(), "." + FileFieldDataType.resolveTypeFromName(field.getValue().getName()).extension);
+ file.deleteOnExit();
+ FileOutputStream fos = new FileOutputStream(file);
+ IOUtils.copy(stream, fos);
+ fos.close();
+ stream.close();
+ byte[] bytes = generateFilePreviewToStream(file).toByteArray();
+ try (InputStream inputStream = new ByteArrayInputStream(bytes)) {
+ String previewPath = storageService.getPreviewPath(useCase.getStringId(), field.getImportId(), field.getValue().getName());
+ storageService.save(field, previewPath, inputStream);
+ field.getValue().setPreviewPath(previewPath);
+ inputStream.reset();
+ return new FileFieldInputStream(field, inputStream);
+ } catch (StorageException e) {
+ stream.close();
+ throw new EventNotExecutableException("File preview cannot be saved", e);
}
- int dot = file.getName().lastIndexOf(".");
- FileFieldDataType fileType = FileFieldDataType.resolveType((dot == -1) ? "" : file.getName().substring(dot + 1));
+ }
+
+ private ByteArrayOutputStream generateFilePreviewToStream(File file) throws IOException {
+ FileFieldDataType fileType = FileFieldDataType.resolveTypeFromName(file.getName());
BufferedImage image = getBufferedImageFromFile(file, fileType);
if (image.getWidth() > imageScale || image.getHeight() > imageScale) {
image = scaleImagePreview(image);
}
ByteArrayOutputStream os = new ByteArrayOutputStream();
ImageIO.write(image, !fileType.extension.equals(FileFieldDataType.PDF.extension) ? fileType.extension : FileFieldDataType.JPG.extension, os);
- saveFilePreview(localPreview, os);
- return new FileFieldInputStream(field, new ByteArrayInputStream(os.toByteArray()));
- }
-
- private void saveFilePreview(File localPreview, ByteArrayOutputStream os) throws IOException {
- localPreview.getParentFile().mkdirs();
- localPreview.createNewFile();
- FileOutputStream fos = new FileOutputStream(localPreview);
- fos.write(os.toByteArray());
- fos.close();
+ return os;
}
private BufferedImage getBufferedImageFromFile(File file, FileFieldDataType fileType) throws IOException {
@@ -567,9 +566,9 @@ private BufferedImage getBufferedImageFromFile(File file, FileFieldDataType file
PDDocument document = PDDocument.load(file);
PDFRenderer renderer = new PDFRenderer(document);
image = renderer.renderImage(0);
+ document.close();
} else {
image = ImageIO.read(file);
-
}
return image;
}
@@ -584,20 +583,10 @@ private BufferedImage scaleImagePreview(BufferedImage image) {
return image;
}
- private File getRemoteFile(FileField field) throws IOException {
- File file;
- InputStream is = download(field.getValue().getPath());
- file = File.createTempFile(field.getStringId(), "pdf");
- file.deleteOnExit();
- FileOutputStream fos = new FileOutputStream(file);
- IOUtils.copy(is, fos);
- return file;
- }
@Override
- public InputStream download(String url) throws IOException {
- URL connection = new URL(url);
- return new BufferedInputStream(connection.openStream());
+ public InputStream download(FileListField field, FileFieldValue fieldValue) throws StorageException, FileNotFoundException {
+ return storageResolverService.resolve(field.getStorageType()).get(field, fieldValue.getPath());
}
@Override
@@ -611,12 +600,26 @@ public SetDataEventOutcome saveFile(String taskId, String fieldId, MultipartFile
ImmutablePair pair = getCaseAndFileField(taskId, fieldId);
FileField field = pair.getRight();
Case useCase = pair.getLeft();
+ IStorageService storageService = storageResolverService.resolve(field.getStorageType());
+ try {
+ if (useCase.getDataSet().get(field.getStringId()).getValue() != null && field.getValue().getPath() != null) {
+ storageService.delete(field, field.getValue().getPath());
+ if (field.getValue().getPreviewPath() != null) {
+ storageService.delete(field, field.getValue().getPreviewPath());
+ }
+ useCase.getDataSet().get(field.getStringId()).setValue(null);
+ }
- if (field.isRemote()) {
- upload(useCase, field, multipartFile);
- } else {
- saveLocalFile(useCase, field, multipartFile);
+ field.setValue(multipartFile.getOriginalFilename());
+ String path = storageService.getPath(useCase.getStringId(), field.getStringId(), multipartFile.getOriginalFilename());
+ field.getValue().setPath(path);
+ storageService.save(field, path, multipartFile);
+ } catch (StorageException e) {
+ log.error("File " + multipartFile.getOriginalFilename() + " in case " + useCase.getStringId() + " could not be saved to file field " + field.getStringId(), e);
+ throw new EventNotExecutableException("File " + multipartFile.getOriginalFilename() + " in case " + useCase.getStringId() + " could not be saved to file field " + field.getStringId(), e);
}
+
+ useCase.getDataSet().get(field.getStringId()).setValue(field.getValue());
return new SetDataEventOutcome(useCase, task, getChangedFieldByFileFieldContainer(fieldId, task, useCase, params));
}
@@ -631,12 +634,26 @@ public SetDataEventOutcome saveFiles(String taskId, String fieldId, MultipartFil
ImmutablePair pair = getCaseAndFileListField(taskId, fieldId);
FileListField field = pair.getRight();
Case useCase = pair.getLeft();
+ IStorageService storageService = storageResolverService.resolve(field.getStorageType());
+ for (MultipartFile multipartFile : multipartFiles) {
+ try {
+ if (field.getValue() != null && field.getValue().getNamesPaths() != null) {
+ Optional fileFieldValue = field.getValue().getNamesPaths().stream().filter(namePath -> namePath.getName().equals(multipartFile.getOriginalFilename())).findFirst();
+ if (fileFieldValue.isPresent()) {
+ storageService.delete(field, fileFieldValue.get().getPath());
+ field.getValue().getNamesPaths().remove(fileFieldValue.get());
+ }
+ }
+ String path = storageService.getPath(useCase.getStringId(), field.getStringId(), multipartFile.getOriginalFilename());
+ field.addValue(multipartFile.getOriginalFilename(), path);
+ storageService.save(field, path, multipartFile);
+ } catch (StorageException e) {
+ log.error(e.getMessage());
+ throw new EventNotExecutableException("File " + multipartFile.getOriginalFilename() + " in case " + useCase.getStringId() + " could not be saved to file list field " + field.getStringId(), e);
+ }
- if (field.isRemote()) {
- upload(useCase, field, multipartFiles);
- } else {
- saveLocalFiles(useCase, field, multipartFiles);
}
+ useCase.getDataSet().get(field.getStringId()).setValue(field.getValue());
return new SetDataEventOutcome(useCase, task, getChangedFieldByFileFieldContainer(fieldId, task, useCase, params));
}
@@ -650,78 +667,6 @@ private List getChangedFieldByFileFieldContainer(String fieldId, T
return outcomes;
}
- private boolean saveLocalFiles(Case useCase, FileListField field, MultipartFile[] multipartFiles) {
- for (MultipartFile oneFile : multipartFiles) {
- if (field.getValue() != null && field.getValue().getNamesPaths() != null) {
- Optional fileField = field.getValue().getNamesPaths().stream().filter(namePath -> namePath.getName().equals(oneFile.getOriginalFilename())).findFirst();
- if (fileField.isPresent()) {
- new File(field.getFilePath(useCase.getStringId(), oneFile.getOriginalFilename())).delete();
- field.getValue().getNamesPaths().remove(fileField.get());
- }
- }
-
- field.addValue(oneFile.getOriginalFilename(), field.getFilePath(useCase.getStringId(), oneFile.getOriginalFilename()));
- File file = new File(field.getFilePath(useCase.getStringId(), oneFile.getOriginalFilename()));
-
- try {
- writeFile(oneFile, file);
- } catch (IOException e) {
- log.error(e.getMessage());
- throw new EventNotExecutableException("File " + oneFile.getName() + " in case " + useCase.getStringId() + " could not be saved to file list field " + field.getStringId(), e);
- }
- }
- useCase.getDataSet().get(field.getStringId()).setValue(field.getValue());
- return true;
- }
-
- private boolean saveLocalFile(Case useCase, FileField field, MultipartFile multipartFile) {
- if (useCase.getDataSet().get(field.getStringId()).getValue() != null) {
- new File(field.getFilePath(useCase.getStringId())).delete();
- useCase.getDataSet().get(field.getStringId()).setValue(null);
- }
-
- field.setValue(multipartFile.getOriginalFilename());
- field.getValue().setPath(field.getFilePath(useCase.getStringId()));
- File file = new File(field.getFilePath(useCase.getStringId()));
- try {
- writeFile(multipartFile, file);
- } catch (IOException e) {
- log.error(e.getMessage());
- throw new EventNotExecutableException("File " + multipartFile.getName() + " in case " + useCase.getStringId() + " could not be saved to file field " + field.getStringId(), e);
- }
-
- useCase.getDataSet().get(field.getStringId()).setValue(field.getValue());
- return true;
- }
-
- private void writeFile(MultipartFile multipartFile, File file) throws IOException {
- file.getParentFile().mkdirs();
- if (!file.createNewFile()) {
- file.delete();
- file.createNewFile();
- }
-
- FileOutputStream fout = new FileOutputStream(file);
- fout.write(multipartFile.getBytes());
- fout.close();
- }
-
- protected boolean upload(Case useCase, FileField field, MultipartFile multipartFile) {
- throw new UnsupportedOperationException("Upload new file to the remote storage is not implemented yet.");
- }
-
- protected boolean upload(Case useCase, FileListField field, MultipartFile[] multipartFiles) {
- throw new UnsupportedOperationException("Upload new files to the remote storage is not implemented yet.");
- }
-
- protected boolean deleteRemote(Case useCase, FileField field) {
- throw new UnsupportedOperationException("Delete file from the remote storage is not implemented yet.");
- }
-
- protected boolean deleteRemote(Case useCase, FileListField field, String name) {
- throw new UnsupportedOperationException("Delete file from the remote storage is not implemented yet.");
- }
-
@Override
public SetDataEventOutcome deleteFile(String taskId, String fieldId) {
return deleteFile(taskId, fieldId, new HashMap<>());
@@ -733,13 +678,16 @@ public SetDataEventOutcome deleteFile(String taskId, String fieldId, Map fileField = field.getValue().getNamesPaths().stream().filter(namePath -> namePath.getName().equals(name)).findFirst();
-
- if (fileField.isPresent()) {
- if (field.isRemote()) {
- deleteRemote(useCase, field, name);
- } else {
- new File(fileField.get().getPath()).delete();
- field.getValue().getNamesPaths().remove(fileField.get());
+ IStorageService storageService = storageResolverService.resolve(field.getStorageType());
+ Optional fileFieldValue = field.getValue().getNamesPaths().stream().filter(namePath -> namePath.getName().equals(name)).findFirst();
+ if (fileFieldValue.isPresent()) {
+ try {
+ storageService.delete(field, fileFieldValue.get().getPath());
+ field.getValue().getNamesPaths().remove(fileFieldValue.get());
+ useCase.getDataSet().get(field.getStringId()).setValue(field.getValue());
+ } catch (StorageException e) {
+ log.error(e.getMessage());
+ throw new EventNotExecutableException("File " + name + " in case " + useCase.getStringId() + " and field " + fieldId + " could not be deleted.", e);
}
- useCase.getDataSet().get(field.getStringId()).setValue(field.getValue());
}
return new SetDataEventOutcome(useCase, task, getChangedFieldByFileFieldContainer(fieldId, task, useCase, params));
}
@@ -798,10 +746,8 @@ public Page setImmediateFields(Page tasks) {
@Override
public List getImmediateFields(Task task) {
Case useCase = workflowService.findOne(task.getCaseId());
-
List fields = task.getImmediateDataFields().stream().map(id -> fieldFactory.buildFieldWithoutValidation(useCase, id, task.getTransitionId())).collect(Collectors.toList());
LongStream.range(0L, fields.size()).forEach(index -> fields.get((int) index).setOrder(index));
-
return fields;
}
@@ -841,7 +787,7 @@ public Case applyFieldConnectedChanges(Case useCase, Field field) {
}
@Override
- public SetDataEventOutcome changeComponentProperties(Case useCase, String transitionId, String fieldId, Map properties) {
+ public SetDataEventOutcome changeComponentProperties(Case useCase, String transitionId, String fieldId, Map properties) {
Predicate predicate = QTask.task.caseId.eq(useCase.getStringId()).and(QTask.task.transitionId.eq(transitionId));
Task task = taskService.searchOne(predicate);
return this.changeComponentProperties(useCase, task, fieldId, properties);
@@ -889,7 +835,7 @@ private List resolveDataEvents(Field field, DataEventType trigger,
return eventService.processDataEvents(field, trigger, phase, useCase, task, params);
}
- private Object parseFieldsValues(JsonNode jsonNode, DataField dataField) {
+ private Object parseFieldsValues(JsonNode jsonNode, DataField dataField, String taskId) {
ObjectNode node = (ObjectNode) jsonNode;
Object value;
switch (getFieldTypeFromNode(node)) {
@@ -959,8 +905,9 @@ private Object parseFieldsValues(JsonNode jsonNode, DataField dataField) {
value = list;
break;
case "taskRef":
- value = parseListStringValues(node);
- // TODO 29.9.2020: validate task ref value? is such feature desired?
+ List listTask = parseListStringValues(node);
+ validateTaskRefValue(listTask, taskId);
+ value = listTask;
break;
case "stringCollection":
value = parseListStringValues(node);
@@ -1112,7 +1059,8 @@ private Map parseOptions(JsonNode node) {
SimpleModule module = new SimpleModule();
module.addDeserializer(I18nString.class, new I18nStringDeserializer());
mapper.registerModule(module);
- Map optionsMapped = mapper.convertValue(optionsNode, new TypeReference