Skip to main content
summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
Diffstat (limited to 'org.eclipse.osbp.xtext.datainterchange/src/org/eclipse')
-rw-r--r--org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/DataDSL.xtext74
-rw-r--r--org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/DataDSLRuntimeModule.java105
-rw-r--r--org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/GenerateDataDSL.mwe2212
-rw-r--r--org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/formatting/DataDSLFormatter.xtend17
-rw-r--r--org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/imports/ShouldImportProvider.java21
-rw-r--r--org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/jvmmodel/DataDSLJvmModelInferrer.xtend775
-rw-r--r--org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/jvmmodel/DataDSLModelGenerator.xtend609
-rw-r--r--org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/jvmmodel/DataResult.java20
-rw-r--r--org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/scoping/DataDSLImportSectionNamespaceScopeProvider.java8
-rw-r--r--org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/scoping/DataDSLScopeProvider.xtend148
-rw-r--r--org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/validation/DataDSLValidator.xtend31
-rw-r--r--org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/valueconverter/DataDSLQualifiedNameProvider.java3
12 files changed, 1169 insertions, 854 deletions
diff --git a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/DataDSL.xtext b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/DataDSL.xtext
index 16f1808..babe2ab 100644
--- a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/DataDSL.xtext
+++ b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/DataDSL.xtext
@@ -12,7 +12,7 @@
*
*/
-grammar org.eclipse.osbp.xtext.datainterchange.DataDSL with org.eclipse.xtext.xbase.annotations.XbaseWithAnnotations
+grammar org.eclipse.osbp.xtext.datainterchange.DataDSL with org.eclipse.osbp.xtext.oxtype.OXtype
import "http://osbp.eclipse.org/xtext/datainterchange/DataDSL"
@@ -21,49 +21,52 @@ import "http://osbp.eclipse.org/dsl/entity/v1" as entity
import "http://www.eclipse.org/emf/2002/Ecore" as ecore
DataInterchangeModel:
+ importSection=XImportSection?
packages+=DataInterchangePackage*;
DataInterchangePackage:
{DataInterchangePackage} 'package' name=QualifiedName 'title' title=TRANSLATABLESTRING
- ('{' (imports+=DataInterchangeImport)* (datInts+=DataInterchange)* '}' )?;
+ ('{' (datInts+=DataInterchange)* '}' )?;
DataInterchange:
{DataInterchange} 'interchange' name=TRANSLATABLEID
- (description?='described by' descriptionValue=TRANSLATABLESTRING)?
- (createReport?='create report')?
- ('averageElementSize' elementSize=INT)?
- ('progressBarStyle' progressBarStyle=ProgressBarStylesEnum)?
- (refreshEnabled?='refreshViewWhenFinished' refresh=STRING)?
- 'file' fileEndpoint=DataInterchangeFile 'beans' '{' (path+=DataInterchangeBean)* '}';
+ (description?='describedBy' descriptionValue=TRANSLATABLESTRING)?
+ mode=EntityManagerMode
+ ('vectorName' vectorName=STRING)?
+ (createReport?='report')?
+ ('elementSize' elementSize=INT)?
+ 'file' fileEndpoint=DataInterchangeFile 'path' '{' (path+=DataInterchangeBean)* '}';
DataInterchangeFile:
DataInterchangeFileXML | DataInterchangeFileCSV | DataInterchangeFileEDI;
DataInterchangeFileXML:
- {DataInterchangeFileXML} 'XML' fileURL=STRING;
+ {DataInterchangeFileXML} 'XML' fileURL=STRING ((byAttribute?='mapByAttribute')? & ('encoding' encoding=STRING)?);
DataInterchangeFileCSV:
{DataInterchangeFileCSV} 'CSV' fileURL=STRING
(
('delimiter' delimiter=STRING)? & ('quoteCharacter' quoteCharacter=STRING)? & ('skipLines' skipLines=INT)? &
- (indent?='indent')? & (strict?='strict')? & (validateHeader?='validateHeader')? & ('encoding' encoding=STRING)?
+ (indent?='indent')? & ('encoding' encoding=STRING)?
);
DataInterchangeFileEDI:
- {DataInterchangeFileEDI} 'EDI' fileURL=STRING 'mappingModel' mappingModel=STRING (validate?='validate')?;
+ {DataInterchangeFileEDI} 'EDI' fileURL=STRING ('encoding' encoding=STRING)? 'mappingModel' mappingModel=STRING (validate?='validate')?;
DataInterchangeBean:
{DataInterchangeBean}
'entity' entity=[entity::LEntity|LFQN]
- ('createOn' elementMap=STRING)?
+ ('nodeName' nodeName=STRING)?
+ ('createOn' elementMap=STRING)?
(markLatest?='marker' latestProperty=[entity::LEntityAttribute])?
- ((recordElement?='recordElement') |
- (recordList?='recordList'))?
('expression' '{' (expression+=DataInterchangeExpression)* '}')?
('lookup' '{' (lookup+=DataInterchangeLookup)* '}')?
('format' '{' (format+=DataInterchangeFormat)* '}')?
('mapping' '{' (mappings+=DataInterchangeMapping)* '}')?
- ;
+ ('keys' '{' (lookupKeys+=DataInterchangeKey)* '}')?;
+
+DataInterchangeKey:
+ {DataInterchangeKey} 'key' property=[entity::LEntityAttribute];
DataInterchangeLookup:
{DataInterchangeLookup}
@@ -99,22 +102,19 @@ DataInterchangeEntityExpression:
DataInterchangePredefinedExpression:
{DataInterchangePredefinedExpression} 'assign' targetProperty=[entity::LEntityAttribute] 'with' bean=PredefinedBeanEnum 'as' beanType=PredefinedBeanTypeEnum;
-
+
DataInterchangeMapping:
- {DataInterchangeMapping} 'map' property=[entity::LEntityAttribute] 'to' data=STRING;
-
-DataInterchangeImport returns types::LImport:
-'import' importedNamespace=DataInterchangeQualifiedNameWithWildCard;
-
-DataInterchangeQualifiedNameWithWildCard:
- QualifiedName ('.' '*')?;
-
-QualifiedName:
- ValidID (=>'.' ValidID)*;
-
-ValidID:
- ID;
+ DataInterchangeValueMapping | DataInterchangeBlobMapping;
+
+DataInterchangeValueMapping:
+ {DataInterchangeValueMapping} 'map' property=[entity::LEntityAttribute] 'to' data=STRING;
+
+DataInterchangeBlobMapping:
+ {DataInterchangeBlobMapping} 'mapBlob' property=[entity::LEntityAttribute]
+ 'to' data=STRING ('extension' blobFileExtension=STRING)? ('path' blobPath=STRING)?
+ 'mimeType' mimeType=PredefinedBlobMimeTypeEnum ; // it would be better if the mime type can be optional and deduced from extension
+
LFQN:
ID ('.' ID)*;
@@ -124,6 +124,14 @@ TRANSLATABLESTRING:
TRANSLATABLEID:
ID;
+enum PredefinedBlobMimeTypeEnum:
+ plain = "plain"|
+ jpg = "jpg"|
+ png = "png"|
+ mpeg = "mpeg"|
+ octetstream = "octet-stream"|
+ pdf = "pdf";
+
enum PredefinedBeanEnum:
now = 'NowDate'|
start = 'StartDate'|
@@ -136,7 +144,7 @@ enum PredefinedBeanTypeEnum:
random = 'Random'|
execContext = 'ExecuteContext';
-enum ProgressBarStylesEnum:
- none = 'none'|
- normal = 'normal'|
- important = 'important';
+enum EntityManagerMode:
+ persist = 'persist'|
+ merge = 'merge'|
+ remove = 'remove';
diff --git a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/DataDSLRuntimeModule.java b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/DataDSLRuntimeModule.java
index 461b754..ccb9c26 100644
--- a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/DataDSLRuntimeModule.java
+++ b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/DataDSLRuntimeModule.java
@@ -11,31 +11,56 @@
* Christophe Loetz (Loetz GmbH&Co.KG) - initial implementation
*
*/
- package org.eclipse.osbp.xtext.datainterchange;
+package org.eclipse.osbp.xtext.datainterchange;
import javax.inject.Singleton;
+import org.eclipse.osbp.dsl.common.xtext.scoping.ScopingInfoProvider;
+import org.eclipse.osbp.xtext.datainterchange.formatting.DataDSLFormatter;
+import org.eclipse.osbp.xtext.datainterchange.imports.ShouldImportProvider;
+import org.eclipse.osbp.xtext.datainterchange.jvmmodel.DataDSLModelGenerator;
+import org.eclipse.osbp.xtext.datainterchange.scoping.DataDSLImportSectionNamespaceScopeProvider;
+import org.eclipse.osbp.xtext.datainterchange.scoping.DataDSLScopeProvider;
+import org.eclipse.osbp.xtext.datainterchange.valueconverter.DataDSLQualifiedNameProvider;
+import org.eclipse.osbp.xtext.datainterchange.valueconverter.DataDSLValueConverterService;
+import org.eclipse.osbp.xtext.i18n.DSLOutputConfigurationProvider;
+import org.eclipse.osbp.xtext.oxtype.imports.IShouldImportProvider;
+import org.eclipse.osbp.xtext.oxtype.imports.OXTypeRewritableImportSection;
+import org.eclipse.osbp.xtext.oxtype.linker.JvmTypeAwareLinker;
+import org.eclipse.osbp.xtext.oxtype.linking.OXTypeLinkingService;
+import org.eclipse.osbp.xtext.oxtype.resource.SemanticLoadingResource;
+import org.eclipse.osbp.xtext.oxtype.scoping.IScopingInfoProvider;
+import org.eclipse.osbp.xtext.oxtype.serializer.JvmTypeAwareTransientValueService;
import org.eclipse.xtext.conversion.IValueConverterService;
+import org.eclipse.xtext.findReferences.TargetURICollector;
import org.eclipse.xtext.generator.IGenerator;
import org.eclipse.xtext.generator.IOutputConfigurationProvider;
+import org.eclipse.xtext.linking.ILinkingService;
import org.eclipse.xtext.naming.IQualifiedNameProvider;
+import org.eclipse.xtext.resource.persistence.IResourceStorageFacade;
import org.eclipse.xtext.scoping.IScopeProvider;
-import org.eclipse.osbp.dsl.xtext.lazyresolver.LazyJvmTypeLinker;
-import org.eclipse.osbp.dsl.xtext.lazyresolver.SemanticLoadingResource;
-import org.eclipse.osbp.dsl.xtext.lazyresolver.linker.LazyJvmTypeTransientValueService;
+import org.eclipse.xtext.serializer.sequencer.ITransientValueService;
+import org.eclipse.xtext.xbase.imports.RewritableImportSection;
+import org.eclipse.xtext.xbase.jvmmodel.JvmModelTargetURICollector;
+import org.eclipse.xtext.xbase.resource.BatchLinkableResourceStorageFacade;
import com.google.inject.Binder;
-
-import org.eclipse.osbp.xtext.datainterchange.jvmmodel.DataDSLModelGenerator;
-import org.eclipse.osbp.xtext.datainterchange.scoping.DataDSLScopeProvider;
-import org.eclipse.osbp.xtext.datainterchange.valueconverter.DataDSLQualifiedNameProvider;
-import org.eclipse.osbp.xtext.datainterchange.valueconverter.DataDSLValueConverterService;
-import org.eclipse.osbp.xtext.i18n.DSLOutputConfigurationProvider;
+import com.google.inject.name.Names;
/**
- * Use this class to register components to be used at runtime / without the Equinox extension registry.
+ * Use this class to register components to be used at runtime / without the
+ * Equinox extension registry.
*/
public class DataDSLRuntimeModule extends org.eclipse.osbp.xtext.datainterchange.AbstractDataDSLRuntimeModule {
+
+ public Class<? extends IResourceStorageFacade> bindResourceStorageFacade() {
+ return BatchLinkableResourceStorageFacade.class;
+ }
+
+ public Class<? extends TargetURICollector> bindTargetURICollector() {
+ return JvmModelTargetURICollector.class;
+ }
+
@Override
public Class<? extends IGenerator> bindIGenerator() {
return DataDSLModelGenerator.class;
@@ -45,7 +70,16 @@ public class DataDSLRuntimeModule extends org.eclipse.osbp.xtext.datainterchange
public Class<? extends IScopeProvider> bindIScopeProvider() {
return DataDSLScopeProvider.class;
}
-
+
+ @SuppressWarnings("restriction")
+ public Class<? extends org.eclipse.xtext.xbase.scoping.batch.XbaseBatchScopeProvider> bindXbaseBatchScopeProvider() {
+ return DataDSLScopeProvider.class;
+ }
+
+ public Class<? extends org.eclipse.xtext.formatting.IFormatter> bindIFormatter() {
+ return DataDSLFormatter.class;
+ }
+
@Override
public Class<? extends IQualifiedNameProvider> bindIQualifiedNameProvider() {
return DataDSLQualifiedNameProvider.class;
@@ -67,11 +101,52 @@ public class DataDSLRuntimeModule extends org.eclipse.osbp.xtext.datainterchange
}
public Class<? extends org.eclipse.xtext.linking.ILinker> bindILinker() {
- return LazyJvmTypeLinker.class;
+ return JvmTypeAwareLinker.class;
+ }
+
+ public void configureITransientValueService(Binder binder) {
+ binder.bind(ITransientValueService.class).to(JvmTypeAwareTransientValueService.class);
+ }
+
+ @Override
+ public void configureIScopeProviderDelegate(Binder binder) {
+ binder.bind(IScopeProvider.class)
+ .annotatedWith(Names.named("org.eclipse.xtext.scoping.impl.AbstractDeclarativeScopeProvider.delegate"))
+ .to(DataDSLImportSectionNamespaceScopeProvider.class);
+ }
+
+ public Class<? extends ILinkingService> bindILinkingService() {
+ return OXTypeLinkingService.class;
}
- public Class<? extends org.eclipse.xtext.serializer.sequencer.ITransientValueService> bindSerializerITransientValueService() {
- return LazyJvmTypeTransientValueService.class;
+ // public Class<? extends org.eclipse.xtext.resource.IDerivedStateComputer>
+ // bindIDerivedStateComputer() {
+ // return ExtendedJvmModelAssociator.class;
+ // }
+ //
+ // public Class<? extends IExtendedModelAssociator>
+ // bindIIndexModelAssociator() {
+ // return ExtendedJvmModelAssociator.class;
+ // }
+ //
+ // public Class<? extends IJvmModelAssociator> bindIJvmModelAssociator() {
+ // return ExtendedJvmModelAssociator.class;
+ // }
+ //
+ // public Class<? extends IJvmModelAssociations> bindIJvmModelAssociations()
+ // {
+ // return ExtendedJvmModelAssociator.class;
+ // }
+
+ public Class<? extends RewritableImportSection.Factory> bindRewritableImportSection$Factory() {
+ return OXTypeRewritableImportSection.Factory.class;
+ }
+
+ public Class<? extends IScopingInfoProvider> bindIScopingInfoProvider() {
+ return ScopingInfoProvider.class;
}
+ public Class<? extends IShouldImportProvider> bindIShouldImportProvider() {
+ return ShouldImportProvider.class;
+ }
}
diff --git a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/GenerateDataDSL.mwe2 b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/GenerateDataDSL.mwe2
index 1b9f021..939a076 100644
--- a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/GenerateDataDSL.mwe2
+++ b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/GenerateDataDSL.mwe2
@@ -14,151 +14,85 @@
module org.eclipse.osbp.xtext.datainterchange.GenerateDataDSL
import org.eclipse.emf.mwe.utils.*
-import org.eclipse.xtext.generator.*
-import org.eclipse.xtext.ui.generator.*
-import org.eclipse.osbp.dsl.mwe.*
+import org.eclipse.xtext.xtext.generator.*
+import org.eclipse.xtext.xtext.generator.model.project.*
-var projectName = "org.eclipse.osbp.xtext.datainterchange"
-var grammarURI = "platform:/resource/${projectName}/src/org/eclipse/osbp/xtext/datainterchange/DataDSL.xtext"
-var fileExtensions = "data"
-var runtimeProject = "../${projectName}"
-var generateXtendStub = true
-var encoding = "UTF-8"
+var rootPath = ".."
+var fileHeaderText = "/**
+ *
+ * Copyright (c) 2011, 2016 - Loetz GmbH&Co.KG (69115 Heidelberg, Germany)
+ *
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *
+ * Contributors:
+ * Christophe Loetz (Loetz GmbH&Co.KG) - initial implementation
+ *
+ * generated by Xtext \${version}
+ *
+ */
+
+"
Workflow {
- bean = org.eclipse.emf.ecore.xcore.XcoreStandaloneSetup : xcore {}
- bean = org.eclipse.xtext.mwe.Reader {
- register = xcore
- }
- bean = MavenStandaloneSetup {
- scanClassPath = true
- platformUri = "${runtimeProject}/.."
- registerGenModelFile = "platform:/resource/org.eclipse.osbp.xtext.datainterchange/model/DataDSL.xcore"
-// registerEcoreFile = "platform:/resource/org.eclipse.osbp.xtext.datainterchange/model/DataDSL.xcore"
- registerGeneratedEPackage = "org.eclipse.osbp.xtext.datainterchange.DataDSLPackage"
- registerGenModelFile = "platform:/resource/org.eclipse.osbp.dsl.entity.xtext/model/entity.xcore"
-// registerEcoreFile = "platform:/resource/org.eclipse.osbp.dsl.entity.xtext/model/entity.xcore"
+ component = XtextGenerator {
+ configuration = {
+ project = StandardProjectConfig {
+ baseName = "org.eclipse.osbp.xtext.datainterchange"
+ rootPath = rootPath
+ genericIde = {
+ enabled = true
+ name = "org.eclipse.osbp.xtext.datainterchange.ide"
+ }
+ runtimeTest = {
+ enabled = true
+ }
+ eclipsePlugin = {
+ enabled = true
+ }
+ eclipsePluginTest = {
+ enabled = false
+ }
+ createEclipseMetaData = true
+ }
+ code = {
+ encoding = "UTF-8"
+ lineDelimiter = "\n"
+ fileHeader = fileHeaderText
+ }
+ }
- registerGenModelFile = "platform:/resource/org.eclipse.osbp.dsl.common.xtext/model/types.xcore"
-// registerEcoreFile = "platform:/resource/org.eclipse.osbp.dsl.common.xtext/model/types.xcore"
+ language = StandardLanguage {
+ name = "org.eclipse.osbp.xtext.datainterchange.DataDSL"
+ fileExtensions = "data"
+
+ referencedResource = "platform:/resource/org.eclipse.osbp.xtext.datainterchange/model/DataDSL.xcore"
+
+ referencedResource = "platform:/resource/org.eclipse.osbp.xtext.oxtype/model/OXtype.ecore"
+ referencedResource = "platform:/resource/org.eclipse.osbp.xtext.oxtype/model/OXtype.genmodel"
+ referencedResource = "platform:/resource/org.eclipse.osbp.dsl.common.xtext/model/types.xcore"
+ referencedResource = "platform:/resource/org.eclipse.osbp.dsl.entity.xtext/model/entity.xcore"
+
+ generateXtendStubs = false
+
+ serializer = {
+ generateStub = false
+ }
+ validator = {
+ // composedCheck = "org.eclipse.xtext.validation.NamesAreUniqueValidator"
+ }
- registerGenModelFile = "platform:/resource/org.eclipse.xtext.common.types/model/JavaVMTypes.genmodel"
- registerEcoreFile = "platform:/resource/org.eclipse.xtext.common.types/model/JavaVMTypes.ecore"
+ generator = {
+ generateStub = false
+ generateJavaMain = false
+ generateXtendStub = false
+ }
- registerGenModelFile = "platform:/resource/org.eclipse.xtext.xbase/model/Xbase.genmodel"
- registerEcoreFile = "platform:/resource/org.eclipse.xtext.xbase/model/Xtype.ecore"
- registerEcoreFile = "platform:/resource/org.eclipse.xtext.xbase/model/Xbase.ecore"
- registerEcoreFile = "platform:/resource/org.eclipse.xtext.xbase/model/XAnnotations.ecore"
- }
-
- component = DirectoryCleaner {
- directory = "${runtimeProject}/src-gen"
- exclude = "README.txt"
- }
-
- component = DirectoryCleaner {
- directory = "${runtimeProject}.ui/src-gen"
- exclude = "README.txt"
- }
-
- component = DirectoryCleaner {
- directory = "${runtimeProject}.tests/src-gen"
- exclude = "README.txt"
- }
-
- component = Generator {
- pathRtProject = runtimeProject
- pathUiProject = "${runtimeProject}.ui"
- pathTestProject = "${runtimeProject}.tests"
- projectNameRt = projectName
- projectNameUi = "${projectName}.ui"
- encoding = encoding
- language = auto-inject {
- uri = grammarURI
-
- // Java API to access grammar elements (required by several other fragments)
- fragment = grammarAccess.GrammarAccessFragment auto-inject {}
-
- // generates Java API for the generated EPackages
- fragment = ecore.EMFGeneratorFragment auto-inject {}
-
- // the old serialization component
- // fragment = parseTreeConstructor.ParseTreeConstructorFragment auto-inject {}
-
- // serializer 2.0
- fragment = serializer.SerializerFragment auto-inject {
- generateStub = false
- }
-
- // a custom ResourceFactory for use with EMF
- fragment = resourceFactory.ResourceFactoryFragment auto-inject {}
-
- // The antlr parser generator fragment.
- fragment = parser.antlr.XtextAntlrGeneratorFragment auto-inject {
- // options = {
- // backtrack = true
- // }
- }
-
- // Xtend-based API for validation
- fragment = validation.ValidatorFragment auto-inject {
- // composedCheck = "org.eclipse.xtext.validation.ImportUriValidator"
- // composedCheck = "org.eclipse.xtext.validation.NamesAreUniqueValidator"
- }
-
- // old scoping and exporting API
- // fragment = scoping.ImportURIScopingFragment auto-inject {}
- // fragment = exporting.SimpleNamesFragment auto-inject {}
-
- // scoping and exporting API
- fragment = scoping.ImportNamespacesScopingFragment auto-inject {}
- fragment = exporting.QualifiedNamesFragment auto-inject {}
- fragment = builder.BuilderIntegrationFragment auto-inject {}
-
- // generator API
- fragment = generator.GeneratorFragment auto-inject {}
-
- // formatter API
- fragment = formatting.FormatterFragment auto-inject {}
-
- // labeling API
- fragment = labeling.LabelProviderFragment auto-inject {}
-
- // outline API
- fragment = outline.OutlineTreeProviderFragment auto-inject {}
- fragment = outline.QuickOutlineFragment auto-inject {}
-
- // quickfix API
- fragment = quickfix.QuickfixProviderFragment auto-inject {}
-
- // content assist API
- fragment = contentAssist.ContentAssistFragment auto-inject {}
-
- // generates a more lightweight Antlr parser and lexer tailored for content assist
- fragment = parser.antlr.XtextAntlrUiGeneratorFragment auto-inject {}
-
- // generates junit test support classes into Generator#pathTestProject
- fragment = junit.Junit4Fragment auto-inject {}
-
- // rename refactoring
- fragment = refactoring.RefactorElementNameFragment auto-inject {}
-
- // provides the necessary bindings for java types integration
- fragment = types.TypesGeneratorFragment auto-inject {}
-
- // generates the required bindings only if the grammar inherits from Xbase
- fragment = xbase.XbaseGeneratorFragment auto-inject {}
-
- // provides a preference page for template proposals
- fragment = templates.CodetemplatesGeneratorFragment auto-inject {}
-
- // provides a compare view
- fragment = compare.CompareFragment auto-inject {}
-
- // parse grammar and generate i18n grammar
- fragment = org.eclipse.osbp.xtext.basic.generator.BasicDslGrammarI18nGenerator auto-inject {}
- }
- }
+ }
+ }
}
+
diff --git a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/formatting/DataDSLFormatter.xtend b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/formatting/DataDSLFormatter.xtend
index 5a5b23a..f332944 100644
--- a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/formatting/DataDSLFormatter.xtend
+++ b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/formatting/DataDSLFormatter.xtend
@@ -14,16 +14,15 @@
* This copyright notice shows up in the generated Java code
*
*/
-
package org.eclipse.osbp.xtext.datainterchange.formatting
-import com.google.inject.Inject;
+import com.google.inject.Inject
+import org.eclipse.osbp.xtext.oxtype.formatting.GenericFormatter
+import org.eclipse.osbp.xtext.oxtype.services.OXtypeGrammarAccess
import org.eclipse.xtext.formatting.impl.AbstractDeclarativeFormatter
import org.eclipse.xtext.formatting.impl.FormattingConfig
-import org.eclipse.osbp.utils.xtext.GenericFormatter
-import org.eclipse.xtext.xbase.services.XtypeGrammarAccess
-//import org.eclipse.osbp.xtext.datainterchange.services.DataDSLGrammarAccess
+//import org.eclipse.osbp.xtext.datainterchange.services.DataDSLGrammarAccess
/**
* This class contains custom formatting description.
*
@@ -34,13 +33,13 @@ import org.eclipse.xtext.xbase.services.XtypeGrammarAccess
*/
class DataDSLFormatter extends AbstractDeclarativeFormatter {
-// @Inject extension DataDSLGrammarAccess
- @Inject XtypeGrammarAccess grammarAccess
+ // @Inject extension DataDSLGrammarAccess
+ @Inject OXtypeGrammarAccess grammarAccess
override protected void configureFormatting(FormattingConfig c) {
val genericFormatter = new GenericFormatter()
- genericFormatter.formatFirstLevelBlocks( c, grammar.grammar, "DataInterchange", "DataInterchangeImport" )
- genericFormatter.genericFormatting( c, grammar, grammarAccess )
+ genericFormatter.formatFirstLevelBlocks(c, grammar.grammar, "DataInterchange")
+ genericFormatter.genericFormatting(c, grammar, grammarAccess)
}
}
diff --git a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/imports/ShouldImportProvider.java b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/imports/ShouldImportProvider.java
new file mode 100644
index 0000000..36d6003
--- /dev/null
+++ b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/imports/ShouldImportProvider.java
@@ -0,0 +1,21 @@
+package org.eclipse.osbp.xtext.datainterchange.imports;
+
+import org.eclipse.emf.ecore.EClass;
+import org.eclipse.emf.ecore.EObject;
+import org.eclipse.emf.ecore.EReference;
+import org.eclipse.osbp.dsl.semantic.entity.LEntity;
+import org.eclipse.osbp.dsl.semantic.entity.OSBPEntityPackage;
+import org.eclipse.osbp.xtext.oxtype.imports.DefaultShouldImportProvider;
+
+public class ShouldImportProvider extends DefaultShouldImportProvider {
+
+ @Override
+ protected boolean doShouldImport(EObject toImport, EReference eRef, EObject context) {
+ return toImport instanceof LEntity;
+ }
+
+ protected boolean doShouldProposeAllElements(EObject object, EReference reference) {
+ EClass type = reference.getEReferenceType();
+ return OSBPEntityPackage.Literals.LENTITY.isSuperTypeOf(type);
+ }
+}
diff --git a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/jvmmodel/DataDSLJvmModelInferrer.xtend b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/jvmmodel/DataDSLJvmModelInferrer.xtend
index 724e1c9..344f957 100644
--- a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/jvmmodel/DataDSLJvmModelInferrer.xtend
+++ b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/jvmmodel/DataDSLJvmModelInferrer.xtend
@@ -17,6 +17,10 @@
package org.eclipse.osbp.xtext.datainterchange.jvmmodel
+import com.vaadin.server.ClientConnector.AttachEvent
+import com.vaadin.server.ClientConnector.AttachListener
+import com.vaadin.server.ClientConnector.DetachEvent
+import com.vaadin.server.ClientConnector.DetachListener
import com.vaadin.ui.Button
import com.vaadin.ui.CssLayout
import com.vaadin.ui.Label
@@ -25,35 +29,35 @@ import java.io.OutputStream
import java.net.URL
import java.util.ArrayList
import java.util.HashMap
-import java.util.Iterator
import java.util.Locale
import java.util.Map
import java.util.concurrent.ExecutorService
-import javax.annotation.PostConstruct
-import javax.annotation.PreDestroy
import javax.inject.Inject
import javax.persistence.EntityManager
import javax.xml.transform.Transformer
import javax.xml.transform.TransformerFactory
import org.eclipse.e4.core.contexts.IEclipseContext
-import org.eclipse.e4.core.services.events.IEventBroker
+import org.eclipse.e4.ui.di.Focus
import org.eclipse.e4.ui.model.application.MApplication
+import org.eclipse.osbp.core.api.persistence.IPersistenceService
+import org.eclipse.osbp.datainterchange.api.IDataInterchange
import org.eclipse.osbp.dsl.entity.xtext.extensions.ModelExtensions
-import org.eclipse.osbp.dsl.semantic.common.types.LReference
-import org.eclipse.osbp.dsl.semantic.entity.LEntity
import org.eclipse.osbp.osgi.hybrid.api.AbstractHybridVaaclipseView
-import org.eclipse.osbp.persistence.IPersistenceService
+import org.eclipse.osbp.runtime.common.event.EventDispatcherEvent
+import org.eclipse.osbp.runtime.common.event.IEventDispatcher
+import org.eclipse.osbp.ui.api.e4.IE4Focusable
import org.eclipse.osbp.ui.api.metadata.IDSLMetadataService
import org.eclipse.osbp.ui.api.user.IUser
-import org.eclipse.osbp.utils.constants.GeneratorConstants
-import org.eclipse.osbp.utils.entitymock.IEntityImportInitializationListener
import org.eclipse.osbp.xtext.datainterchange.DataInterchange
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeBean
+import org.eclipse.osbp.xtext.datainterchange.DataInterchangeBlobMapping
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeFileCSV
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeFileEDI
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeFileXML
import org.eclipse.osbp.xtext.datainterchange.DataInterchangePackage
+import org.eclipse.osbp.xtext.datainterchange.EntityManagerMode
import org.eclipse.osbp.xtext.datainterchange.common.WorkerThreadRunnable
+import org.eclipse.osbp.xtext.entitymock.common.IEntityImportInitializationListener
import org.eclipse.osbp.xtext.i18n.DSLOutputConfigurationProvider
import org.eclipse.xtext.common.types.JvmDeclaredType
import org.eclipse.xtext.common.types.JvmField
@@ -63,20 +67,16 @@ import org.eclipse.xtext.naming.IQualifiedNameProvider
import org.eclipse.xtext.xbase.jvmmodel.AbstractModelInferrer
import org.eclipse.xtext.xbase.jvmmodel.IJvmDeclaredTypeAcceptor
import org.eclipse.xtext.xbase.jvmmodel.JvmTypesBuilder
-import org.milyn.Smooks
-import org.milyn.SmooksFactory
-import org.milyn.container.ExecutionContext
import org.osgi.framework.BundleEvent
import org.osgi.framework.BundleListener
-import org.osgi.service.event.EventHandler
import org.slf4j.Logger
+import org.eclipse.osbp.ui.api.customfields.IBlobService
/**
* <p>
* Data Interchange Repository Domain Specific Language
* This inferrer infers models of extension .data and generates code to be used by any data interchanging process
- * to facilitate the communication with external data sources and drains. Underlying components
- * are from the smooks repository
+ * to facilitate the communication with external data sources and drains.
* </p>
*
* @author Joerg Riegel
@@ -102,38 +102,41 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
* @param isPreIndexingPhase
* true if in preindexing phase
*/
- def dispatch void infer(DataInterchangePackage dataInterchangePackage, IJvmDeclaredTypeAcceptor acceptor, boolean isPreIndexingPhase) {
- dataInterchangePackage.generatePckgName(acceptor)
- // create a view
- var cls = dataInterchangePackage.toClass(dataInterchangePackage.name.toString.concat("TriggerView"));
- cls.simpleName = cls.simpleName.toFirstUpper
- acceptor.accept(cls,
- [
- superTypes += _typeReferenceBuilder.typeRef(AbstractHybridVaaclipseView)
- superTypes += _typeReferenceBuilder.typeRef(BundleListener)
+ def dispatch void infer(DataInterchangePackage dataInterchangePackage, IJvmDeclaredTypeAcceptor acceptor, boolean isPreIndexingPhase) {
+ dataInterchangePackage.generatePckgName(acceptor)
+ // create a view
+ var cls = dataInterchangePackage.toClass(dataInterchangePackage.name.toString.concat("TriggerView"));
+ cls.simpleName = cls.simpleName.toFirstUpper
+ acceptor.accept(cls,
+ [
+ superTypes += _typeReferenceBuilder.typeRef(AbstractHybridVaaclipseView)
+ superTypes += _typeReferenceBuilder.typeRef(BundleListener)
superTypes += _typeReferenceBuilder.typeRef(IUser.UserLocaleListener)
- documentation = GeneratorConstants.GENERATED_CLASSES_DOCUMENTATION
- annotations += _annotationTypesBuilder.annotationRef(SuppressWarnings, "serial")
- packageName = dataInterchangePackage.fullyQualifiedName.toString
- it.toFields(dataInterchangePackage)
- it.toConstructor(dataInterchangePackage)
+ superTypes += _typeReferenceBuilder.typeRef(IEventDispatcher.Receiver)
+ superTypes += _typeReferenceBuilder.typeRef(DetachListener)
+ superTypes += _typeReferenceBuilder.typeRef(AttachListener)
+ superTypes += _typeReferenceBuilder.typeRef(IE4Focusable)
+ packageName = dataInterchangePackage.fullyQualifiedName.toString
+ it.fileHeader = dataInterchangePackage.documentation
+ it.toFields(dataInterchangePackage)
+ it.toConstructor(dataInterchangePackage)
it.toOperations(dataInterchangePackage)
- ])
-
- // create smooks classes
- for (dataInterchange : dataInterchangePackage.datInts) {
- var clsName2 = dataInterchange.fullyQualifiedName
- acceptor.accept(dataInterchange.toClass(clsName2),
- [
- superTypes += _typeReferenceBuilder.typeRef(WorkerThreadRunnable)
- annotations += _annotationTypesBuilder.annotationRef(SuppressWarnings, "serial")
- documentation = GeneratorConstants.GENERATED_CLASSES_DOCUMENTATION
- it.toConstructor(dataInterchange)
+ ])
+
+ // create classes
+ for (dataInterchange : dataInterchangePackage.datInts) {
+ var clsName2 = dataInterchange.fullyQualifiedName
+ acceptor.accept(dataInterchange.toClass(clsName2),
+ [
+ superTypes += _typeReferenceBuilder.typeRef(WorkerThreadRunnable)
+ annotations += _annotationTypesBuilder.annotationRef(SuppressWarnings, "serial")
+ it.fileHeader = dataInterchangePackage.documentation
+ it.toConstructor(dataInterchange)
it.toFields(dataInterchange)
it.toOperations(dataInterchange)
- ])
- }
- }
+ ])
+ }
+ }
/**
@@ -148,29 +151,18 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
var JvmField field = null
field = pkg.toField("sidebar", _typeReferenceBuilder.typeRef(VerticalLayout))
type.members += field
- var name = pkg.name.toString.replace(".",":")
- var String[] parts = name.split(":")
- val String clsName = parts.get(parts.size-1).toFirstUpper
- field = pkg.toField("log", _typeReferenceBuilder.typeRef(Logger)) [setInitializer([ append('''LoggerFactory.getLogger(«clsName»TriggerView.class)''') ])]
+ field = pkg.toField("log", _typeReferenceBuilder.typeRef(Logger)) [setInitializer([ append('''LoggerFactory.getLogger(«pkg.fullyQualifiedName.lastSegment.toFirstUpper»TriggerView.class)''') ])]
field.final = true
field.static = true
type.members += field
field = pkg.toField("menu", _typeReferenceBuilder.typeRef(CssLayout))
type.members += field
- field = pkg.toField("workerInfo", _typeReferenceBuilder.typeRef(EventHandler))
- type.members += field
field = pkg.toField("branding", _typeReferenceBuilder.typeRef(CssLayout))
type.members += field
- field = pkg.toField("eventBroker", _typeReferenceBuilder.typeRef(IEventBroker)) [
- annotations += _annotationTypesBuilder.annotationRef(Inject)
- ]
- type.members += field
-
field = pkg.toField("persistenceService", _typeReferenceBuilder.typeRef(IPersistenceService)) [
annotations += _annotationTypesBuilder.annotationRef(Inject)
]
type.members += field
-
field = pkg.toField("progressBars", _typeReferenceBuilder.typeRef(Map, _typeReferenceBuilder.typeRef(String), _typeReferenceBuilder.typeRef(WorkerThreadRunnable)))
type.members += field
field = pkg.toField("executorService", _typeReferenceBuilder.typeRef(ExecutorService))
@@ -178,12 +170,18 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
type.members += field
field = pkg.toField("dslMetadataService", _typeReferenceBuilder.typeRef(IDSLMetadataService)) [annotations += _annotationTypesBuilder.annotationRef(Inject)]
type.members += field
+ field = pkg.toField("dataInterchange", _typeReferenceBuilder.typeRef(IDataInterchange)) [annotations += _annotationTypesBuilder.annotationRef(Inject)]
+ type.members += field
+ field = pkg.toField("eventDispatcher", _typeReferenceBuilder.typeRef(IEventDispatcher))[annotations += _annotationTypesBuilder.annotationRef(Inject)]
+ type.members += field
+ field = pkg.toField("blobService", _typeReferenceBuilder.typeRef(IBlobService))[annotations += _annotationTypesBuilder.annotationRef(Inject)]
+ type.members += field
field = pkg.toField("user", _typeReferenceBuilder.typeRef(IUser))[annotations += _annotationTypesBuilder.annotationRef(Inject)]
type.members += field
field = pkg.toField("logo", _typeReferenceBuilder.typeRef(Label))
type.members += field
field = pkg.toField("buttons",
- _typeReferenceBuilder.typeRef(HashMap, _typeReferenceBuilder.typeRef(Button), _typeReferenceBuilder.typeRef(ArrayList)))
+ _typeReferenceBuilder.typeRef(HashMap, _typeReferenceBuilder.typeRef(Button), _typeReferenceBuilder.typeRef(ArrayList, _typeReferenceBuilder.typeRef(String))))
type.members += field
}
@@ -219,11 +217,6 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
type.members += pkg.toMethod("createComponents", _typeReferenceBuilder.typeRef(Void::TYPE), [
body = [ append('''«pkg.createComponents»''')]
])
- // unique name
- type.members += pkg.toMethod("uniqueName", _typeReferenceBuilder.typeRef(String), [
- parameters += pkg.toParameter("name", _typeReferenceBuilder.typeRef(String))
- body = [ append('''«pkg.uniqueName»''')]
- ])
// is duplicate
type.members += pkg.toMethod("isDuplicate", _typeReferenceBuilder.typeRef(boolean), [
parameters += pkg.toParameter("name", _typeReferenceBuilder.typeRef(String))
@@ -234,23 +227,52 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
parameters += pkg.toParameter("button", _typeReferenceBuilder.typeRef(Button))
body = [ append('''«pkg.findButtonLayout»''')]
])
+ // remove progressbar
+ type.members += pkg.toMethod("removeProgressBar", _typeReferenceBuilder.typeRef(Void::TYPE), [
+ parameters += pkg.toParameter("workerName", _typeReferenceBuilder.typeRef(String))
+ body = [ append('''«pkg.removeProgressBar»''')]
+ ])
// on bundle stopping - shutdown executorService
type.members += pkg.toMethod("bundleChanged", _typeReferenceBuilder.typeRef(Void::TYPE), [
annotations += _annotationTypesBuilder.annotationRef(Override)
parameters += pkg.toParameter("event", _typeReferenceBuilder.typeRef(BundleEvent))
body = [ append('''«pkg.bundleChanged»''')]
])
- // subscribe to eventBroker
- type.members += pkg.toMethod("subscribe", _typeReferenceBuilder.typeRef(Void::TYPE), [
- visibility = JvmVisibility.PROTECTED
- annotations += _annotationTypesBuilder.annotationRef(PostConstruct)
- body = [append('''«pkg.subscribe»''')]
- ])
- // unsubscribe from eventBroker
- type.members += pkg.toMethod("unsubscribe", _typeReferenceBuilder.typeRef(Void::TYPE), [
- visibility = JvmVisibility.PROTECTED
- annotations += _annotationTypesBuilder.annotationRef(PreDestroy)
- body = [append('''«pkg.unsubscribe»''')]
+ // attach
+ type.members += pkg.toMethod("attach", _typeReferenceBuilder.typeRef(Void::TYPE),
+ [
+ parameters += pkg.toParameter("event", _typeReferenceBuilder.typeRef(AttachEvent))
+ annotations += _annotationTypesBuilder.annotationRef(Override)
+ body = [append(
+ '''
+ user.addUserLocaleListener(this);
+ eventDispatcher.addEventReceiver(this);''')]
+
+ ])
+
+ // detach
+ type.members += pkg.toMethod("detach", _typeReferenceBuilder.typeRef(Void::TYPE),
+ [
+ parameters += pkg.toParameter("event", _typeReferenceBuilder.typeRef(DetachEvent))
+ annotations += _annotationTypesBuilder.annotationRef(Override)
+ body = [append(
+ '''
+ user.removeUserLocaleListener(this);
+ eventDispatcher.removeEventReceiver(this);''')]
+ ])
+ // focus
+ type.members += pkg.toMethod("setFocus", _typeReferenceBuilder.typeRef(Void::TYPE), [
+ annotations += _annotationTypesBuilder.annotationRef(Focus)
+ body = [append(
+ '''
+ Component parent = getParent();
+ while(!(parent instanceof Panel) && parent != null) {
+ parent = parent.getParent();
+ }
+ if(parent != null) {
+ ((Panel)parent).focus();
+ }''')]
+
])
// locale notification
type.members += pkg.toMethod("localeChanged", _typeReferenceBuilder.typeRef(Void::TYPE),
@@ -260,79 +282,17 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
parameters += pkg.toParameter("locale", _typeReferenceBuilder.typeRef(Locale))
body = [append('''«pkg.localeChanged»''')]
])
+ // event notification
+ type.members += pkg.toMethod("receiveEvent", _typeReferenceBuilder.typeRef(Void::TYPE),
+ [
+ visibility = JvmVisibility.PUBLIC
+ annotations += _annotationTypesBuilder.annotationRef(Override)
+ parameters += pkg.toParameter("event", _typeReferenceBuilder.typeRef(EventDispatcherEvent))
+ body = [append('''«pkg.receiveEvent»''')]
+ ])
}
/**
- * handle the worker progress UI. provide the appropriate code.
- *
- * @param pkg
- * the current package inferred {@link DataInterchangePackage}
- * @return code fragment
- */
- def String workerInfo(DataInterchangePackage pkg) {
- var body = ""
- body = '''
- «body»
- new EventHandler() {
- @Override
- public void handleEvent(Event event) {
- // a worker notified this view that it is finished
- String workername = (String)event.getProperty(EventUtils.DATA);
- if (progressBars.containsKey(workername)) {
- final WorkerThreadRunnable worker = progressBars.get(workername);
- ((VerticalLayout)worker.getProgressBarArea().getParent()).removeComponent(worker.getProgressBarArea());
- progressBars.remove(workername);
- }
- }
- };
- '''
- return body
- }
-
- /**
- * subscribe the event broker for messages on changed locale and worker progress.
- *
- * @param pkg
- * the current package inferred {@link DataInterchangePackage}
- * @return code fragment
- */
- def String subscribe(DataInterchangePackage pkg) {
- var body = ""
- body = '''
- «body»
- eventBroker.subscribe(EventBrokerMsg.WORKER_THREAD_INFO, workerInfo);
- '''
- return body
- }
-
- /**
- * unsubscribe from event broker for messages.
- *
- * @param pkg
- * the current package inferred {@link DataInterchangePackage}
- * @return code fragment
- */
- def String unsubscribe(DataInterchangePackage pkg) {
- var body = ""
- body = '''
- «body»
- eventBroker.unsubscribe(workerInfo);
- '''
- return body
- }
-
- /**
- * normalize package name.
- *
- * @param pkg
- * the current package inferred {@link DataInterchangePackage}
- * @return normalized package name
- */
- def String toEventID(DataInterchangePackage pkg) {
- return pkg.fullyQualifiedName.toString.toUpperCase.replaceAll("(\\W)","_")
- }
-
- /**
* let the app wait for finishing the workers before allowing to stop bundle.
*
* @param pkg
@@ -409,32 +369,6 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
return body
}
- /**
- * make a name unique by concatenating a number.
- *
- * @param pkg
- * the current package inferred {@link DataInterchangePackage}
- * @return code fragment
- */
- def String uniqueName(DataInterchangePackage pkg) {
- var body = ""
- body = '''
- «body»
- Integer cnt = 0;
- String searchName;
- do {
- if (cnt > 0) {
- searchName = name+cnt.toString();
- } else {
- searchName = name;
- }
- cnt ++;
- }while(isDuplicate(searchName));
- return searchName;
- '''
- return body
- }
-
def String descriptionI18nKey(DataInterchange dataInterchange) {
if ((dataInterchange.descriptionValue == null) || dataInterchange.descriptionValue.isEmpty) {
dataInterchange.name
@@ -453,8 +387,10 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
*/
def String createView(DataInterchangePackage pkg) {
var body = '''
- buttons = new HashMap<Button, ArrayList>();
- workerInfo = «pkg.workerInfo»
+ getContext().set(IE4Focusable.class, this);
+ parent.addAttachListener(this);
+ parent.addDetachListener(this);
+ buttons = new HashMap<>();
Bundle bundle = FrameworkUtil.getBundle(getClass());
if (bundle != null) {
BundleContext ctx = bundle.getBundleContext();
@@ -490,7 +426,8 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
sidebar.setExpandRatio(menu, 1.0f);
'''
- body = '''«body»
+ body = '''
+ «body»
// add menu items
Button b;
VerticalLayout buttonLayout;
@@ -506,15 +443,13 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
@Override
public void buttonClick(ClickEvent event) {
log.debug("pressed «dataInterchange.name» import");
- String uniqueName = uniqueName("«dataInterchange.name»");
- «dataInterchange.getBasicRunConfiguration(false, dataInterchange.getFileURL)»
- «dataInterchange.defaultVariableName».setName(uniqueName);
- «dataInterchange.defaultVariableName».setEventBroker(eventBroker);
+ «dataInterchange.getBasicRunConfiguration(false, dataInterchange.getFileURL, WorkerThreadRunnable.Direction.IMPORT.name)»
+ «dataInterchange.defaultVariableName».setName(UUID.randomUUID().toString());
+ «dataInterchange.defaultVariableName».setEventDispatcher(eventDispatcher);
«dataInterchange.defaultVariableName».setUi(UI.getCurrent());
«dataInterchange.defaultVariableName».setDirection(WorkerThreadRunnable.Direction.IMPORT);
- «IF dataInterchange.progressBarStyle!=null»«dataInterchange.defaultVariableName».setProgressBarStyleName("«dataInterchange.progressBarStyle.literal»");«ENDIF»
findButtonLayout(event.getButton()).addComponent(«dataInterchange.defaultVariableName».getProgressBarArea());
- progressBars.put(uniqueName, «dataInterchange.defaultVariableName»);
+ progressBars.put(«dataInterchange.defaultVariableName».getName(), «dataInterchange.defaultVariableName»);
executorService.execute(«dataInterchange.defaultVariableName»);
log.debug("«dataInterchange.name» import added to executor queue");
}
@@ -533,15 +468,13 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
@Override
public void buttonClick(ClickEvent event) {
log.debug("pressed «dataInterchange.name» export");
- String uniqueName = uniqueName("«dataInterchange.name»");
- «dataInterchange.getBasicRunConfiguration(false, dataInterchange.getFileURL)»
- «dataInterchange.defaultVariableName».setName(uniqueName);
- «dataInterchange.defaultVariableName».setEventBroker(eventBroker);
+ «dataInterchange.getBasicRunConfiguration(false, dataInterchange.getFileURL, WorkerThreadRunnable.Direction.EXPORT.name)»
+ «dataInterchange.defaultVariableName».setName(UUID.randomUUID().toString());
+ «dataInterchange.defaultVariableName».setEventDispatcher(eventDispatcher);
«dataInterchange.defaultVariableName».setUi(UI.getCurrent());
«dataInterchange.defaultVariableName».setDirection(WorkerThreadRunnable.Direction.EXPORT);
- «IF dataInterchange.progressBarStyle!=null»«dataInterchange.defaultVariableName».setProgressBarStyleName("«dataInterchange.progressBarStyle.literal»");«ENDIF»
findButtonLayout(event.getButton()).addComponent(«dataInterchange.defaultVariableName».getProgressBarArea());
- progressBars.put(uniqueName, «dataInterchange.defaultVariableName»);
+ progressBars.put(«dataInterchange.defaultVariableName».getName(), «dataInterchange.defaultVariableName»);
executorService.execute(«dataInterchange.defaultVariableName»);
log.debug("«dataInterchange.name» export added to executor queue");
}
@@ -555,7 +488,6 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
«body»
menu.addStyleName("menu");
menu.setHeight("100%");
- user.addUserLocaleListener(this);
'''
return body
}
@@ -564,7 +496,7 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
return dataInterchange.name.toFirstLower
}
- def String getBasicRunConfiguration(DataInterchange dataInterchange, boolean fqClass, String fileURL) {
+ def String getBasicRunConfiguration(DataInterchange dataInterchange, boolean fqClass, String fileURL, String direction) {
var className = ""
if (fqClass) {
className = dataInterchange.fullyQualifiedName.toString
@@ -572,10 +504,39 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
else {
className = dataInterchange.name
}
- return '''
- «className» «dataInterchange.getDefaultVariableName» = new «className»();
- «dataInterchange.getDefaultVariableName».setFileURL("«fileURL»");
- «dataInterchange.getDefaultVariableName».setPersistenceService(persistenceService);
+ return
+ '''
+ «className» «dataInterchange.getDefaultVariableName» = new «className»();
+ String url = ProductConfiguration.getDatainterchangeConfiguration();
+ if(url.isEmpty()) {
+ url = System.getProperty("user.home")+"/.osbee/"+"«(dataInterchange.eContainer as DataInterchangePackage).title»Config.xml";
+ }
+ File file = new File(url);
+ if(file.exists()) {
+ FileInputStream fileInput;
+ try {
+ fileInput = new FileInputStream(file);
+ Properties properties = new Properties();
+ properties.loadFromXML(fileInput);
+ fileInput.close();
+ if(properties.getProperty("«dataInterchange.name»-«direction.toLowerCase()»") == null) {
+ «dataInterchange.getDefaultVariableName».setFileURL("«fileURL»");
+ } else {
+ «dataInterchange.getDefaultVariableName».setFileURL(properties.getProperty("«dataInterchange.name»-«direction.toLowerCase()»"));
+ }
+ } catch (IOException e) {
+ StringWriter sw = new StringWriter();
+ e.printStackTrace(new PrintWriter(sw));
+ log.error("{}", sw.toString());
+ return;
+ }
+ } else {
+ «dataInterchange.getDefaultVariableName».setFileURL("«fileURL»");
+ }
+ «dataInterchange.getDefaultVariableName».setPersistenceService(persistenceService);
+ «dataInterchange.getDefaultVariableName».setDataInterchange(dataInterchange);
+ «dataInterchange.getDefaultVariableName».setEventDispatcher(eventDispatcher);
+ «dataInterchange.getDefaultVariableName».setBlobService(blobService);
'''
}
@@ -600,7 +561,7 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
}
/**
- * <p>build the constructor for each smooks class.</p>
+ * <p>build the constructor for each class.</p>
*
* @param pkg
* the current datainterchange inferred {@link DataInterchange}
@@ -623,24 +584,16 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
field.final = true
field.static = true
type.members += field
- field = dataInterchange.toField("smooksOSGIFactory", _typeReferenceBuilder.typeRef(SmooksFactory)) [setInitializer([ append('''null''') ])]
+ field = dataInterchange.toField("dataInterchange", _typeReferenceBuilder.typeRef(IDataInterchange))
type.members += field
- field = dataInterchange.toField("smooks", _typeReferenceBuilder.typeRef(Smooks))
- type.members += field
-// field = dataInterchange.toField("xmlBinding", _typeReferenceBuilder.typeRef(XMLBinding))
-// type.members += field
field = dataInterchange.toField("em", _typeReferenceBuilder.typeRef(EntityManager))
type.members += field
field = dataInterchange.toField("fileURL", _typeReferenceBuilder.typeRef(URL))
type.members += field
- field = dataInterchange.toField("executionContext", _typeReferenceBuilder.typeRef(ExecutionContext))
- type.members += field
field = dataInterchange.toField("file", _typeReferenceBuilder.typeRef(OutputStream))
type.members += field
field = dataInterchange.toField("out", _typeReferenceBuilder.typeRef(OutputStream))
type.members += field
- field = dataInterchange.toField("pollingInterval", _typeReferenceBuilder.typeRef(int)) [setInitializer([ append('''500''') ])]
- type.members += field
field = dataInterchange.toField("transformerFactory", _typeReferenceBuilder.typeRef(TransformerFactory)) [setInitializer([ append('''TransformerFactory.newInstance()''') ])]
type.members += field
field = dataInterchange.toField("transformer", _typeReferenceBuilder.typeRef(Transformer))
@@ -652,13 +605,15 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
*
*/
def void toOperations(JvmDeclaredType type, DataInterchange dataInterchange) {
+ type.members += dataInterchange.toGetter("dataInterchange", _typeReferenceBuilder.typeRef(IDataInterchange))
+ type.members += dataInterchange.toSetter("dataInterchange", _typeReferenceBuilder.typeRef(IDataInterchange))
type.members += dataInterchange.toMethod("run", _typeReferenceBuilder.typeRef(Void::TYPE), [
annotations += _annotationTypesBuilder.annotationRef(Override)
body = [ append('''run(null);''')]
])
type.members += dataInterchange.toMethod("run", _typeReferenceBuilder.typeRef(Void::TYPE), [
parameters += dataInterchange.toParameter("importListener", _typeReferenceBuilder.typeRef(IEntityImportInitializationListener))
- body = [ append('''«dataInterchange.performSmooks»''')]
+ body = [ append('''«dataInterchange.performInterchange»''')]
])
type.members += dataInterchange.toMethod("init", _typeReferenceBuilder.typeRef(boolean), [
visibility = JvmVisibility.PROTECTED
@@ -682,24 +637,26 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
'''
fileURL = null;
String path = filePath;
- if (filePath.startsWith("file://") && !org.eclipse.osbp.utils.common.SystemInformation.isMacOS()) {
+ if (filePath.startsWith("file://") && org.eclipse.osbp.utils.common.SystemInformation.isWindowsOS()) {
path = filePath.substring("file://".length());
}
try {
fileURL = new URL(path);
} catch (MalformedURLException e1) {
- if(e1.getMessage().startsWith("unknown protocol")) {
+ if(e1.getMessage().startsWith("unknown protocol") || e1.getMessage().startsWith("no protocol")) {
try {
fileURL = Paths.get(path).toUri().toURL();
} catch (MalformedURLException e2) {
- log.error(e2.getLocalizedMessage()+e2.getCause());
+ StringWriter sw = new StringWriter();
+ e2.printStackTrace(new PrintWriter(sw));
+ log.error("{}", sw.toString());
}
}
}
'''
/**
- * init smooks factory to create the core import process.
+ * init factory to create the core import process.
* setup listeners for UI communication.
* setup persistence layer.
*
@@ -711,10 +668,6 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
var firstEntity = (dataInterchange.path.iterator.next as DataInterchangeBean)
var body =
'''
- if(getEventBroker()!=null) {
- pollingInterval = UI.getCurrent().getPollInterval();
- UI.getCurrent().setPollInterval(500);
- }
try {
transformerFactory.setAttribute("indent-number", 4);
transformer = transformerFactory.newTransformer();
@@ -723,30 +676,31 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
transformer.setOutputProperty(OutputKeys.STANDALONE, "yes");
transformer.setOutputProperty(OutputKeys.MEDIA_TYPE, "text/xml");
- // init smooks
+ // init
setProgressBarEnabled(true);
setProgressIndeterminated(true);
- if (log.isDebugEnabled()) log.debug("initializing smooks factory");
- smooksOSGIFactory = new SmooksOSGIFactory(FrameworkUtil.getBundle(this.getClass()));
- smooks = smooksOSGIFactory.createInstance(FrameworkUtil.getBundle(this.getClass()).getResource("«DSLOutputConfigurationProvider.SMOOKS_OUTPUT_DIRECTORY»/«dataInterchange.name»-"+direction.toString().toLowerCase()+".xml").openStream());
- // prepare execution context
- executionContext = smooks.createExecutionContext();
- executionContext.setEventListener(this);
+ if (log.isDebugEnabled()) log.debug("initializing datainterchange factory");
// get entity manager
- if (log.isDebugEnabled()) log.debug("opening entity manager to persist smooks results");
+ if (log.isDebugEnabled()) log.debug("opening entity manager to persist results");
getPersistenceService().registerPersistenceUnit("«firstEntity.entity.persistenceUnit»", «firstEntity.entity.fullyQualifiedName».class);
em = getPersistenceService().getEntityManagerFactory("«firstEntity.entity.persistenceUnit»").createEntityManager();
+ if(dataInterchange != null) {
+ dataInterchange.open(FrameworkUtil.getBundle(getClass()),"«DSLOutputConfigurationProvider.SMOOKS_OUTPUT_DIRECTORY»/«dataInterchange.name»-"+direction.toString().toLowerCase()+".xml");
+ dataInterchange.setEventListener(this);
+ dataInterchange.setEntityManager(em);
+ }
«IF dataInterchange.createReport»
- // create a filtering report -- impacts performance
if (log.isDebugEnabled()) log.debug("reporting is on - impacting performance");
- String location = FrameworkUtil.getBundle(this.getClass()).getLocation()+"«DSLOutputConfigurationProvider.SMOOKS_OUTPUT_DIRECTORY»/«dataInterchange.name»-"+direction.toString().toLowerCase()+"-report.html";
- executionContext.setEventListener(new HtmlReportGenerator(location));
- «ENDIF»
- } catch (Exception e) {
- if(getEventBroker()!=null) {
- UI.getCurrent().setPollInterval(pollingInterval);
+ if(dataInterchange != null) {
+ String location = FrameworkUtil.getBundle(this.getClass()).getLocation()+"«DSLOutputConfigurationProvider.SMOOKS_OUTPUT_DIRECTORY»/«dataInterchange.name»-"+direction.toString().toLowerCase()+"-report.html";
+ location = location.replace("reference:file:/", "");
+ dataInterchange.enableReport(location);
}
- log.error(e.getLocalizedMessage()+e.getCause());
+ «ENDIF»
+ } catch (TransformerConfigurationException | SAXException | IOException e) {
+ StringWriter sw = new StringWriter();
+ e.printStackTrace(new PrintWriter(sw));
+ log.error("{}", sw.toString());
return false;
}
if(direction == Direction.EXPORT) {
@@ -756,7 +710,9 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
try {
uri = fileURL.toURI();
} catch (URISyntaxException e) {
- log.error(e.getLocalizedMessage()+e.getCause());
+ StringWriter sw = new StringWriter();
+ e.printStackTrace(new PrintWriter(sw));
+ log.error("{}", sw.toString());
return false;
}
do {
@@ -776,11 +732,15 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
uri = new URI(uri.getScheme()+":"+uri.getPath().substring(0,pos)+openTry+"."+uri.getPath().substring(pos+1));
}
} catch (URISyntaxException e) {
- log.error(e.getLocalizedMessage()+e.getCause());
+ StringWriter sw = new StringWriter();
+ e.printStackTrace(new PrintWriter(sw));
+ log.error("{}", sw.toString());
return false;
}
} catch (IOException e) {
- log.error(e.getLocalizedMessage()+e.getCause());
+ StringWriter sw = new StringWriter();
+ e.printStackTrace(new PrintWriter(sw));
+ log.error("{}", sw.toString());
return false;
}
}while(file == null);
@@ -792,7 +752,7 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
}
/**
- * use smooks factory to create the core import process.
+ * use factory to create the core import process.
* setup listeners for UI communication.
* setup persistence layer.
*
@@ -800,12 +760,17 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
* the current datainterchange inferred {@link DataInterchange}
* @return code fragment
*/
- def String performSmooks(DataInterchange dataInterchange) {
- var firstEntity = (dataInterchange.path.iterator.next as DataInterchangeBean)
+ def String performInterchange(DataInterchange dataInterchange) {
+ var firstEntityBean = (dataInterchange.path.iterator.next as DataInterchangeBean)
+ firstEntityBean.hasBlobMapping = firstEntityBean.mappings.filter(DataInterchangeBlobMapping).size > 0
var body = '''
if(!init(getDirection())) {
return;
}
+ if(dataInterchange == null) {
+ log.error("dataInterchange is not present - download from www.osbee.org");
+ return;
+ }
try {
if(getDirection()==WorkerThreadRunnable.Direction.IMPORT) {
'''
@@ -814,14 +779,8 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
if (importListener != null) {
importListener.notifyInitializationStep("datainterchange «dataInterchange.name.toFirstUpper» load.", 0.4, 0.45, 0, 0);
}
- // execute the smooks filtering
- JavaResult result = new JavaResult();
- InputStream in = fileURL.openStream();
- byte[] contents = StreamUtils.readStream(in);
- setLength(contents.length);
- setAvgElementSize(«IF dataInterchange.elementSize==0»10«ELSE»«dataInterchange.elementSize»«ENDIF»);
- StreamSource source = new StreamSource(new ByteArrayInputStream(contents));
'''
+
for (path:dataInterchange.path) {
for (lookup:path.lookup) {
if (lookup.cached) {
@@ -832,179 +791,233 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
}
}
}
+
+ // import beans as list if no mark latest, other wise as a single bean
body = '''
«body»
- PersistenceUtil.setDAORegister(executionContext, new EntityManagerRegister(em));
- // execute smooks filtering
+ Object result = null;
if (log.isDebugEnabled()) log.debug("filtering starts");
setProgressIndeterminated(false);
- if (importListener != null) {
- importListener.notifyInitializationStep("datainterchange Warehouses load..", 0.4, 0.47, 0, 0);
- }
- smooks.filterSource(executionContext, source, result);
- if (importListener != null) {
- importListener.notifyInitializationStep("datainterchange Warehouses load...", 0.4, 0.48, 0, 0);
- }
- if (log.isDebugEnabled()) log.debug("smooks filtering finished");
+ InputStream contents = dataInterchange.openStream(fileURL);
+ setLength(contents.available());
+ setAvgElementSize(«IF dataInterchange.elementSize==0»10«ELSE»«dataInterchange.elementSize»«ENDIF»);
+ result = dataInterchange.importSource(contents, "«firstEntityBean.entity.name»«IF !firstEntityBean.markLatest»List«ENDIF»"«IF dataInterchange.fileEndpoint.encoding !== null», "«dataInterchange.fileEndpoint.encoding»"«ENDIF»);
+ if (log.isDebugEnabled()) log.debug("filtering finished");
'''
- if (firstEntity.recordList) {
+
+ if (!firstEntityBean.markLatest) {
body = '''
- «body»
- // retrieve bean list
- List<«firstEntity.entity.fullyQualifiedName»> «firstEntity.entity.name.toFirstLower»List = Arrays.asList((«firstEntity.entity.fullyQualifiedName»[]) result.getBean("«firstEntity.entity.name»List"));
- // persist
+ «body»
+ if(result != null) {
+ List<«firstEntityBean.entity.fullyQualifiedName»> «firstEntityBean.entity.name.toFirstLower»List = Arrays.asList((«firstEntityBean.entity.fullyQualifiedName»[]) result);
em.getTransaction().begin();
if (log.isDebugEnabled()) log.debug("persisting results");
- int total = «firstEntity.entity.name.toFirstLower»List.size();
+ int total = «firstEntityBean.entity.name.toFirstLower»List.size();
int count = 0;
long lastStep = System.currentTimeMillis();
if (importListener != null) {
importListener.notifyInitializationStep("datainterchange «dataInterchange.name.toFirstUpper»", 0.4, 0.5, count, total);
}
- for(«firstEntity.entity.fullyQualifiedName» «firstEntity.entity.name.toFirstLower»:«firstEntity.entity.name.toFirstLower»List) {
- try {
- em.persist(«firstEntity.entity.name.toFirstLower»);
+ for(«firstEntityBean.entity.fullyQualifiedName» «firstEntityBean.entity.name.toFirstLower»:«firstEntityBean.entity.name.toFirstLower»List) {
+ '''
+ if(firstEntityBean.hasBlobMapping && dataInterchange.mode!=EntityManagerMode.REMOVE){
+ for(mapping:firstEntityBean.mappings){
+ if(mapping instanceof DataInterchangeBlobMapping){
+ var m = (mapping as DataInterchangeBlobMapping)
+ var blobFileName = '''«firstEntityBean.entity.name.toFirstLower».get«m.property.name.toFirstUpper»()«IF m.blobFileExtension != null» + ".«m.blobFileExtension»"«ENDIF»'''
+ body = '''
+ «body»
+ try (InputStream inputStream = new BufferedInputStream(
+ «IF m.blobPath == null»
+ this.getClass().getClassLoader().getResourceAsStream("/«firstEntityBean.entity.name»/" + «blobFileName»)
+ «ELSE»
+ new FileInputStream("«m.blobPath»/" + «blobFileName»)«ENDIF»)) {
+ String «firstEntityBean.entity.name.toFirstLower»_«m.property.name»Id = getBlobService().createBlobMapping(
+ inputStream,
+ «firstEntityBean.entity.name.toFirstLower».get«m.property.name.toFirstUpper»(),
+ "«m.mimeType»"
+ );
+ «firstEntityBean.entity.name.toFirstLower».set«m.property.name.toFirstUpper»(«firstEntityBean.entity.name.toFirstLower»_«m.property.name»Id);
+ } catch (IOException e) {
+ log.error(e.getLocalizedMessage());
}
- catch (ConstraintViolationException cve) {
- log.error("«firstEntity.entity.name.toFirstLower» #"+(count+1)+"/"+total+": "+cve.getLocalizedMessage());
- for (ConstraintViolation violation : cve.getConstraintViolations()) {
- Object value = violation.getInvalidValue();
- if (value == null) {
- value = "<null>";
+ '''
+ }
+ }
+ }
+ body =
+ '''
+ «body»
+ try {
+ «IF dataInterchange.mode==EntityManagerMode.PERSIST»em.persist(«firstEntityBean.entity.name.toFirstLower»);
+ «ELSEIF dataInterchange.mode==EntityManagerMode.MERGE»em.merge(«firstEntityBean.entity.name.toFirstLower»);
+ «ELSEIF dataInterchange.mode==EntityManagerMode.REMOVE»«firstEntityBean.entity.fullyQualifiedName» toBeRemoved = em.merge(«firstEntityBean.entity.name.toFirstLower»);
+ em.remove(toBeRemoved);«ENDIF»
+ }
+ catch (ConstraintViolationException cve) {
+ log.error("«firstEntityBean.entity.name.toFirstLower» #"+(count+1)+"/"+total+": "+cve.getLocalizedMessage());
+ for (ConstraintViolation<?> violation : cve.getConstraintViolations()) {
+ Object value = violation.getInvalidValue();
+ if (value == null) {
+ value = "<null>";
+ }
+ log.error("- property:"
+ +violation.getLeafBean().toString()+"."+violation.getPropertyPath().toString()
+ +" value:'"+value.toString()
+ +" violation:"+violation.getMessage());
}
- log.error("- property:"
- +violation.getLeafBean().toString()+"."+violation.getPropertyPath().toString()
- +" value:'"+value.toString()
- +" violation:"+violation.getMessage());
+ }
+ count++;
+ long thisStep = System.currentTimeMillis();
+ if ((importListener != null) && ((count % importListener.getInitializationSubStepNotifySize() == 0) || (thisStep-lastStep > 2500))) {
+ lastStep = System.currentTimeMillis();
+ importListener.notifyInitializationStep("datainterchange «dataInterchange.name.toFirstUpper»", 0.4, 0.5, count, total);
}
}
- count++;
- long thisStep = System.currentTimeMillis();
- if ((importListener != null) && ((count % importListener.getInitializationSubStepNotifySize() == 0) || (thisStep-lastStep > 2500))) {
- lastStep = System.currentTimeMillis();
+ if (importListener != null) {
importListener.notifyInitializationStep("datainterchange «dataInterchange.name.toFirstUpper»", 0.4, 0.5, count, total);
}
- }
- if (importListener != null) {
- importListener.notifyInitializationStep("datainterchange «dataInterchange.name.toFirstUpper»", 0.4, 0.5, count, total);
- }
- if (log.isDebugEnabled()) log.debug("committing results");
- em.getTransaction().commit();
+ if (log.isDebugEnabled()) log.debug("committing results");
+
+ em.getTransaction().commit();
'''
} else {
body = '''
«body»
- // retrieve the root bean
- «firstEntity.entity.fullyQualifiedName» «firstEntity.entity.name.toFirstLower» = («firstEntity.entity.fullyQualifiedName») result.getBean("«firstEntity.entity.name»");
- // persist
- em.getTransaction().begin();
- if (log.isDebugEnabled()) log.debug("persisting results");
- em.persist(«firstEntity.entity.name.toFirstLower»);
- if (log.isDebugEnabled()) log.debug("committing results");
- em.getTransaction().commit();
+ if(result != null) {
+ «firstEntityBean.entity.fullyQualifiedName» «firstEntityBean.entity.name.toFirstLower» = («firstEntityBean.entity.fullyQualifiedName») result;
+ '''
+ if(firstEntityBean.hasBlobMapping && dataInterchange.mode!=EntityManagerMode.REMOVE){
+ for(mapping:firstEntityBean.mappings){
+ if(mapping instanceof DataInterchangeBlobMapping){
+ var m = (mapping as DataInterchangeBlobMapping)
+ var blobFileName = '''«firstEntityBean.entity.name.toFirstLower».get«m.property.name.toFirstUpper»()«IF m.blobFileExtension != null» + "." + "«m.blobFileExtension»"«ENDIF»'''
+ body = '''
+ «body»
+ try (InputStream inputStream = new BufferedInputStream(
+ «IF m.blobPath == null»
+ this.getClass().getClassLoader().getResourceAsStream("/«firstEntityBean.entity.name»/" + «blobFileName»)
+ «ELSE»
+ new FileInputStream("«m.blobPath»/" + «blobFileName»)«ENDIF»)) {
+ String «m.property.name.toFirstLower»Id = blobUpload.createBlobMapping(
+ inputStream,
+ «firstEntityBean.entity.name.toFirstLower».get«m.property.name.toFirstUpper»(),
+ "«m.mimeType»",
+ blobAPI
+ );
+ «firstEntityBean.entity.name.toFirstLower».set«m.property.name.toFirstUpper»(«m.property.name.toFirstLower»Id);
+ } catch (IOException e) {
+ log.error(e.getLocalizedMessage());
+ }
+ '''
+ }
+ }
+ }
+ body = '''
+ «body»
+ em.getTransaction().begin();
+ if (log.isDebugEnabled()) log.debug("storing results");
+ «IF dataInterchange.mode==EntityManagerMode.PERSIST»em.persist(«firstEntityBean.entity.name.toFirstLower»);
+ «ELSEIF dataInterchange.mode==EntityManagerMode.MERGE»em.merge(«firstEntityBean.entity.name.toFirstLower»);
+ «ELSEIF dataInterchange.mode==EntityManagerMode.REMOVE»«firstEntityBean.entity.fullyQualifiedName» toBeRemoved = em.merge(«firstEntityBean.entity.name.toFirstLower»);
+ em.remove(toBeRemoved);«ENDIF»
+ if (log.isDebugEnabled()) log.debug("committing results");
+ em.getTransaction().commit();
'''
}
- if (firstEntity.markLatest) {
+ if (firstEntityBean.markLatest) {
body = '''
«body»
- if (log.isDebugEnabled()) log.debug("mark results as latest import");
- em.setProperty(QueryHints.PESSIMISTIC_LOCK, PessimisticLock.Lock);
- em.getTransaction().begin();
- em.createQuery("update «firstEntity.entity.name» set «firstEntity.latestProperty.name» = 0").executeUpdate();
- em.createQuery("update «firstEntity.entity.name» set «firstEntity.latestProperty.name» = 1 where «»id= :id").setParameter("id", «firstEntity.entity.name.toFirstLower».getId()).executeUpdate();
- if (log.isDebugEnabled()) log.debug("committing mark");
- em.getTransaction().commit();
+ if (log.isDebugEnabled()) log.debug("mark results as latest import");
+ em.setProperty(QueryHints.PESSIMISTIC_LOCK, PessimisticLock.Lock);
+ em.getTransaction().begin();
+ em.createQuery("update «firstEntityBean.entity.name» set «firstEntityBean.latestProperty.name» = 0").executeUpdate();
+ em.createQuery("update «firstEntityBean.entity.name» set «firstEntityBean.latestProperty.name» = 1 where «»id= :id").setParameter("id", «firstEntityBean.entity.name.toFirstLower».getId()).executeUpdate();
+ if (log.isDebugEnabled()) log.debug("committing mark");
+ em.getTransaction().commit();
'''
}
- if (dataInterchange.refreshEnabled) {
+ for (path:dataInterchange.path) {
+ var entity = path.entity
body = '''
«body»
- if(getEventBroker()!=null) {
- getEventBroker().send(EventBrokerMsg.REFRESH_VIEW+"«dataInterchange.refresh»", "*");
- }
+ if(getEventDispatcher() != null) {
+ EventDispatcherEvent «entity.name.toLowerCase»Event = new EventDispatcherEvent(EventDispatcherCommand.REFRESH, "«entity.fullyQualifiedName»", "«dataInterchange.fullyQualifiedName»");
+ getEventDispatcher().sendEvent(«entity.name.toLowerCase»Event);
+ }
'''
}
body = '''
«body»
- if (log.isDebugEnabled()) log.debug("results persisted");
+ if (log.isDebugEnabled()) log.debug("results persisted");
+ } else {
+ if (log.isDebugEnabled()) log.debug("no results found");
+ }
'''
- var iter = dataInterchange.path.iterator
- var root = (iter.next as DataInterchangeBean).entity
- body = '''
- «body»
- } else {
- if (log.isDebugEnabled()) log.debug("prepare export");
-««« «IF dataInterchange.fileEndpoint instanceof DataInterchangeFileXML»
-««« // Create and initilise the XMLBinding instance...
-««« xmlBinding = new XMLBinding(smooks);
-««« xmlBinding.intiailize();
-««« xmlBinding.setOmitXMLDeclaration(true);
-««« «ENDIF»
-««« «dataInterchange.buildEntityGraph(iter, root)»
- int pageNumber = 1;
- int pageSize = 1000;
- CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder();
-
- if (log.isDebugEnabled()) log.debug("evaluate root entity count");
- CriteriaQuery<Long> countQuery = criteriaBuilder.createQuery(Long.class);
- countQuery.select(criteriaBuilder.count(countQuery.from(«firstEntity.entity.fullyQualifiedName».class)));
- Long count = em.createQuery(countQuery).getSingleResult();
- if (log.isDebugEnabled()) log.debug("root entity count is "+count.toString());
- setLength(count*«IF dataInterchange.elementSize==0»10«ELSE»«dataInterchange.elementSize»«ENDIF»);
- setAvgElementSize(1);
-
- CriteriaQuery<«firstEntity.entity.fullyQualifiedName»> criteriaQuery = criteriaBuilder.createQuery(«firstEntity.entity.fullyQualifiedName».class);
- Root<«firstEntity.entity.fullyQualifiedName»> from = criteriaQuery.from(«firstEntity.entity.fullyQualifiedName».class);
- /* ... not necessary due to eager loading of entities via JPA ... «dataInterchange.buildJoins(iter, root)» ...*/
- CriteriaQuery<«firstEntity.entity.fullyQualifiedName»> select = criteriaQuery.multiselect(from);
-
- TypedQuery<«firstEntity.entity.fullyQualifiedName»> typedQuery = em.createQuery(select);
-««« typedQuery.setHint(QueryHints.JPA_LOAD_GRAPH, «firstEntity.entity.name.toFirstLower»Graph);
- setProgressIndeterminated(false);
- while (pageNumber < count.intValue()) {
- if (log.isDebugEnabled()) log.debug("fetch and process entry "+pageNumber+" to "+(pageNumber+pageSize));
- typedQuery.setFirstResult(pageNumber - 1);
- typedQuery.setMaxResults(pageSize);
- List<«firstEntity.entity.fullyQualifiedName»> queryResults = typedQuery.getResultList();
-««« «IF dataInterchange.fileEndpoint instanceof DataInterchangeFileCSV»
+ var root = dataInterchange.path.findFirst[it|!it.markLatest]
+ if(root !== null) {
+ body = '''
+ «body»
+ } else {
+ if (log.isDebugEnabled()) log.debug("prepare export");
+ CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder();
+
+ if (log.isDebugEnabled()) log.debug("evaluate root entity count");
+ CriteriaQuery<Long> countQuery = criteriaBuilder.createQuery(Long.class);
+ countQuery.select(criteriaBuilder.count(countQuery.from(«root.entity.fullyQualifiedName».class)));
+ Long count = em.createQuery(countQuery).getSingleResult();
+ if (log.isDebugEnabled()) log.debug("root entity count is "+count.toString());
+ setLength(count*«IF dataInterchange.elementSize==0»10«ELSE»«dataInterchange.elementSize»«ENDIF»);
+ setAvgElementSize(1);
+
+ CriteriaQuery<«root.entity.fullyQualifiedName»> criteriaQuery = criteriaBuilder.createQuery(«root.entity.fullyQualifiedName».class);
+ Root<«root.entity.fullyQualifiedName»> from = criteriaQuery.from(«root.entity.fullyQualifiedName».class);
+ CriteriaQuery<«root.entity.fullyQualifiedName»> select = criteriaQuery.multiselect(from);
+
+ TypedQuery<«root.entity.fullyQualifiedName»> typedQuery = em.createQuery(select);
+ setProgressIndeterminated(false);
+ List<«root.entity.fullyQualifiedName»> allResults = typedQuery.getResultList();
StringWriter writer = new StringWriter();
- smooks.filterSource(executionContext, new JavaSource(queryResults), new StreamResult(writer));
+ if(dataInterchange != null) {
+ dataInterchange.exportSource(allResults, writer);
+ }
out.write(writer.toString().getBytes());
-««« «ELSEIF dataInterchange.fileEndpoint instanceof DataInterchangeFileXML»
-««« for(«firstEntity.entity.fullyQualifiedName» row:queryResults) {
-««« String outXML = xmlBinding.toXML(row);
-««« out.write(outXML.getBytes());
-««« }
-««« JAXBContext jc = JAXBContext.newInstance(«firstEntity.entity.fullyQualifiedName».class);
-««« Marshaller marshaller = jc.createMarshaller();
-««« marshaller.marshal(queryResults, out);
-««« «ENDIF»
- pageNumber += pageSize;
+ if (log.isDebugEnabled()) log.debug("export finished");
}
- if (log.isDebugEnabled()) log.debug("export finished");
- }
- '''
+ '''
+ } else {
+ body = '''
+ «body»
+ }'''
+ }
body = '''
«body»
- } catch (Exception e) {
- log.error("«dataInterchange.name»: "+e.getLocalizedMessage()+e.getCause(), e);
+ } catch (DataInterchangeException | IOException e) {
+ StringWriter sw = new StringWriter();
+ e.printStackTrace(new PrintWriter(sw));
+ log.error("«dataInterchange.name»:{}", sw.toString());
} finally {
if(file != null) {
try {
out.close();
file.close();
} catch (IOException e) {
- log.error(e.getLocalizedMessage()+e.getCause());
+ StringWriter sw = new StringWriter();
+ e.printStackTrace(new PrintWriter(sw));
+ log.error("«dataInterchange.name»:{}", sw.toString());
}
}
- smooks.close();
- if(getEventBroker()!=null) {
- UI.getCurrent().setPollInterval(pollingInterval);
+ if(dataInterchange != null) {
+ dataInterchange.close();
}
- // close everything
if (em != null) {
em.close();
}
+ // remove progress bar
+ if(getEventDispatcher() != null) {
+ EventDispatcherEvent evnt = new EventDispatcherEvent(EventDispatcherCommand.REMOVE_PROGRESSBAR, getName(), "DataInterchangeWorkerThread");
+ getEventDispatcher().sendEvent(evnt);
+ }
if (log.isDebugEnabled()) log.debug("datainterchange finished");
}
'''
@@ -1012,29 +1025,23 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
}
/**
- * This seems to be not necessary, because JPA seems to load the entities eager itself!
- * With this functionality active, the sql result set would contain all root entities multiple times!
+ * Handles the worker progress bar and provides the appropriate code.
+ *
+ * @param pkg
+ * the current package inferred {@link DataInterchangePackage}
+ * @return code fragment
*/
- @Deprecated
- def buildJoins(DataInterchange interchange, Iterator<DataInterchangeBean> iter, LEntity rootEntity) {
- var root = rootEntity
+ def String removeProgressBar(DataInterchangePackage pkg) {
var body = ""
- if(iter.hasNext) {
- body = '''«body»from'''
- }
- while(iter.hasNext) {
- var entity = (iter.next as DataInterchangeBean).entity
- for(f:root.features) {
- if (f instanceof LReference && f.toMany && f.type instanceof LEntity && f.type.toName.equals(entity.toName)) {
- // one to many for the entity in sequence is found
- body = '''«body».fetch("«f.name»", JoinType.LEFT)'''
- }
- }
- root = entity
- }
- if(!body.empty) {
- body = body + ";"
+ body = '''
+ «body»
+ // a worker notified this view that it is finished
+ if (progressBars.containsKey(workerName)) {
+ final WorkerThreadRunnable worker = progressBars.get(workerName);
+ ((VerticalLayout)worker.getProgressBarArea().getParent()).removeComponent(worker.getProgressBarArea());
+ progressBars.remove(workerName);
}
+ '''
return body
}
@@ -1052,4 +1059,18 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
button.setDescription(dslMetadataService.translate(locale.toLanguageTag(),(String)i18nKeys.get(0))+" "+dslMetadataService.translate(locale.toLanguageTag(),(String)i18nKeys.get(2)));
}
}'''
+
+ def String receiveEvent(DataInterchangePackage pkg) {
+ var body = ""
+ body = '''
+ «body»
+ switch(event.getCommand()) {
+ case REMOVE_PROGRESSBAR:
+ removeProgressBar(event.getTopic());
+ break;
+ }
+ '''
+ return body
+ }
+
}
diff --git a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/jvmmodel/DataDSLModelGenerator.xtend b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/jvmmodel/DataDSLModelGenerator.xtend
index b67aa60..08b212f 100644
--- a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/jvmmodel/DataDSLModelGenerator.xtend
+++ b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/jvmmodel/DataDSLModelGenerator.xtend
@@ -17,20 +17,25 @@
package org.eclipse.osbp.xtext.datainterchange.jvmmodel
-import com.google.common.collect.Lists
import com.vaadin.shared.ui.label.ContentMode
import com.vaadin.ui.Button
import com.vaadin.ui.Button.ClickEvent
import com.vaadin.ui.Button.ClickListener
+import com.vaadin.ui.Component
import com.vaadin.ui.HorizontalLayout
import com.vaadin.ui.Label
import com.vaadin.ui.NativeButton
+import com.vaadin.ui.Panel
import com.vaadin.ui.ProgressBar
import com.vaadin.ui.UI
+import java.io.BufferedInputStream
import java.io.BufferedOutputStream
-import java.io.ByteArrayInputStream
+import java.io.File
+import java.io.FileInputStream
+import java.io.FileOutputStream
import java.io.IOException
import java.io.InputStream
+import java.io.PrintWriter
import java.io.StringReader
import java.io.StringWriter
import java.net.MalformedURLException
@@ -47,6 +52,8 @@ import java.util.Arrays
import java.util.HashMap
import java.util.List
import java.util.MissingResourceException
+import java.util.Properties
+import java.util.UUID
import java.util.concurrent.Executors
import java.util.concurrent.TimeUnit
import javax.inject.Inject
@@ -62,32 +69,44 @@ import javax.validation.ConstraintViolationException
import javax.xml.parsers.DocumentBuilderFactory
import javax.xml.transform.OutputKeys
import javax.xml.transform.Transformer
+import javax.xml.transform.TransformerConfigurationException
import javax.xml.transform.TransformerFactory
import javax.xml.transform.dom.DOMSource
import javax.xml.transform.stream.StreamResult
import javax.xml.transform.stream.StreamSource
import org.apache.commons.lang.StringEscapeUtils
+import org.apache.log4j.lf5.util.StreamUtils
import org.eclipse.e4.core.di.extensions.EventUtils
import org.eclipse.e4.core.services.events.IEventBroker
import org.eclipse.emf.common.util.EList
import org.eclipse.emf.ecore.EObject
import org.eclipse.emf.ecore.resource.Resource
import org.eclipse.emf.ecore.util.EcoreUtil
+import org.eclipse.osbp.blob.service.BlobService
+import org.eclipse.osbp.blob.service.BlobTypingAPI
+import org.eclipse.osbp.datainterchange.api.DataInterchangeException
+import org.eclipse.osbp.datainterchange.api.IDataInterchange
import org.eclipse.osbp.dsl.entity.xtext.extensions.EntityTypesBuilder
import org.eclipse.osbp.dsl.entity.xtext.extensions.ModelExtensions
import org.eclipse.osbp.dsl.semantic.common.types.LAttribute
import org.eclipse.osbp.dsl.semantic.common.types.LDataType
+import org.eclipse.osbp.dsl.semantic.common.types.LFeature
import org.eclipse.osbp.dsl.semantic.common.types.LReference
import org.eclipse.osbp.dsl.semantic.entity.LEntity
import org.eclipse.osbp.dsl.semantic.entity.LEntityAttribute
import org.eclipse.osbp.dsl.semantic.entity.LEntityFeature
+import org.eclipse.osbp.dsl.semantic.entity.LEntityReference
import org.eclipse.osbp.eventbroker.EventBrokerMsg
+import org.eclipse.osbp.preferences.ProductConfiguration
+import org.eclipse.osbp.runtime.common.event.EventDispatcherEvent
+import org.eclipse.osbp.runtime.common.event.EventDispatcherEvent.EventDispatcherCommand
+import org.eclipse.osbp.ui.api.datamart.IDataMart.EType
import org.eclipse.osbp.utils.entityhelper.DataType
-import org.eclipse.osbp.utils.entitymock.IEntityImportInitializationListener
import org.eclipse.osbp.xtext.addons.EObjectHelper
import org.eclipse.osbp.xtext.basic.generator.BasicDslGeneratorUtils
import org.eclipse.osbp.xtext.datainterchange.DataInterchange
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeBean
+import org.eclipse.osbp.xtext.datainterchange.DataInterchangeBlobMapping
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeEntityExpression
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeFile
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeFileCSV
@@ -95,14 +114,19 @@ import org.eclipse.osbp.xtext.datainterchange.DataInterchangeFileEDI
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeFileXML
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeFormat
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeLookup
+import org.eclipse.osbp.xtext.datainterchange.DataInterchangeMapping
import org.eclipse.osbp.xtext.datainterchange.DataInterchangePackage
import org.eclipse.osbp.xtext.datainterchange.DataInterchangePredefinedExpression
+import org.eclipse.osbp.xtext.datainterchange.DataInterchangeValueMapping
+import org.eclipse.osbp.xtext.datainterchange.EntityManagerMode
import org.eclipse.osbp.xtext.datainterchange.common.WorkerThreadRunnable
+import org.eclipse.osbp.xtext.entitymock.common.IEntityImportInitializationListener
import org.eclipse.osbp.xtext.i18n.DSLOutputConfigurationProvider
import org.eclipse.osbp.xtext.i18n.I18NModelGenerator
import org.eclipse.persistence.config.PersistenceUnitProperties
import org.eclipse.persistence.config.PessimisticLock
import org.eclipse.persistence.config.QueryHints
+import org.eclipse.xtext.common.types.JvmEnumerationLiteral
import org.eclipse.xtext.generator.IFileSystemAccess
import org.eclipse.xtext.generator.IOutputConfigurationProvider
import org.eclipse.xtext.naming.IQualifiedNameProvider
@@ -110,22 +134,6 @@ import org.eclipse.xtext.naming.QualifiedName
import org.eclipse.xtext.xbase.compiler.GeneratorConfig
import org.eclipse.xtext.xbase.compiler.ImportManager
import org.eclipse.xtext.xbase.jvmmodel.IJvmDeclaredTypeAcceptor
-import org.eclipse.osbp.ui.api.datamart.IDataMart.EType
-import org.milyn.Smooks
-import org.milyn.SmooksFactory
-import org.milyn.SmooksOSGIFactory
-import org.milyn.container.ExecutionContext
-import org.milyn.event.report.HtmlReportGenerator
-import org.milyn.event.types.ElementPresentEvent
-import org.milyn.event.types.FilterLifecycleEvent
-import org.milyn.event.types.FilterLifecycleEvent.EventType
-import org.milyn.io.StreamUtils
-import org.milyn.javabean.binding.xml.XMLBinding
-import org.milyn.payload.JavaResult
-import org.milyn.payload.JavaSource
-import org.milyn.payload.StringResult
-import org.milyn.persistence.util.PersistenceUtil
-import org.milyn.scribe.adapter.jpa.EntityManagerRegister
import org.osgi.framework.Bundle
import org.osgi.framework.BundleContext
import org.osgi.framework.FrameworkUtil
@@ -135,6 +143,9 @@ import org.slf4j.Logger
import org.slf4j.LoggerFactory
import org.w3c.dom.Document
import org.w3c.dom.Element
+import org.xml.sax.SAXException
+import org.eclipse.osbp.dsl.semantic.common.types.LEnumLiteral
+import org.eclipse.osbp.dsl.semantic.common.types.LEnum
class ParameterValue {
var public HashMap<String,String> modifiers = <String,String>newHashMap()
@@ -164,10 +175,10 @@ class DataDSLModelGenerator extends I18NModelGenerator {
def String generateKey(String name, QualifiedName packageName) {
var pattern = "(\\W)"
- if (name != null) {
+ if (name !== null) {
var newName = name.replaceAll(pattern ,"_").toLowerCase
System.out.println(newName)
- if (packageName != null) {
+ if (packageName !== null) {
return packageName.toString.concat(".").concat(newName)
}
else {
@@ -193,13 +204,9 @@ class DataDSLModelGenerator extends I18NModelGenerator {
transformer.setOutputProperty(OutputKeys.STANDALONE, "yes")
transformer.setOutputProperty(OutputKeys.MEDIA_TYPE, "text/xml")
- var configList = Lists.newArrayList(outputConfig.outputConfigurations)
- var outputDirectory = ""
- for(config:configList) {
- if(config.name.equals(DSLOutputConfigurationProvider.SMOOKS_OUTPUT_ONCE)) {
- outputDirectory = config.outputDirectory
- }
- }
+ EcoreUtil.getAllContents(EObjectHelper.getSemanticElement(input), false).filter(typeof(DataInterchangePackage)).forEach[
+ fsa.generatePathConfig(it)
+ ]
EcoreUtil.getAllContents(EObjectHelper.getSemanticElement(input), false).filter(typeof(DataInterchange)).forEach[
// create all smooks config files
fsa.generateImportConfigStub(it)
@@ -207,6 +214,37 @@ class DataDSLModelGenerator extends I18NModelGenerator {
]
super.doGenerate(input, fsa)
}
+
+ def void generatePathConfig(IFileSystemAccess fsa, DataInterchangePackage dataInterchangePkg) {
+ var dir = new File('''«System.getProperty("user.home")»/.osbee''')
+ if(!dir.exists) {
+ dir.mkdir
+ }
+ var file = new File('''«System.getProperty("user.home")»/.osbee/«dataInterchangePkg.title»Config.xml''');
+ file.setWritable(true, false);
+ if(!file.exists) {
+ file.createNewFile
+ val properties = new Properties();
+ dataInterchangePkg.datInts.forEach[
+ properties.put('''«it.name»-«WorkerThreadRunnable.Direction.EXPORT.toString().toLowerCase()»'''.toString, it.getUrl)
+ properties.put('''«it.name»-«WorkerThreadRunnable.Direction.IMPORT.toString().toLowerCase()»'''.toString, it.getUrl)
+ ]
+ var fileOutput = new FileOutputStream(file);
+ properties.storeToXML(fileOutput, "dataInterchange file URLs");
+ fileOutput.close
+ }
+ }
+
+ def getUrl(DataInterchange di) {
+ switch(di.fileEndpoint) {
+ DataInterchangeFileXML:
+ return (di.fileEndpoint as DataInterchangeFileXML).fileURL
+ DataInterchangeFileCSV:
+ return (di.fileEndpoint as DataInterchangeFileCSV).fileURL
+ DataInterchangeFileEDI:
+ return (di.fileEndpoint as DataInterchangeFileEDI).fileURL
+ }
+ }
def void generateExportConfigStub(IFileSystemAccess fsa, DataInterchange dataInterchange) {
var body = ""
@@ -216,8 +254,8 @@ class DataDSLModelGenerator extends I18NModelGenerator {
var config = document.createElement("resource-config")
var selector = document.createAttribute("selector")
selector.textContent = "global-parameters"
- config.attributeNode = selector;
- var pEl = document.createElement("param");
+ config.attributeNode = selector
+ var pEl = document.createElement("param")
var name = document.createAttribute("name")
name.textContent = "stream.filter.type"
pEl.attributeNode = name
@@ -234,10 +272,10 @@ class DataDSLModelGenerator extends I18NModelGenerator {
var quote = ""
cartridges.put("xmlns:ftl", "http://www.milyn.org/xsd/smooks/freemarker-1.1.xsd")
var csv = dataInterchange.fileEndpoint as DataInterchangeFileCSV
- if(csv.delimiter != null) {
+ if(csv.delimiter !== null) {
delimiter = StringEscapeUtils.unescapeHtml(csv.delimiter)
}
- if(csv.quoteCharacter != null) {
+ if(csv.quoteCharacter !== null) {
quote = StringEscapeUtils.unescapeHtml(csv.quoteCharacter)
}
dataInterchange.generateExportConfig(document, dataInterchange.fileEndpoint, delimiter, quote)
@@ -254,7 +292,7 @@ class DataDSLModelGenerator extends I18NModelGenerator {
}
var source = new DOMSource(document)
- var res = new StringResult()
+ var res = new DataResult()
transformer.transform(source, res)
body = res.result
fsa.generateFile('''«dataInterchange.name»-«WorkerThreadRunnable.Direction.EXPORT.toString().toLowerCase()».xml''', DSLOutputConfigurationProvider.SMOOKS_OUTPUT_ONCE, body)
@@ -267,7 +305,7 @@ class DataDSLModelGenerator extends I18NModelGenerator {
var domImpl = db.DOMImplementation
var document = domImpl.createDocument("http://www.milyn.org/xsd/smooks-1.1.xsd", "smooks-resource-list", null)
- var fieldList = <LEntityAttribute>newArrayList()
+ var fieldList = <LEntityFeature>newArrayList()
var cartridges = <String,String>newHashMap()
var parameters = <String,ParameterValue>newHashMap()
// for the meaning of cartridges see: http://www.smooks.org/mediawiki/index.php?title=V1.5:Smooks_v1.5_User_Guidecartridges
@@ -342,7 +380,7 @@ class DataDSLModelGenerator extends I18NModelGenerator {
}
document.documentElement.appendChild(params)
var source = new DOMSource(document)
- var res = new StringResult()
+ var res = new DataResult()
transformer.transform(source, res)
body = res.result
fsa.generateFile('''«dataInterchange.name»-«WorkerThreadRunnable.Direction.IMPORT.toString().toLowerCase()».xml''', DSLOutputConfigurationProvider.SMOOKS_OUTPUT_ONCE, body)
@@ -351,23 +389,47 @@ class DataDSLModelGenerator extends I18NModelGenerator {
def input(DataInterchangeFileXML xml, DataInterchange interchange, Document doc) {
}
- def input(DataInterchangeFileCSV csv, DataInterchange interchange, List<LEntityAttribute> fieldList, Document doc) {
+ def input(DataInterchangeFileCSV csv, DataInterchange interchange, List<LEntityFeature> fieldList, Document doc) {
var reader = doc.createElement("csv:reader")
var fields = doc.createAttribute("fields")
var fldList = <String>newArrayList
+ var mappingInterchanges = interchange.path.filter[it.hasAttributeMapping]
+ var lookupInterchanges = interchange.path.filter[it.hasReferenceLookup]
for(f:fieldList) {
- fldList.add(f.toName)
+ if(f instanceof LEntityAttribute){
+ if(mappingInterchanges.length > 0){
+ for(mi:mappingInterchanges){
+ for(mapping:mi.mappings){
+ if(!fldList.contains(mapping.data) && f.name.equals(mapping.data)){
+ fldList.add(f.toName)
+ }
+ }
+ }
+ }else{
+ fldList.add(f.toName)
+ }
+ }else if(f instanceof LEntityReference && lookupInterchanges.length > 0){
+ // add a reference only if it is specified by a lookup mapping
+ for(li:lookupInterchanges){
+ for(lup:li.lookup){
+ if(!fldList.contains(lup.dataMap) && f.name.equals(lup.dataMap)){
+ fldList.add(f.toName)
+ }
+ }
+ }
+ }
}
+
fields.textContent = fldList.join(",")
reader.attributeNode = fields
-
- if(csv.delimiter != null) {
+
+ if(csv.delimiter !== null) {
var sep = doc.createAttribute("separator")
sep.textContent = csv.delimiter
reader.attributeNode = sep
}
- if(csv.quoteCharacter != null) {
+ if(csv.quoteCharacter !== null) {
var quote = doc.createAttribute("quote")
quote.textContent = StringEscapeUtils.unescapeXml(csv.quoteCharacter)
reader.attributeNode = quote;
@@ -397,7 +459,7 @@ class DataDSLModelGenerator extends I18NModelGenerator {
doc.documentElement.appendChild(reader)
}
- def createFreemarker(Document doc, String templateString) {
+ def createFreemarker(Document doc, String templateString, DataInterchangeFile endPoint) {
var freemarker = doc.createElement("ftl:freemarker")
var apply = doc.createAttribute("applyOnElement")
apply.textContent = "#document"
@@ -405,7 +467,7 @@ class DataDSLModelGenerator extends I18NModelGenerator {
doc.documentElement.appendChild(freemarker)
var template = doc.createElement("ftl:template")
- var tplName = doc.createCDATASection(templateString.replaceAll("\r",""));
+ var tplName = doc.createCDATASection('''«IF endPoint.encoding !== null»<#ftl encoding='«endPoint.encoding»'>«ENDIF»'''+templateString.replaceAll("\r",""));
template.appendChild(tplName)
freemarker.appendChild(template)
}
@@ -420,7 +482,7 @@ class DataDSLModelGenerator extends I18NModelGenerator {
clazz.textContent = '''«className»«IF isList»[]«ENDIF»'''
bean.attributeNode = clazz
// is mapping given?
- if(elementMap != null) {
+ if(elementMap !== null) {
var create = doc.createAttribute("createOnElement")
create.textContent = elementMap
bean.attributeNode = create
@@ -432,10 +494,10 @@ class DataDSLModelGenerator extends I18NModelGenerator {
var Element value = null
// try to find a value element, or create a new one
var node = parent.firstChild
- while(node != null && (!node.nodeName.equals("jb:value") || !node.attributes.getNamedItem("property").nodeValue.equals(propertyName))) {
+ while(node !== null && (!node.nodeName.equals("jb:value") || (node.attributes.getNamedItem("property") !== null && !node.attributes.getNamedItem("property").nodeValue.equals(propertyName)))) {
node = node.nextSibling
}
- if(node == null) {
+ if(node === null) {
value = doc.createElement("jb:value")
parent.appendChild(value)
} else {
@@ -445,7 +507,7 @@ class DataDSLModelGenerator extends I18NModelGenerator {
var property = doc.createAttribute("property")
property.textContent = propertyName
value.attributeNode = property
- if(decoderName != null) {
+ if(decoderName !== null) {
var decoder = doc.createAttribute("decoder")
decoder.textContent = decoderName
value.attributeNode = decoder
@@ -458,26 +520,26 @@ class DataDSLModelGenerator extends I18NModelGenerator {
return value
}
- def addMapping(Document doc, Element parent, String propertyName, String dataName) {
+ def addMapping(Document doc, Element parent, String propertyName, String dataName, boolean byAttribute) {
var elementMap = ""
var attr = parent.attributes.getNamedItem("createOnElement")
- if (attr != null) {
+ if (attr !== null) {
elementMap = attr.textContent
}
var Element value = null
// try to find a value element, or create a new one
var node = parent.firstChild
- while(node != null && node.hasAttributes && (!node.nodeName.equals("jb:value") || !node.attributes.getNamedItem("property").nodeValue.equals(propertyName))) {
+ while(node !== null && node.hasAttributes && (!node.nodeName.equals("jb:value") || (node.attributes.getNamedItem("property") !== null && !node.attributes.getNamedItem("property").nodeValue.equals(propertyName)))) {
node = node.nextSibling
}
- if(node == null) {
+ if(node === null) {
value = doc.createElement("jb:value")
parent.appendChild(value)
} else {
value = node as Element
}
var data = doc.createAttribute("data")
- data.textContent = elementMap+"/"+dataName
+ data.textContent = '''«elementMap»/«IF byAttribute»@«ENDIF»«dataName»'''
value.attributeNode = data
}
@@ -498,12 +560,12 @@ class DataDSLModelGenerator extends I18NModelGenerator {
var beanIdRef = doc.createAttribute("beanIdRef")
beanIdRef.textContent = beanIdRefName
value.attributeNode = beanIdRef
- if(propertyName != null) {
+ if(propertyName !== null) {
var property = doc.createAttribute("property")
property.textContent = propertyName
value.attributeNode = property
}
- if(setterName != null) {
+ if(setterName !== null) {
var setter = doc.createAttribute("setterMethod")
setter.textContent = setterName
value.attributeNode = setter
@@ -524,7 +586,7 @@ class DataDSLModelGenerator extends I18NModelGenerator {
def Element createDaoLocator(Document doc, Element parent, String beanIdName, String elementMap, boolean allowNoResult, boolean allowNonuniqueResult) {
var locator = doc.createElement("dao:locator")
- if (parent == null) {
+ if (parent === null) {
doc.documentElement.appendChild(locator)
} else {
parent.appendChild(locator)
@@ -533,7 +595,7 @@ class DataDSLModelGenerator extends I18NModelGenerator {
beanId.textContent = beanIdName
locator.attributeNode = beanId
// mapping given?
- if(elementMap != null) {
+ if(elementMap !== null) {
var lookupOnElement = doc.createAttribute("lookupOnElement")
lookupOnElement.textContent = elementMap
locator.attributeNode = lookupOnElement
@@ -558,13 +620,13 @@ class DataDSLModelGenerator extends I18NModelGenerator {
return daoQuery
}
- def Element createDaoParam(Document doc, Element parent, String paramName, String paramValue, String elementMap, String dataMap) {
+ def Element createDaoParam(Document doc, Element parent, String paramName, String paramValue, String elementMap, String dataMap, boolean byAttribute) {
var Element daoParams = null
var node = parent.firstChild
- while(node != null && !node.nodeName.equals("dao:params")) {
+ while(node !== null && !node.nodeName.equals("dao:params")) {
node = node.nextSibling
}
- if(node == null) {
+ if(node === null) {
daoParams = doc.createElement("dao:params")
parent.appendChild(daoParams)
} else {
@@ -579,9 +641,9 @@ class DataDSLModelGenerator extends I18NModelGenerator {
decoder.textContent = paramValue
daoValue.attributeNode = decoder
// mapping given?
- if(dataMap != null) {
+ if(dataMap !== null) {
var data = doc.createAttribute("data")
- data.textContent = elementMap+"/"+dataMap
+ data.textContent = '''«elementMap»/«IF byAttribute»@«ENDIF»«dataMap»'''
daoValue.attributeNode = data
}
return daoParams
@@ -590,31 +652,40 @@ class DataDSLModelGenerator extends I18NModelGenerator {
def generateExportConfig(DataInterchange dataInterchange, Document doc, DataInterchangeFile endPoint, String delimiter, String quote) {
var substitutionMap = <String,String>newHashMap
var substitutionCount = 0
- var fieldList = <LEntityAttribute>newArrayList
+ var fieldList = <LEntityFeature>newArrayList
var String rootEntityName = null
var Document ftlDocument = null
var Element bean = null
var vector = "vector"
for(path : dataInterchange.path) {
- if (rootEntityName == null) {
- rootEntityName = path.entity.name.toString
+ // are there any mappings?
+ var mappingFound = hasAttributeMapping(path)
+ if (rootEntityName === null && !path.markLatest) {
+ rootEntityName = path.entity.toName.toString
}
if (dataInterchange.fileEndpoint instanceof DataInterchangeFileXML) {
var currentKey = '''list«substitutionCount»'''
- if(ftlDocument == null) {
- ftlDocument = domImpl.createDocument(null, "vector", null)
+ if(ftlDocument === null) {
+ ftlDocument = domImpl.createDocument(null, '''«IF dataInterchange.vectorName !== null»«dataInterchange.vectorName»«ELSE»vector«ENDIF»''', null)
}
- substitutionMap.put(currentKey, '''«vector» as «path.entity.name»''')
- bean = createXmlBean(ftlDocument, bean, path.entity, path.format, currentKey)
+ substitutionMap.put(currentKey, '''«vector» as «path.entity.toName»''')
+ bean = createXmlBean(ftlDocument, bean, path.entity, path.format, currentKey, path, dataInterchange)
substitutionCount = substitutionCount + 1
}
- for (f : path.entity.features) {
+ for (f : path.entity.allFeatures) {
if(f instanceof LAttribute && !f.toMany) {
- if ((!"disposed".equals((f as LEntityFeature).toName) && (!"id".equals((f as LEntityFeature).toName)))) {
+ if (!"disposed".equals(f.toName) &&
+ !(f as LEntityAttribute).version && // don't export version
+ !path.markLatest &&
+ ((!(f as LEntityAttribute).id && !(f as LEntityAttribute).uuid) || (path.lookupKeys.empty && dataInterchange.mode != EntityManagerMode.PERSIST)) &&
+ (!mappingFound || path.mappings.isMapped(f))) {
fieldList.add(f as LEntityAttribute)
}
}
- if(f instanceof LReference && f.toMany) {
+ if(f instanceof LReference && !f.toMany && path.lookupKeys.empty) {
+ fieldList.add(f as LEntityReference)
+ }
+ if(f instanceof LReference && f.toMany && !path.markLatest) {
var iter = dataInterchange.path.iterator
var next = iter.next
// find this entity
@@ -624,70 +695,100 @@ class DataDSLModelGenerator extends I18NModelGenerator {
while (iter.hasNext) {
// move to next entity
next = iter.next
- if (next != null && next.entity.toName.equals((f.type as LEntity).toName)) {
- vector = '''«path.entity.name».«(f as LEntityFeature).name»'''
+ if (next !== null && next.entity.toName.equals((f.type as LEntity).toName)) {
+ vector = '''«path.entity.toName».«f.name»'''
}
}
}
}
}
if(endPoint instanceof DataInterchangeFileCSV) {
- createFreemarker(doc, createCsvTemplate(rootEntityName, fieldList, delimiter, quote, dataInterchange.path))
+ createFreemarker(doc, createCsvTemplate(rootEntityName, fieldList, delimiter, quote, dataInterchange.path), endPoint)
}
if(endPoint instanceof DataInterchangeFileXML) {
- createFreemarker(doc, createXmlTemplate(ftlDocument, substitutionMap))
+ createFreemarker(doc, createXmlTemplate(ftlDocument, substitutionMap), endPoint)
}
}
- def generateImportConfig(DataInterchange dataInterchange, List<LEntityAttribute> fieldList, Document doc, DataInterchangeFile endPoint) {
+ def generateImportConfig(DataInterchange dataInterchange, List<LEntityFeature> fieldList, Document doc, DataInterchangeFile endPoint) {
+ var isFirst = true
+ var hasMarker = false
var autoMapping = ""
+ var byAttribute = false
if(endPoint instanceof DataInterchangeFileCSV) {
autoMapping = "/csv-set"
} else if(endPoint instanceof DataInterchangeFileXML) {
- autoMapping = "vector"
+ autoMapping = '''«IF dataInterchange.vectorName !== null»«dataInterchange.vectorName»«ELSE»vector«ENDIF»'''
+ byAttribute = endPoint.byAttribute
}
for(path : dataInterchange.path) {
var map = ""
- if(path.elementMap == null) {
+ if(path.elementMap === null) {
map = autoMapping
} else {
map = path.elementMap
}
+ if(path.markLatest) {
+ hasMarker = true
+ }
var Element bean = null
- if(path.recordList) {
+ if(!path.markLatest && !hasMarker && isFirst) {
var rootBean = createBean(doc, path.entity.toName+"List", path.entity.fullyQualifiedName.toString, map, true)
createWiring(doc, rootBean, path.entity.toName, null, null)
- if(endPoint instanceof DataInterchangeFileCSV) {
- autoMapping = autoMapping + "/csv-record"
- } else if(endPoint instanceof DataInterchangeFileXML) {
- autoMapping = autoMapping + "/" + path.entity.toName
- }
- if(path.elementMap == null) {
- map = autoMapping
- } else {
- map = path.elementMap
- }
- bean = createBean(doc, path.entity.toName, path.entity.fullyQualifiedName.toString, map, false)
+ isFirst = false
+ }
+ if(!path.markLatest && endPoint instanceof DataInterchangeFileCSV) {
+ autoMapping = autoMapping + "/csv-record"
+ }
+ if(endPoint instanceof DataInterchangeFileXML) {
+ autoMapping = '''«autoMapping»/«IF path.nodeName !== null»«path.nodeName»«ELSE»«path.entity.toName»«ENDIF»'''
+ }
+ if(path.elementMap === null) {
+ map = autoMapping
} else {
- if(endPoint instanceof DataInterchangeFileXML) {
- autoMapping = autoMapping + "/" + path.entity.toName
+ map = path.elementMap
+ }
+ bean = createBean(doc, path.entity.toName, path.entity.fullyQualifiedName.toString, map, false)
+ // if merge or remove - create a locator for id and version
+ if(dataInterchange.mode != EntityManagerMode.PERSIST) {
+ if(!path.lookupKeys.empty) {
+ createExpression(doc, bean, path.entity.idAttributeName, "?"+path.entity.toName+"Merger."+path.entity.idAttributeName)
+ if(path.entity.versionAttribute !== null) {
+ createExpression(doc, bean, path.entity.versionAttributeName, "?"+path.entity.toName+"Merger."+path.entity.versionAttributeName)
+ }
+ var locator = createDaoLocator(doc, null, path.entity.toName+"Merger", map, true, false)
+ createDaoQuery(doc, locator, path.queryKeys)
+ var pCnt = 0
+ for(key:path.lookupKeys) {
+ createDaoParam(doc, locator, '''param«pCnt++»''', key.property.decoder, map, path.mappings.mapElement(key.property.name), byAttribute)
+ }
}
- if(path.elementMap == null) {
- map = autoMapping
- } else {
- map = path.elementMap
+ else if(path.entity.versionAttribute !== null) {
+ createExpression(doc, bean, path.entity.versionAttributeName, "?"+path.entity.toName+"Merger."+path.entity.versionAttributeName)
+ var locator = createDaoLocator(doc, null, path.entity.toName+"Merger", map, true, false)
+ createDaoQuery(doc, locator, path.queryVersion)
+ createDaoParam(doc, locator, '''param''', path.entity.primaryKeyAttribute.decoder, map, path.entity.idAttributeName, byAttribute)
}
- bean = createBean(doc, path.entity.toName, path.entity.fullyQualifiedName.toString, map, false)
}
- var mappingFound = false
- for (f : path.entity.features) {
+ // are there any mappings?
+ var mappingFound = hasAttributeMapping(path)
+ for (f : path.entity.allFeatures) {
switch f {
LAttribute: {
if (!f.toMany) {
- // enable mapping for this field, but first try special cases...
- if ((!"disposed".equals((f as LEntityFeature).toName) && (!"id".equals((f as LEntityFeature).toName) && (!path.markLatest || !path.latestProperty.toName.equals((f as LEntityFeature).toName))))) {
+ // enable mapping for this field
+ // if not disposed and not latest marker and not id except no lookup keys given and mode is not persist
+ if (!"disposed".equals(f.toName) &&
+ !(f as LEntityAttribute).version
+ &&
+ ((!(f as LEntityAttribute).id && !(f as LEntityAttribute).uuid)
+ ||
+ (path.lookupKeys.empty && dataInterchange.mode != EntityManagerMode.PERSIST))
+ &&
+ (!path.markLatest || !path.latestProperty.toName.equals(f.toName))
+ ) {
// add to the level's field list
- if (path.recordList || path.recordElement) {
+ if (!path.markLatest) {
fieldList.add(f as LEntityAttribute)
}
var expressionFound = false
@@ -696,18 +797,18 @@ class DataDSLModelGenerator extends I18NModelGenerator {
switch(expr) {
DataInterchangeEntityExpression: {
// is there an entity expression for this attribute ?
- if ((f as LEntityFeature).toName.equals((expr as DataInterchangeEntityExpression).targetProperty.toName)) {
- createExpression(doc, bean, (f as LEntityFeature).toName, (expr as DataInterchangeEntityExpression).entity.toName+"."+(expr as DataInterchangeEntityExpression).property.toName)
+ if (f.toName.equals((expr as DataInterchangeEntityExpression).targetProperty.toName)) {
+ createExpression(doc, bean, f.toName, (expr as DataInterchangeEntityExpression).entity.toName+"."+(expr as DataInterchangeEntityExpression).property.toName)
expressionFound = true
}
}
DataInterchangePredefinedExpression: {
// is there an predefined expression modeled for this attribute ?
- if ((f as LEntityFeature).toName.equals((expr as DataInterchangePredefinedExpression).targetProperty.toName)) {
+ if (f.toName.equals((expr as DataInterchangePredefinedExpression).targetProperty.toName)) {
if("UUID".equals((expr as DataInterchangePredefinedExpression).bean.literal)) {
- createExpression(doc, bean, (f as LEntityFeature).toName, "PUUID."+(expr as DataInterchangePredefinedExpression).beanType.getName)
+ createExpression(doc, bean, f.toName, "PUUID."+(expr as DataInterchangePredefinedExpression).beanType.getName)
} else {
- createExpression(doc, bean, (f as LEntityFeature).toName, "PTIME."+(expr as DataInterchangePredefinedExpression).bean.getName+(expr as DataInterchangePredefinedExpression).beanType.getName)
+ createExpression(doc, bean, f.toName, "PTIME."+(expr as DataInterchangePredefinedExpression).bean.getName+(expr as DataInterchangePredefinedExpression).beanType.getName)
}
expressionFound = true
}
@@ -718,11 +819,11 @@ class DataDSLModelGenerator extends I18NModelGenerator {
var formatFound = false
for (format : path.format) {
// is there a format modeled for this attribute ?
- if ((f as LEntityFeature).toName.equals(format.targetProperty.toName)) {
- var value = createProperty(doc, bean, (f as LEntityFeature).toName, (f as LEntityFeature).decoder, dtType.getBasicType(f as LEntityAttribute))
- if(format.format != null) {
+ if (f.toName.equals(format.targetProperty.toName)) {
+ var value = createProperty(doc, bean, f.toName, f.decoder, dtType.getBasicType(f as LEntityAttribute))
+ if(format.format !== null) {
createDecodeParam(doc, value, "format", format.format)
- if (format.locale != null) {
+ if (format.locale !== null) {
createDecodeParam(doc, value, "locale-language", format.locale.split("_").get(0))
if(format.locale.split("_").size > 1) {
createDecodeParam(doc, value, "locale-country", format.locale.split("_").get(1))
@@ -732,44 +833,51 @@ class DataDSLModelGenerator extends I18NModelGenerator {
formatFound = true
}
}
- // scan lookup for this field - import only
- var lookupFound = false
- for (lookup : path.lookup) {
- // entity and property must match
- if ((f as LEntityFeature).toName.equals(lookup.targetProperty.toName)) {
- var value = createWiring(doc, bean, (f.type as LEntity).toName, (f as LReference).name, null)
- var locator = createDaoLocator(doc, value, (f.type as LEntity).toName, lookup.elementMap, lookup.allowNoResult, lookup.allowNonuniqueResult)
- var daoQuery = createDaoQuery(doc, locator, (f.type as LEntity).query(lookup))
- createDaoParam(doc, daoQuery, "param", (f as LEntityFeature).type.name.toFirstUpper, lookup.elementMap, lookup.dataMap)
- }
- }
// default for mapping purposes
- if (!expressionFound && !formatFound && !lookupFound) {
- // add format decoder
- var etype = dtType.getBasicType(f as LEntityAttribute)
- var value = createProperty(doc, bean, (f as LEntityFeature).toName, (f as LEntityFeature).decoder, etype)
- if (etype == EType.DATE) {
- createDecodeParam(doc, value, "format", "yyyy-MM-dd")
+ if (!expressionFound && !formatFound) {
+ // create no property for unmapped marker entities
+ if (!(endPoint instanceof DataInterchangeFileCSV) || !path.markLatest) {
+ // create no property if mapping is used and this attribute is unmapped
+ if(!mappingFound || path.mappings.isMapped(f)) {
+ var etype = dtType.getBasicType(f as LEntityAttribute)
+
+ // add enum decoder
+ if(etype == EType.LENUM){
+ var value = createProperty(doc, bean, f.toName, "Enum", etype)
+ createDecodeParam(doc, value, "enumType", f.type.toQualifiedName.toString)
+
+ var enumsliterals = f.type.eContents;
+ for(literal : enumsliterals){
+ val enumname = literal.fullyQualifiedName.lastSegment
+ // extra fileds
+ createDecodeParam(doc, value, enumsliterals.indexOf(literal).toString, enumname)
+ // mandatory fields
+ createDecodeParam(doc, value, enumname, enumname)
+ }
+ }
+ else{
+ // add format decoder
+ var value = createProperty(doc, bean, f.toName, f.decoder, etype)
+ if (etype == EType.DATE) {
+ createDecodeParam(doc, value, "format", "yyyy-MM-dd'T'HH:mm:ss")
+ }
+ }
+ }
}
}
// if mapping given
+ path.hasBlobMapping = false
for(mapping : path.mappings) {
if(f.name.equals(mapping.property.name)) {
- addMapping(doc, bean, (f as LEntityFeature).toName, mapping.data)
- mappingFound = true
- }
- }
- // if recordElement given
- if (!mappingFound && !fieldList.isEmpty) {
- if (path.recordList || path.recordElement) {
- for(fld : fieldList) {
- addMapping(doc, bean, fld.toName, fld.toName)
+ addMapping(doc, bean, f.toName, mapping.data, byAttribute)
+ if(mapping instanceof DataInterchangeBlobMapping) {
+ path.hasBlobMapping = true
}
}
}
// default mapping for xml
- if (!mappingFound && endPoint instanceof DataInterchangeFileXML) {
- addMapping(doc, bean, (f as LEntityFeature).toName, (f as LEntityFeature).toName)
+ if (!mappingFound && !path.markLatest && endPoint instanceof DataInterchangeFileXML) {
+ addMapping(doc, bean, f.toName, f.toName, byAttribute)
}
}
}
@@ -788,58 +896,126 @@ class DataDSLModelGenerator extends I18NModelGenerator {
while (iter.hasNext) {
// move to next entity
next = iter.next
- if (next != null && next.entity.toName.equals((f.type as LEntity).toName)) {
- createWiring(doc, bean, (f.type as LEntity).toName, null, (f as LEntityFeature).toAdder((f as LEntityFeature).name).simpleName)
+ if (next !== null && next.entity.toName.equals((f.type as LEntity).toName)) {
+ createWiring(doc, bean, (f.type as LEntity).toName, null, f.toAdder(f.name).simpleName)
}
}
} else {
// many to one
- // generate possible lookups for this many to one relationship
- for (lookup : path.lookup) {
- // entity and property must match
- if (lookup.targetProperty.toName.equals((f as LReference).name)) {
- createWiring(doc, bean, (f.type as LEntity).toName, (f as LReference).name, null)
- var locator = createDaoLocator(doc, null, (f.type as LEntity).toName, lookup.elementMap, lookup.allowNoResult, lookup.allowNonuniqueResult)
- createDaoQuery(doc, locator, (f.type as LEntity).query(lookup))
- createDaoParam(doc, locator, "param", lookup.queryProperty.type.name.toFirstUpper, lookup.elementMap, lookup.dataMap)
+ if(path.lookup.isEmpty) {
+ fieldList.add(f as LEntityReference)
+ var field = (f as LEntityReference).type.primaryKeyAttribute
+ var etype = dtType.getBasicType(field)
+
+ for(mapping : path.mappings) {
+ // might be buggy for xml
+ if(f.name.equals(mapping.property.name)) {
+ createProperty(doc, bean, f.toName, field.decoder, etype)
+ addMapping(doc, bean, f.toName, mapping.data, byAttribute)
+ }
+ }
+
+ } else {
+ // generate possible lookups for this many to one relationship
+ for (lookup : path.lookup) {
+ // entity and property must match
+ if (lookup.targetProperty.toName.equals((f as LReference).name)) {
+ if(endPoint instanceof DataInterchangeFileCSV){
+ // the field will need to be included for header
+ fieldList.add(f as LEntityReference)
+ }
+ createWiring(doc, bean, (f.type as LEntity).toName, (f as LReference).name, null)
+ var locator = createDaoLocator(doc, null, (f.type as LEntity).toName, lookup.elementMap, lookup.allowNoResult, lookup.allowNonuniqueResult)
+ createDaoQuery(doc, locator, (f.type as LEntity).query(lookup))
+ createDaoParam(doc, locator, "param", lookup.queryProperty.type.name.toFirstUpper, lookup.elementMap, lookup.dataMap, byAttribute)
+ }
}
}
}
}
}
}
+ //for CSV file, if no mapping were found, simply map all attributes
+ if (!mappingFound && !path.markLatest && endPoint instanceof DataInterchangeFileCSV) {
+ for(fld : fieldList) {
+ if(fld instanceof LEntityAttribute){
+ addMapping(doc, bean, fld.toName, fld.toName, byAttribute)
+ }
+ }
+ }
+ }
+ }
+
+ protected def boolean hasAttributeMapping(DataInterchangeBean path) {
+ for(mapping : path.mappings) {
+ for (a : path.entity.allAttributes) {
+ if(a.name.equals(mapping.property.name)) {
+ return true
+ }
+ }
+ }
+ return false
+ }
+
+ protected def boolean hasReferenceLookup(DataInterchangeBean path){
+ for(lu:path.lookup) {
+ for(a:path.entity.getAllReferences) {
+ if(a.name.equals(lu.dataMap)){
+ return true
+ }
+ }
+ }
+ return false
+ }
+
+ def boolean isMapped(EList<DataInterchangeMapping> list, LFeature attribute) {
+ if(attribute instanceof LEntityAttribute) {
+ return !list.filter[it|it.property.name.equals(attribute.name)].empty
+ }
+ return false;
+ }
+
+ def String mapElement(EList<DataInterchangeMapping> mappings, String propertyName) {
+ var element = (mappings.findFirst[it.property.name.equals(propertyName) && it instanceof DataInterchangeValueMapping] as DataInterchangeValueMapping)?.data
+ if(element === null) {
+ element = propertyName
}
+ return element
}
- def Element createXmlBean(Document doc, Element parent, LEntity entity, EList<DataInterchangeFormat> formats, String currentKey) {
- var bean = doc.createElement(entity.name)
+ def Element createXmlBean(Document doc, Element parent, LEntity entity, EList<DataInterchangeFormat> formats, String currentKey, DataInterchangeBean path, DataInterchange dataInterchange) {
+ // are there any mappings?
+ var mappingFound = hasAttributeMapping(path)
+ var bean = doc.createElement('''«IF path.nodeName !== null»«path.nodeName»«ELSE»«entity.toName»«ENDIF»''')
var pi = doc.createProcessingInstruction(currentKey, "")
- if(parent == null) {
+ if(parent === null) {
doc.documentElement.appendChild(pi)
doc.documentElement.appendChild(bean)
} else {
- bean = doc.createElement(entity.name)
+ bean = doc.createElement('''«IF path.nodeName !== null»«path.nodeName»«ELSE»«entity.toName»«ENDIF»''')
parent.appendChild(pi)
parent.appendChild(bean)
}
for(p:entity.allAttributes) {
- if ((!"disposed".equals(p.toName) && (!"id".equals(p.toName)))) {
+ if(!p.version && // don't export version
+ ((!p.id && !p.uuid) || (path.lookupKeys.empty && dataInterchange.mode != EntityManagerMode.PERSIST)) &&
+ (!mappingFound || path.mappings.isMapped(p))) {
var format = null as DataInterchangeFormat
for (step : formats) {
// is there a format modeled for this attribute ?
if ((p as LEntityFeature).toName.equals(step.targetProperty.toName)) {
- if (step.format != null) {
+ if (step.format !== null) {
format = step
}
}
}
var property = doc.createElement(p.toName)
- property.textContent = encodeFreemarker(entity.name, p, format, "")
+ property.textContent = encodeFreemarker(entity.toName, p, format, "", true)
bean.appendChild(property)
}
}
var pa = doc.createProcessingInstruction(currentKey, "")
- if(parent == null) {
+ if(parent === null) {
doc.documentElement.appendChild(pa)
} else {
parent.appendChild(pa)
@@ -851,13 +1027,17 @@ class DataDSLModelGenerator extends I18NModelGenerator {
return getPrimitiveDataTypeName(f as LEntityAttribute)
}
+ def String decoder(LEntityAttribute f) {
+ return getPrimitiveDataTypeName(f)
+ }
+
def String getPrimitiveDataTypeName(LEntityAttribute attribute) {
var eType = dtType.getBasicType(attribute)
var String typeName = null
if (eType == EType.DATE) {
typeName = "Date"
}
- else if (attribute.type != null && (attribute.type instanceof LDataType) && (attribute.type as LDataType).jvmTypeReference != null) {
+ else if (attribute.type !== null && (attribute.type instanceof LDataType) && (attribute.type as LDataType).jvmTypeReference !== null) {
typeName = (attribute.type as LDataType).jvmTypeReference.simpleName
} else {
typeName = attribute.type.name
@@ -883,7 +1063,7 @@ class DataDSLModelGenerator extends I18NModelGenerator {
def String createXmlTemplate(Document doc, HashMap<String,String> substitutionMap) {
var source = new DOMSource(doc)
- var res = new StringResult()
+ var res = new DataResult()
transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
transformer.transform(source, res)
var output = res.result
@@ -897,21 +1077,22 @@ class DataDSLModelGenerator extends I18NModelGenerator {
return output.replace("&lt;","<").replace("&gt;",">").replace("<?","<").replace("?>",">")//.replace("</#","\n</#")
}
- def String createCsvTemplate(String rootEntityName, List<LEntityAttribute> fieldList, String delimiter, String quote, EList<DataInterchangeBean> paths) {
+ def String createCsvTemplate(String rootEntityName, List<LEntityFeature> fieldList, String delimiter, String quote, EList<DataInterchangeBean> paths) {
var tmpList = <String>newArrayList()
var fldList = <String>newArrayList
for(field:fieldList) {
- tmpList.add(encodeFreemarker(rootEntityName, field, paths, quote))
+ tmpList.add(encodeFreemarker(rootEntityName, field, paths, quote, false))
fldList.add(field.toName)
}
- var body = '''«fldList.join(delimiter)»
-<#list vector as «rootEntityName»>
-«tmpList.join(delimiter)»
-</#list>'''
+ var body = '''
+ «fldList.join(delimiter)»
+ <#list vector as «rootEntityName»>
+ «tmpList.join(delimiter)»
+ </#list>'''
return body
}
- def String encodeFreemarker(String entityName, LEntityAttribute field, EList<DataInterchangeBean> paths, String quote) {
+ def String encodeFreemarker(String entityName, LEntityFeature field, EList<DataInterchangeBean> paths, String quote, boolean encodeHtml) {
var format = null as DataInterchangeFormat
val entity = field.eContainer as LEntity
for (path : paths) {
@@ -919,32 +1100,41 @@ class DataDSLModelGenerator extends I18NModelGenerator {
for (step : path.format) {
// is there a format modeled for this attribute ?
if (field.toName.equals(step.targetProperty.toName)) {
- if (step.format != null) {
+ if (step.format !== null) {
format = step
}
}
}
}
}
- return encodeFreemarker(entityName, field, format, quote)
+ return encodeFreemarker(entityName, field, format, quote, encodeHtml)
}
- def String encodeFreemarker(String entityName, LEntityAttribute field, DataInterchangeFormat format, String quote) {
- var etype = dtType.getBasicType(field)
- if (etype == EType.BOOLEAN) {
- return '''${(«entityName».«field.toName»?c)!}'''
- }
- else if (format != null) {
- return '''${(«entityName».«field.toName»?string["«format.format»"])!}'''
- }
- else if (etype == EType.DATE) {
- return '''${(«entityName».«field.toName»?date)!}'''
- }
- else if (etype == EType.STRING) {
- return '''«quote»${(«entityName».«field.toName»)!}«quote»'''
- }
- else {
- return '''${(«entityName».«field.toName»)!}'''
+ def String encodeFreemarker(String entityName, LEntityFeature field, DataInterchangeFormat format, String quote, boolean encodeHtml) {
+ if(field instanceof LEntityAttribute) {
+ var etype = dtType.getBasicType(field)
+ if (etype == EType.BOOLEAN) {
+ return '''${(«entityName».«field.toName»?c)!}'''
+ }
+ else if (format !== null) {
+ return '''«IF format.locale !== null»<#setting locale="«format.locale»">«ENDIF»${(«entityName».«field.toName»?string["«format.format»"])!}'''
+ }
+ else if (etype == EType.DATE) {
+ return '''${(«entityName».«field.toName»?datetime?iso_local_ms_nz)!}'''
+ }
+ else if (etype == EType.STRING) {
+ return '''«quote»${(«entityName».«field.toName»«IF encodeHtml»?html«ENDIF»)!}«quote»'''
+ }
+ else {
+ return '''${(«entityName».«field.toName»)!}'''
+ }
+ } else {
+ var etype = dtType.getBasicType((field as LEntityReference).type.primaryKeyAttribute)
+ if (etype == EType.STRING) {
+ return '''«quote»${(«entityName».«field.toName».«(field as LEntityReference).type.primaryKeyAttribute.name»)!}«quote»'''
+ } else {
+ return '''${(«entityName».«field.toName».«(field as LEntityReference).type.primaryKeyAttribute.name»)!}'''
+ }
}
}
@@ -956,7 +1146,7 @@ class DataDSLModelGenerator extends I18NModelGenerator {
var whereList = <String>newArrayList
var qstr = '''x«aliasCnt».«lookup.queryProperty.toName» = :param'''
whereList.add(qstr)
- if (lookup.markerPath != null) {
+ if (lookup.markerPath !== null) {
for(markerEntity:lookup.markerPath.path) {
aliasCnt = aliasCnt + 1
if (markerEntity.markLatest) {
@@ -976,32 +1166,39 @@ class DataDSLModelGenerator extends I18NModelGenerator {
return '''from «select»«IF joinList.size>0» left join «ENDIF»«joinList.join(" left join ")» where «whereList.join(" and ")»'''
}
+ def String queryKeys(DataInterchangeBean bean) {
+ var pCnt = 0
+ var select = '''«bean.entity.toName» x'''
+ var whereList = <String>newArrayList
+ for(key:bean.lookupKeys) {
+ var qstr = '''x.«key.property.toName» = :param«pCnt++»'''
+ whereList.add(qstr)
+ }
+ return '''from «select» where «whereList.join(" and ")»'''
+ }
+
+ def String queryVersion(DataInterchangeBean bean) {
+ var select = '''«bean.entity.toName» x'''
+ var qstr = '''x.«bean.entity.idAttributeName» = :param'''
+ return '''from «select» where «qstr»'''
+ }
+
override createAppendable(EObject context, ImportManager importManager, GeneratorConfig config) {
// required to initialize the needed builder to avoid deprecated methods
builder = context.eResource
// ---------
addImportFor(importManager, _typeReferenceBuilder
, FrameworkUtil
- , SmooksOSGIFactory
- , SmooksFactory
- , Smooks
- , HtmlReportGenerator
- , ExecutionContext
- , JavaResult
- , XMLBinding
+ , IDataInterchange
, StreamSource
- , ByteArrayInputStream
, URL
, URI
, InputStream
, StreamUtils
, MalformedURLException
- , IOException
, FileAlreadyExistsException
, URISyntaxException
, EntityManager
- , PersistenceUtil
- , EntityManagerRegister
, EntityTransaction
, Logger
, LoggerFactory
@@ -1035,7 +1232,6 @@ class DataDSLModelGenerator extends I18NModelGenerator {
, StringReader
, OutputKeys
, StreamResult
- , JavaSource
, CriteriaBuilder
, CriteriaQuery
, Root
@@ -1046,14 +1242,29 @@ class DataDSLModelGenerator extends I18NModelGenerator {
, Files
, StandardOpenOption
, BufferedOutputStream
- , FilterLifecycleEvent
- , ElementPresentEvent
- , EventType
+ , BufferedInputStream
, UI
, Pair
, IEntityImportInitializationListener
, ConstraintViolationException
, ConstraintViolation
+ , EventDispatcherEvent
+ , EventDispatcherCommand
+ , DataInterchangeException
+ , TransformerConfigurationException
+ , SAXException
+ , IOException
+ , UUID
+ , IOException
+ , File
+ , FileInputStream
+ , Properties
+ , ProductConfiguration
+ , PrintWriter
+ , BlobService
+ , BlobTypingAPI
+ , Component
+ , Panel
)
super.createAppendable(context, importManager, config)
}
diff --git a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/jvmmodel/DataResult.java b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/jvmmodel/DataResult.java
new file mode 100644
index 0000000..1d98bf0
--- /dev/null
+++ b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/jvmmodel/DataResult.java
@@ -0,0 +1,20 @@
+package org.eclipse.osbp.xtext.datainterchange.jvmmodel;
+
+import java.io.StringWriter;
+
+import javax.xml.transform.stream.StreamResult;
+
+public class DataResult extends StreamResult {
+ public DataResult() {
+ StringWriter writer = new StringWriter();
+ setWriter(writer);
+ }
+
+ public String getResult() {
+ return getWriter().toString();
+ }
+
+ public String toString() {
+ return getResult();
+ }
+}
diff --git a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/scoping/DataDSLImportSectionNamespaceScopeProvider.java b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/scoping/DataDSLImportSectionNamespaceScopeProvider.java
new file mode 100644
index 0000000..d7daa89
--- /dev/null
+++ b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/scoping/DataDSLImportSectionNamespaceScopeProvider.java
@@ -0,0 +1,8 @@
+package org.eclipse.osbp.xtext.datainterchange.scoping;
+
+import org.eclipse.osbp.xtext.oxtype.scoping.OXDelegatingNamespaceScopeProvider;
+
+public class DataDSLImportSectionNamespaceScopeProvider extends
+ OXDelegatingNamespaceScopeProvider {
+
+}
diff --git a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/scoping/DataDSLScopeProvider.xtend b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/scoping/DataDSLScopeProvider.xtend
index 796c5fb..e4c21bd 100644
--- a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/scoping/DataDSLScopeProvider.xtend
+++ b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/scoping/DataDSLScopeProvider.xtend
@@ -12,35 +12,32 @@
*
*
* This copyright notice shows up in the generated Java code
- *
+ *
*/
-
+
package org.eclipse.osbp.xtext.datainterchange.scoping
+import javax.inject.Inject
+import org.eclipse.emf.ecore.EObject
+import org.eclipse.emf.ecore.EReference
+import org.eclipse.osbp.dsl.entity.xtext.extensions.ModelExtensions
+import org.eclipse.osbp.dsl.semantic.common.types.LAttribute
+import org.eclipse.osbp.dsl.semantic.common.types.LReference
+import org.eclipse.osbp.dsl.semantic.entity.LEntity
+import org.eclipse.osbp.dsl.semantic.entity.LEntityReference
import org.eclipse.osbp.xtext.datainterchange.DataDSLPackage
import org.eclipse.osbp.xtext.datainterchange.DataInterchange
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeBean
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeEntityExpression
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeLookup
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeMarkerEntity
-import javax.inject.Inject
-import org.eclipse.emf.ecore.EObject
-import org.eclipse.emf.ecore.EReference
import org.eclipse.xtext.resource.EObjectDescription
import org.eclipse.xtext.resource.IEObjectDescription
import org.eclipse.xtext.scoping.IScope
import org.eclipse.xtext.scoping.impl.MapBasedScope
-import org.eclipse.xtext.xbase.annotations.typesystem.XbaseWithAnnotationsBatchScopeProvider
-import org.eclipse.osbp.dsl.entity.xtext.extensions.EntityTypesBuilder
-import org.eclipse.osbp.dsl.entity.xtext.extensions.ModelExtensions
-import org.eclipse.osbp.dsl.semantic.common.types.LAttribute
-import org.eclipse.osbp.dsl.semantic.common.types.LReference
-import org.eclipse.osbp.dsl.semantic.entity.LEntity
-import org.eclipse.osbp.dsl.semantic.entity.LEntityReference
-class DataDSLScopeProvider extends XbaseWithAnnotationsBatchScopeProvider {
+class DataDSLScopeProvider extends AbstractDataDSLScopeProvider {
@Inject extension ModelExtensions
- @Inject extension EntityTypesBuilder
override getScope(EObject context, EReference reference) {
if (reference == DataDSLPackage.Literals.DATA_INTERCHANGE_BEAN__LATEST_PROPERTY) {
@@ -49,8 +46,6 @@ class DataDSLScopeProvider extends XbaseWithAnnotationsBatchScopeProvider {
return getScope_Data_Target_property(context, reference, true)
} else if (reference == DataDSLPackage.Literals.DATA_INTERCHANGE_FORMAT__TARGET_PROPERTY) {
return getScope_Data_Target_property(context, reference, true)
- } else if (reference == DataDSLPackage.Literals.DATA_INTERCHANGE_ENTITY_EXPRESSION__ENTITY) {
- return getScope_Data_Expression_entity(context, reference, true)
} else if (reference == DataDSLPackage.Literals.DATA_INTERCHANGE_ENTITY_EXPRESSION__PROPERTY) {
return getScope_Data_Expression_entity(context, reference, false)
} else if (reference == DataDSLPackage.Literals.DATA_INTERCHANGE_LOOKUP__TARGET_PROPERTY) {
@@ -59,42 +54,44 @@ class DataDSLScopeProvider extends XbaseWithAnnotationsBatchScopeProvider {
return getScope_Data_Lookup_queryProperty(context, reference)
} else if (reference == DataDSLPackage.Literals.DATA_INTERCHANGE_LOOKUP__ENTITY) {
return getScope_Data_Lookup_lookupEntity(context, reference)
- } else if (reference == DataDSLPackage.Literals.DATA_INTERCHANGE_BEAN__ENTITY) {
- return getScope_Data_Bean_Entity(context, reference)
} else if (reference == DataDSLPackage.Literals.DATA_INTERCHANGE_MAPPING__PROPERTY) {
return getScope_Data_Target_property(context, reference, true)
} else if (reference == DataDSLPackage.Literals.DATA_INTERCHANGE_MARKER_ENTITY__MARKER_ENTITY) {
return getScope_Data_Markerpath_entity(context, reference)
} else if (reference == DataDSLPackage.Literals.DATA_INTERCHANGE_MARKER_ENTITY__MARKER_PROPERTY) {
return getScope_Data_Entity_latest(context, reference)
+ } else if (reference == DataDSLPackage.Literals.DATA_INTERCHANGE_BEAN__ENTITY) {
+ return getScope_Data_Entity_next(context, reference)
+ } else if (reference == DataDSLPackage.Literals.DATA_INTERCHANGE_KEY__PROPERTY) {
+ return getScope_keyProperty(context, reference)
} else {
super.getScope(context, reference)
}
}
-
+
def getScope_Data_Bean_Entity(EObject context, EReference reference) {
var result = <IEObjectDescription>newArrayList
var eObj = context.eContainer
while (!(eObj instanceof DataInterchange)) {
eObj = eObj.eContainer
}
- if (eObj != null) {
+ if (eObj !== null) {
var DataInterchangeBean previousEntity = null
- for(diEntity:(eObj as DataInterchange).path) {
+ for (diEntity : (eObj as DataInterchange).path) {
// find me
- if (previousEntity != null) {
+ if (previousEntity !== null) {
// remove this entity from result, we already used it
var IEObjectDescription delObj = null
- for(r:result) {
- if (r.name.toString.equals(previousEntity.entity.name)) {
+ for (r : result) {
+ if (r.name.toString.equals(previousEntity.entity.name)) {
delObj = r
}
}
- if(delObj != null) {
+ if (delObj !== null) {
result.remove(delObj)
}
// scoping refers to the previous owner
- for(f:previousEntity.entity.features) {
+ for (f : previousEntity.entity.features) {
if (f instanceof LReference) {
if (f.cascading && f.toMany) {
var ref = (f as LEntityReference)
@@ -105,28 +102,36 @@ class DataDSLScopeProvider extends XbaseWithAnnotationsBatchScopeProvider {
}
if (!diEntity.equals(context)) {
previousEntity = diEntity
- }
- else if (previousEntity == null) {
+ } else if (previousEntity === null) {
return super.getScope(context, reference)
}
}
}
return MapBasedScope.createScope(IScope.NULLSCOPE, result)
}
-
+
+ def getScope_keyProperty(EObject context, EReference reference) {
+ val result = <IEObjectDescription>newArrayList
+ var eObj = context.eContainer
+ while (!(eObj instanceof DataInterchangeBean)) {
+ eObj = eObj.eContainer
+ }
+ if (eObj !== null && eObj instanceof DataInterchangeBean) {
+ var entity = (eObj as DataInterchangeBean).entity
+ entity.allAttributes.forEach[result.add(EObjectDescription.create(it.name, it))]
+ }
+ return MapBasedScope.createScope(IScope.NULLSCOPE, result)
+ }
+
def getScope_Data_Lookup_queryProperty(EObject context, EReference reference) {
- var result = <IEObjectDescription>newArrayList
- if (context != null && context instanceof DataInterchangeLookup) {
+ val result = <IEObjectDescription>newArrayList
+ if (context !== null && context instanceof DataInterchangeLookup) {
var entity = (context as DataInterchangeLookup).entity
- for (prop : entity.features) {
- if (prop instanceof LAttribute && !prop.toMany) {
- result.add(EObjectDescription.create((prop as LAttribute).name, (prop as LAttribute)))
- }
- }
+ entity.allAttributes.forEach[result.add(EObjectDescription.create(it.name, it))]
}
return MapBasedScope.createScope(IScope.NULLSCOPE, result)
}
-
+
def getScope_Data_Lookup_lookupEntity(EObject context, EReference reference) {
var result = <IEObjectDescription>newArrayList
var targetProperty = (context as DataInterchangeLookup).targetProperty
@@ -134,32 +139,34 @@ class DataDSLScopeProvider extends XbaseWithAnnotationsBatchScopeProvider {
while (!(eObj instanceof DataInterchangeBean)) {
eObj = eObj.eContainer
}
- if (eObj != null) {
+ if (eObj !== null) {
var entity = (eObj as DataInterchangeBean).entity
for (prop : entity.features) {
- if (prop instanceof LReference && !prop.toMany && (prop as LReference).name.equals(targetProperty.toName)) {
+ if (prop instanceof LReference && !prop.toMany &&
+ (prop as LReference).name.equals(targetProperty.toName)) {
result.add(EObjectDescription.create((prop as LReference).type.name, (prop as LReference).type))
}
}
}
return MapBasedScope.createScope(IScope.NULLSCOPE, result)
}
-
+
def getScope_Data_Expression_entity(EObject context, EReference reference, boolean filterEntity) {
var result = <IEObjectDescription>newArrayList
var targetProperty = (context as DataInterchangeEntityExpression).targetProperty
+
var fromEntity = (context as DataInterchangeEntityExpression).entity
var eObj = context.eContainer
while (!(eObj instanceof DataInterchange)) {
eObj = eObj.eContainer
}
- if (eObj != null) {
+ if (eObj !== null) {
for (path : (eObj as DataInterchange).path) {
- for (f:path.entity.features) {
- if (f instanceof LAttribute && (f as LAttribute).type.name.equals(targetProperty.type.name)) {
+ for (f : path.entity.features) {
+ if (f instanceof LAttribute && (f as LAttribute).type.name.equals(targetProperty.type?.name)) {
if (filterEntity) {
result.add(EObjectDescription.create(path.entity.toName, path.entity))
- } else if (fromEntity == null || path.entity.toName.equals(fromEntity.toName)) {
+ } else if (fromEntity === null || path.entity.toName.equals(fromEntity.toName)) {
result.add(EObjectDescription.create((f as LAttribute).toName, (f as LAttribute)))
}
}
@@ -168,23 +175,25 @@ class DataDSLScopeProvider extends XbaseWithAnnotationsBatchScopeProvider {
}
return MapBasedScope.createScope(IScope.NULLSCOPE, result)
}
-
+
def getScope_Data_Target_property(EObject context, EReference reference, boolean filterAttributes) {
var result = <IEObjectDescription>newArrayList
var eObj = context.eContainer
while (!(eObj instanceof DataInterchangeBean)) {
eObj = eObj.eContainer
}
- if (eObj != null) {
+ if (eObj !== null) {
var entity = (eObj as DataInterchangeBean).entity
var marker = (eObj as DataInterchangeBean).latestProperty
- for (prop : entity.features) {
- if (filterAttributes) {
- if (prop instanceof LAttribute && (marker==null || !prop.toName.equals(marker.toName))) {
+ if (filterAttributes) {
+ for (prop : entity.allAttributes) {
+ if (marker === null || !prop.toName.equals(marker.toName)) {
result.add(EObjectDescription.create((prop as LAttribute).name, (prop as LAttribute)))
}
- } else {
- if (prop instanceof LReference && !prop.toMany) {
+ }
+ } else {
+ for (prop : entity.allReferences) {
+ if (!prop.toMany) {
result.add(EObjectDescription.create((prop as LReference).name, (prop as LReference)))
}
}
@@ -192,11 +201,11 @@ class DataDSLScopeProvider extends XbaseWithAnnotationsBatchScopeProvider {
}
return MapBasedScope.createScope(IScope.NULLSCOPE, result)
}
-
+
def getScope_Data_Entity_latest(EObject context, EReference reference) {
var result = <IEObjectDescription>newArrayList
var LEntity rootEntity = null
- if (context != null) {
+ if (context !== null) {
switch context {
DataInterchangeBean: {
rootEntity = (context as DataInterchangeBean).entity
@@ -213,18 +222,43 @@ class DataDSLScopeProvider extends XbaseWithAnnotationsBatchScopeProvider {
}
return MapBasedScope.createScope(IScope.NULLSCOPE, result)
}
-
+
+ def getScope_Data_Entity_next(EObject context, EReference reference) {
+ val result = <IEObjectDescription>newArrayList
+ var eObj = context
+ while (!(eObj instanceof DataInterchange)) {
+ eObj = eObj.eContainer
+ }
+ val prevObjects = <DataInterchangeBean>newArrayList
+ val rootObjects = <LEntity>newArrayList
+ for(obj:(eObj as DataInterchange).path) {
+ if(obj.equals(context)) {
+ prevObjects.forall[rootObjects.add(it.entity)]
+ }
+ prevObjects += obj
+ }
+ if(rootObjects.empty) {
+ return super.getScope(context, reference)
+ }
+ rootObjects.forEach[
+ it.allReferences.filter(it|it.toMany && it.cascading && !rootObjects.contains(it.type)).forEach[
+ result.add(EObjectDescription.create(it.type.name, it.type))
+ ]
+ ]
+ return MapBasedScope.createScope(IScope.NULLSCOPE, result)
+ }
+
def getScope_Data_Markerpath_entity(EObject context, EReference reference) {
var result = <IEObjectDescription>newArrayList
var eObj = context.eContainer
while (!(eObj instanceof DataInterchangeLookup)) {
eObj = eObj.eContainer
}
- if (eObj != null) {
+ if (eObj !== null) {
var entity = (eObj as DataInterchangeLookup).entity
- for (f:entity.references) {
+ for (f : entity.references) {
if (!f.toMany) {
- result.add(EObjectDescription.create(f.type.toName , f.type))
+ result.add(EObjectDescription.create(f.type.toName, f.type))
}
}
}
diff --git a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/validation/DataDSLValidator.xtend b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/validation/DataDSLValidator.xtend
index c447636..81194b4 100644
--- a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/validation/DataDSLValidator.xtend
+++ b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/validation/DataDSLValidator.xtend
@@ -1,38 +1,25 @@
-/**
- *
- * Copyright (c) 2011, 2016 - Loetz GmbH&Co.KG (69115 Heidelberg, Germany)
- *
- * All rights reserved. This program and the accompanying materials
- * are made available under the terms of the Eclipse Public License v1.0
- * which accompanies this distribution, and is available at
- * http://www.eclipse.org/legal/epl-v10.html
- *
- * Contributors:
- * Christophe Loetz (Loetz GmbH&Co.KG) - initial implementation
- *
- *
- * This copyright notice shows up in the generated Java code
- *
+/*
+ * generated by Xtext 2.11.0
*/
-
package org.eclipse.osbp.xtext.datainterchange.validation
-//import org.eclipse.xtext.validation.Check
+
/**
- * Custom validation rules.
+ * This class contains custom validation rules.
*
- * see http://www.eclipse.org/Xtext/documentation.html#validation
+ * See https://www.eclipse.org/Xtext/documentation/303_runtime_concepts.html#validation
*/
class DataDSLValidator extends AbstractDataDSLValidator {
-
-// public static val INVALID_NAME = 'invalidName'
+
+// public static val INVALID_NAME = 'invalidName'
//
// @Check
// def checkGreetingStartsWithCapital(Greeting greeting) {
// if (!Character.isUpperCase(greeting.name.charAt(0))) {
// warning('Name should start with a capital',
-// MyDslPackage.Literals.GREETING__NAME,
+// DataDSLPackage.Literals.GREETING__NAME,
// INVALID_NAME)
// }
// }
+
}
diff --git a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/valueconverter/DataDSLQualifiedNameProvider.java b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/valueconverter/DataDSLQualifiedNameProvider.java
index a38751d..60f22c2 100644
--- a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/valueconverter/DataDSLQualifiedNameProvider.java
+++ b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/valueconverter/DataDSLQualifiedNameProvider.java
@@ -23,9 +23,6 @@ import com.google.inject.Inject;
@SuppressWarnings("restriction")
public class DataDSLQualifiedNameProvider extends XbaseQualifiedNameProvider {
- @Inject
- private IQualifiedNameConverter qualifiedNameConverter;
-
@Override
public QualifiedName getFullyQualifiedName(EObject obj) {
if (obj == null) {

Back to the top