Diffstat (limited to 'org.eclipse.osbp.xtext.datainterchange/src')
-rw-r--r-- org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/DataDSL.xtext | 19
-rw-r--r-- org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/jvmmodel/DataDSLJvmModelInferrer.xtend | 529
-rw-r--r-- org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/jvmmodel/DataDSLModelGenerator.xtend | 124
-rw-r--r-- org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/scoping/DataDSLScopeProvider.xtend | 80
-rw-r--r-- org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/validation/DataDSLValidator.xtend | 19
5 files changed, 616 insertions, 155 deletions
diff --git a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/DataDSL.xtext b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/DataDSL.xtext
index ef4ac0d..9b55c17 100644
--- a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/DataDSL.xtext
+++ b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/DataDSL.xtext
@@ -131,10 +131,17 @@ DataInterchangeExpose:
DataInterchangeExportFilter:
{DataInterchangeExportFilter}
- ('where'
- '{' (attrFilter=AttributeFilter)? (refFilter=ReferenceFilter)? '}' )?
+ ('where' '{' (attrFilter=AttributeFilter)? (refFilter=ReferenceFilter)? '}' )?
+ ('join' join=DataInterchangeJoin)?
('hide' hiddenproperties+=DataInterchangeExportHide*)?;
+DataInterchangeJoin:
+ {DataInterchangeJoin} beanAttr1=DataInterchangeJoinAttr 'with' joinEntity=[entity::LEntity|LFQN] 'on' beanAttr2=DataInterchangeJoinAttr;
+
+DataInterchangeJoinAttr:
+ {DataInterchangeJoinAttr}
+ (refEntity=[entity::LEntityReference])? refProperty=[entity::LEntityAttribute];
+
DataInterchangeFilter:
{DataInterchangeFilter}
(attrFilter=AttributeFilter)? (refFilter=ReferenceFilter)?;
@@ -239,10 +246,14 @@ enum Operator:
notequals = 'notEquals' |
greaterthan = 'greaterThan' |
greaterthanorequalto = 'greatThanOrEqualTo' |
- lessthan = 'lessThen' |
+ lessthan = 'lessThan' |
lessthanorequalto = 'lessThanOrEqualTo' |
isnull = 'isNull' |
- isnotnull = 'isNotNull';
+ isnotnull = 'isNotNull' |
+ isin = 'isIn' |
+ isnotin = 'isNotIn' |
+ islike = 'isLike'
+ ;
enum Junction:
and = 'and'|
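Taken together, the grammar changes above let an export filter carry an optional join clause (DataInterchangeJoin) next to the where block, and add three operators (isIn, isNotIn, isLike) to the Operator enum used by attribute filters. A minimal sketch of the concrete syntax the new join rule admits, with purely illustrative entity and attribute names (a root bean holding a reference named region to a Region entity):

    join region id with Region on id

Here beanAttr1 is the optional reference plus attribute on the root bean, joinEntity is the entity joined against, and beanAttr2 is the attribute matched on the joined side.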
diff --git a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/jvmmodel/DataDSLJvmModelInferrer.xtend b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/jvmmodel/DataDSLJvmModelInferrer.xtend
index c7a32d8..be9c011 100644
--- a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/jvmmodel/DataDSLJvmModelInferrer.xtend
+++ b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/jvmmodel/DataDSLJvmModelInferrer.xtend
@@ -58,8 +58,10 @@ import org.eclipse.osbp.xtext.datainterchange.AttributeFilter
import org.eclipse.osbp.xtext.datainterchange.DataInterchange
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeBean
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeBlobMapping
+import org.eclipse.osbp.xtext.datainterchange.DataInterchangeFileCSV
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeFilter
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeGroup
+import org.eclipse.osbp.xtext.datainterchange.DataInterchangeJoinAttr
import org.eclipse.osbp.xtext.datainterchange.DataInterchangePackage
import org.eclipse.osbp.xtext.datainterchange.EntityManagerMode
import org.eclipse.osbp.xtext.datainterchange.ReferenceFilter
@@ -465,8 +467,8 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
@Override
public void buttonClick(ClickEvent event) {
log.debug("pressed «dataInterchange.name» import");
- «getConfigFileURL((dataInterchange.eContainer as DataInterchangeGroup).name)»
- «dataInterchange.getBasicRunConfiguration(false, WorkerThreadRunnable.Direction.IMPORT.name, null)»
+ «dataInterchange.getBasicClassnameLine(false)»
+ «dataInterchange.getBasicRunConfigurationDefaults("import")»
«dataInterchange.defaultVariableName».setName(UUID.randomUUID().toString());
«dataInterchange.defaultVariableName».setEventDispatcher(eventDispatcher);
«dataInterchange.defaultVariableName».setUi(UI.getCurrent());
@@ -492,8 +494,8 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
@Override
public void buttonClick(ClickEvent event) {
log.debug("pressed «dataInterchange.name» export");
- «getConfigFileURL((dataInterchange.eContainer as DataInterchangeGroup).name)»
- «dataInterchange.getBasicRunConfiguration(false, WorkerThreadRunnable.Direction.EXPORT.name, null)»
+ «dataInterchange.getBasicClassnameLine(false)»
+ «dataInterchange.getBasicRunConfigurationDefaults("export")»
«dataInterchange.defaultVariableName».setName(UUID.randomUUID().toString());
«dataInterchange.defaultVariableName».setEventDispatcher(eventDispatcher);
«dataInterchange.defaultVariableName».setUi(UI.getCurrent());
@@ -522,7 +524,9 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
return dataInterchange.name.toFirstLower
}
- def String getBasicRunConfiguration(DataInterchange dataInterchange, boolean fqClass, String direction, DataInterchange baseInterchange) {
+
+
+ def String getBasicClassnameLine(DataInterchange dataInterchange, boolean fqClass) {
var className = ""
if (fqClass) {
className = (dataInterchange.eContainer.eContainer as DataInterchangePackage).name + "." + dataInterchange.name
@@ -530,25 +534,47 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
else {
className = dataInterchange.name
}
- var URL = dataInterchange.produceAppropiateInterchangeURL(null)
-
return
'''
«className» «dataInterchange.getDefaultVariableName» = new «className»();
- «if(baseInterchange === null || (baseInterchange !== null && direction.equals(Direction.EXPORT.name))){
- getConfigFileURL((dataInterchange.eContainer as DataInterchangeGroup).name)
- dataInterchange.getConfigFileURLA(URL, direction)
- }»
+ '''
+ }
+
+
+ def String getBasicRunConfigurationDefaults(DataInterchange dataInterchange, String direction ) {
+
+ return
+ '''
«dataInterchange.getDefaultVariableName».setPersistenceService(persistenceService);
«dataInterchange.getDefaultVariableName».setDataInterchange(dataInterchange);
«dataInterchange.getDefaultVariableName».setEventDispatcher(eventDispatcher);
«dataInterchange.getDefaultVariableName».setBlobService(blobService);
-««« «IF dataInterchange.hasPostFunction && dataInterchange.postFunction.afterImport»postInterchangeExecution(«dataInterchange.postFunction.doExecuteFunction.fullyQualifiedName»);«ENDIF»
«if(direction.equals(Direction.IMPORT.name)){
'''«dataInterchange.getDefaultVariableName».setDeleteFileAfterImport(«dataInterchange.isDeleteFileAfterImport»);'''
'''
}
+
+
+ def String getBasicRunConfiguration(DataInterchange dataInterchange, boolean fqClass, String direction, DataInterchange baseInterchange) {
+ var URL = dataInterchange.produceAppropiateInterchangeURL(null)
+
+ return
+ '''
+ «dataInterchange.getBasicClassnameLine(fqClass)»
+ «getConfigFileURL((dataInterchange.eContainer as DataInterchangeGroup).name)»
+ «if(direction.empty){
+ dataInterchange.getConfigFileURLA(false, URL, null)
+ }
+ else if(baseInterchange === null || (baseInterchange !== null && direction.equals(Direction.EXPORT.name))){
+ dataInterchange.getConfigFileURLA(fqClass, URL, Direction.EXPORT)
+ } else if(direction.equals(Direction.IMPORT.name)){
+ dataInterchange.getConfigFileURLA(fqClass, URL, Direction.IMPORT)
+ }
+ »
+ «dataInterchange.getBasicRunConfigurationDefaults(direction)»
+ '''
+ }
def String getConfigFileURL(String groupname){
return
@@ -565,8 +591,56 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
}
'''
}
-
- def String getConfigFileURLA(DataInterchange dataInterchange, String fileURL, String direction){
+
+ def String getConfigFileURLA(DataInterchange dataInterchange, boolean fqClass, String fileURL, Direction direction){
+
+ var String directionName = ""
+ if (null === direction ) {
+ directionName = "\"+direction.name().toLowerCase()+\""
+ } else {
+ directionName = direction.name.toLowerCase()
+ }
+
+ var String setFileUrl='''setFileURL'''
+ if ( fqClass )
+ setFileUrl='''«dataInterchange.getDefaultVariableName».setFileURL'''
+
+ return
+ '''
+ File file = new File(url);
+ if(file.exists()) {
+ FileInputStream fileInput;
+ try {
+ fileInput = new FileInputStream(file);
+ Properties properties = new Properties();
+ properties.loadFromXML(fileInput);
+ fileInput.close();
+ if(properties.getProperty("«dataInterchange.name»-«directionName»") == null) {
+ «setFileUrl»("«fileURL»");
+ } else {
+ «setFileUrl»(properties.getProperty("«dataInterchange.name»-«directionName»"));
+ }
+ } catch (IOException e) {
+ StringWriter sw = new StringWriter();
+ e.printStackTrace(new PrintWriter(sw));
+ log.error("{}", sw.toString());
+ }
+ } else {
+ «setFileUrl»("«fileURL»");
+ }
+ '''
+ }
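The code generated by getConfigFileURLA above (and the getConfigFileURLB variant below) looks for per-interchange file-URL overrides in an XML properties file read via java.util.Properties.loadFromXML, keyed by "<interchangeName>-<direction>", and falls back to the modeled file URL otherwise. A self-contained sketch of how such an override file could be produced; the interchange name, target path and config file name are assumptions for illustration only:

    import java.io.OutputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.util.Properties;

    public class WriteInterchangeOverrides {
        public static void main(String[] args) throws Exception {
            Properties p = new Properties();
            // key pattern: "<interchangeName>-<direction>"; value: the file URL to use instead of the default
            p.setProperty("MyInterchange-export", "file:/C:/temp/MyInterchange.csv");
            p.setProperty("MyInterchange-import", "file:/C:/temp/MyInterchange.csv");
            try (OutputStream os = Files.newOutputStream(Paths.get("MyGroup-config.xml"))) {
                p.storeToXML(os, "data interchange file URL overrides");
            }
        }
    }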
+
+
+ def String getConfigFileURLB(DataInterchange dataInterchange, String fileURL, Direction direction){
+
+ var String directionName = ""
+ if (null === direction ) {
+ directionName = "\"+direction.name().toLowerCase()+\""
+ } else {
+ directionName = direction.name.toLowerCase()
+ }
+
return
'''
File file = new File(url);
@@ -577,10 +651,10 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
Properties properties = new Properties();
properties.loadFromXML(fileInput);
fileInput.close();
- if(properties.getProperty("«dataInterchange.name»-«direction.toLowerCase()»") == null) {
+ if(properties.getProperty("«dataInterchange.name»-«directionName»") == null) {
«dataInterchange.getDefaultVariableName».setFileURL("«fileURL»");
} else {
- «dataInterchange.getDefaultVariableName».setFileURL(properties.getProperty("«dataInterchange.name»-«direction.toLowerCase()»"));
+ «dataInterchange.getDefaultVariableName».setFileURL(properties.getProperty("«dataInterchange.name»-«directionName»"));
}
} catch (IOException e) {
StringWriter sw = new StringWriter();
@@ -593,6 +667,7 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
'''
}
+
/**
* <p>build the constructor for each class.</p>
@@ -614,6 +689,7 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
*/
def void toFields(JvmDeclaredType type, DataInterchange dataInterchange) {
var JvmField field = null
+
field = dataInterchange.toField("log", _typeReferenceBuilder.typeRef(Logger)) [setInitializer([ append('''LoggerFactory.getLogger("dataInterchange")''') ])]
field.final = true
field.static = true
@@ -634,6 +710,17 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
type.members += field
field = dataInterchange.toField("smooks", _typeReferenceBuilder.typeRef(Object))
type.members += field
+
+ if(dataInterchange.fileEndpoint instanceof DataInterchangeFileCSV && dataInterchange.path !== null && dataInterchange.path.size > 1){
+ var idx = 0
+ var firstEntity = dataInterchange.path.get(0)
+ for(bean : dataInterchange.path){
+ if(bean !== firstEntity){
+ field = dataInterchange.toField("smooks"+idx++, _typeReferenceBuilder.typeRef(Object))
+ type.members += field
+ }
+ }
+ }
}
/**
@@ -672,7 +759,28 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
* @return code fragment
*/
def String init(DataInterchange dataInterchange) {
- var firstEntity = (dataInterchange.path.iterator.next as DataInterchangeBean)
+ var firstBean = (dataInterchange.path.iterator.next as DataInterchangeBean)
+
+ var multicsv_smook = ''''''
+ var idx = 0
+ if(dataInterchange.fileEndpoint instanceof DataInterchangeFileCSV && dataInterchange.path !== null && dataInterchange.path.size > 1){
+ multicsv_smook = multicsv_smook.concat('''
+ if( "export".equals(direction.toString().toLowerCase() ) ) {
+ ''')
+ for(bean : dataInterchange.path){
+ if(bean !== firstBean){
+ multicsv_smook = multicsv_smook.concat(
+ '''
+ smooks«idx++» = dataInterchange.open(FrameworkUtil.getBundle(getClass()),"«DSLOutputConfigurationProvider.SMOOKS_OUTPUT_DIRECTORY»/«dataInterchange.name»_«bean.entity.name»-"+direction.toString().toLowerCase()+".xml");
+ ''')
+ }
+ }
+ multicsv_smook = multicsv_smook.concat('''
+ }
+ ''')
+ }
+
+ var URL = dataInterchange.produceAppropiateInterchangeURL(null)
var body =
'''
try {
@@ -688,9 +796,10 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
if (log.isDebugEnabled()) log.debug("initializing datainterchange factory");
// get entity manager
if (log.isDebugEnabled()) log.debug("opening entity manager to persist results");
- em = getPersistenceService().getEntityManagerFactory("«firstEntity.entity.persistenceUnit»").createEntityManager();
+ em = getPersistenceService().getEntityManagerFactory("«firstBean.entity.persistenceUnit»").createEntityManager();
if(dataInterchange != null) {
smooks = dataInterchange.open(FrameworkUtil.getBundle(getClass()),"«DSLOutputConfigurationProvider.SMOOKS_OUTPUT_DIRECTORY»/«dataInterchange.name»-"+direction.toString().toLowerCase()+".xml");
+ «multicsv_smook»
dataInterchange.setEventListener(this);
dataInterchange.setEntityManager(smooks, em);
}
@@ -702,6 +811,10 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
dataInterchange.enableReport(smooks, location);
}
«ENDIF»
+ «getConfigFileURL((dataInterchange.eContainer as DataInterchangeGroup).name)»
+ «dataInterchange.getConfigFileURLA(false, URL, null) »
+
+
} catch (TransformerConfigurationException | SAXException | IOException e) {
StringWriter sw = new StringWriter();
e.printStackTrace(new PrintWriter(sw));
@@ -709,47 +822,10 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
return false;
}
if(direction == Direction.EXPORT) {
- int openTry = 0;
- file = null;
- URI uri = null;
- try {
- uri = getFileURL().toURI();
- uri = new URI(uri.toString()+"tmp");
- } catch (URISyntaxException e) {
- StringWriter sw = new StringWriter();
- e.printStackTrace(new PrintWriter(sw));
- log.error("{}", sw.toString());
- return false;
- }
- do {
- try {
- exportPath = Paths.get(uri);
- // find a unique name - similar to given
- file = Files.newOutputStream(exportPath, StandardOpenOption.CREATE_NEW);
- } catch (FileAlreadyExistsException ae) {
- openTry ++;
- try {
- int pos = uri.toString().lastIndexOf('.');
- if(pos == -1) {
- uri = new URI(uri.toString()+openTry);
- } else {
- uri = new URI(uri.toString().substring(0,pos)+openTry+"."+uri.toString().substring(pos+1));
- }
- } catch (URISyntaxException e) {
- StringWriter sw = new StringWriter();
- e.printStackTrace(new PrintWriter(sw));
- log.error("{}", sw.toString());
- return false;
- }
- } catch (IOException e) {
- StringWriter sw = new StringWriter();
- e.printStackTrace(new PrintWriter(sw));
- log.error("{}", sw.toString());
- return false;
- }
- }while(file == null);
- log.debug("export temporary file ["+exportPath+"] created" );
- out = new BufferedOutputStream(file);
+ Object[] data = createExportFileComponents("«firstBean.entity.name»", null, true);
+ exportPath = (Path)data[0];
+ file = (OutputStream)data[1];
+ out = (OutputStream)data[2];
}
return true;
'''
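The unique-file-name loop removed here is replaced by a single call to createExportFileComponents, which this diff only uses (here and again in csvExports further down) but does not define. Based on the removed lines and the two call sites, a sketch of what such a helper might look like; the signature, the parameter semantics and the reliance on getFileURL() and the class logger are assumptions, not the shipped implementation:

    // Sketch only. Assumed context: the generated worker-thread class (log, getFileURL()).
    // entityName: used for logging; fileName: optional explicit file name;
    // useDefault: fall back to getFileURL() when true.
    Object[] createExportFileComponents(String entityName, String fileName, boolean useDefault) {
        int openTry = 0;
        OutputStream file = null;
        Path exportPath = null;
        URI uri = null;
        try {
            uri = useDefault ? getFileURL().toURI() : new File(fileName).toURI();
            uri = new URI(uri.toString() + "tmp");
        } catch (URISyntaxException e) {
            log.error("{}", e.toString());
            return null;
        }
        do {
            try {
                exportPath = Paths.get(uri);
                // find a unique name - similar to the given one
                file = Files.newOutputStream(exportPath, StandardOpenOption.CREATE_NEW);
            } catch (FileAlreadyExistsException ae) {
                openTry++;
                try {
                    int pos = uri.toString().lastIndexOf('.');
                    uri = (pos == -1)
                            ? new URI(uri.toString() + openTry)
                            : new URI(uri.toString().substring(0, pos) + openTry + "." + uri.toString().substring(pos + 1));
                } catch (URISyntaxException e) {
                    log.error("{}", e.toString());
                    return null;
                }
            } catch (IOException e) {
                log.error("{}", e.toString());
                return null;
            }
        } while (file == null);
        log.debug("export temporary file [" + exportPath + "] created for " + entityName);
        return new Object[] { exportPath, file, new BufferedOutputStream(file) };
    }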
@@ -768,7 +844,6 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
def String performInterchange(DataInterchange dataInterchange) {
var firstEntityBean = (dataInterchange.path.iterator.next as DataInterchangeBean)
var body = '''
- if (log.isDebugEnabled()) log.debug("{} - START - Task execution on [{}] ...", Thread.currentThread().getName().toUpperCase(), getFileURL().getPath().substring(1));
if(!init(getDirection())) {
return;
}
@@ -777,6 +852,7 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
return;
}
try {
+ if (log.isDebugEnabled()) log.debug("{} - START - Task execution on [{}] ...", Thread.currentThread().getName().toUpperCase(), getFileURL().getPath().substring(1));
log.info(getDirection().name()+" - Start of «dataInterchange.name».");
if(getDirection()==WorkerThreadRunnable.Direction.IMPORT) {
'''
@@ -856,22 +932,22 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder();
CriteriaQuery<Long> countQuery = criteriaBuilder.createQuery(Long.class);
Root<«root.entity.fullyQualifiedName»> fr = countQuery.from(«root.entity.fullyQualifiedName».class);
-««« filter for the count query
- «countQuery("fr", dataInterchange)»
+ «countQuery("fr", "countQuery", dataInterchange)»
Long count = em.createQuery(countQuery).getSingleResult();
- CriteriaQuery<«root.entity.fullyQualifiedName»> criteriaQuery = criteriaBuilder.createQuery(«root.entity.fullyQualifiedName».class);
- Root<«root.entity.fullyQualifiedName»> from = criteriaQuery.from(«root.entity.fullyQualifiedName».class);
-««« «dataInterchange.buildJoins» // only needed without explicit cascading
- CriteriaQuery<«root.entity.fullyQualifiedName»> select = criteriaQuery.multiselect(from);
-««« filter for the export query
- «exportQuery("from", dataInterchange)»
- TypedQuery<«root.entity.fullyQualifiedName»> typedQuery = em.createQuery(select);
- List<«root.entity.fullyQualifiedName»> allResults = typedQuery.getResultList();
- if (log.isDebugEnabled()) log.debug("evaluate root entity count");
- setLength(count*«IF dataInterchange.elementSize==0»10«ELSE»«dataInterchange.elementSize»«ENDIF»);
- setAvgElementSize(1);
- if (log.isDebugEnabled()) log.debug("root entity count is "+count.toString());
- if(count > 0) {
+
+ if(count > 0) {
+ CriteriaQuery<«root.entity.fullyQualifiedName»> criteriaQuery = criteriaBuilder.createQuery(«root.entity.fullyQualifiedName».class);
+ Root<«root.entity.fullyQualifiedName»> from = criteriaQuery.from(«root.entity.fullyQualifiedName».class);
+ CriteriaQuery<«root.entity.fullyQualifiedName»> select = criteriaQuery.multiselect(from);
+ «exportQuery("from", "select", dataInterchange)»
+ TypedQuery<«root.entity.fullyQualifiedName»> typedQuery = em.createQuery(select);
+ List<«root.entity.fullyQualifiedName»> allResults = typedQuery.getResultList();
+
+ if (log.isDebugEnabled()) log.debug("evaluate root entity count");
+ setLength(count*«IF dataInterchange.elementSize==0»10«ELSE»«dataInterchange.elementSize»«ENDIF»);
+ setAvgElementSize(1);
+ if (log.isDebugEnabled()) log.debug("root entity count is "+count.toString());
+
StringWriter writer = new StringWriter();
if(dataInterchange != null) {
dataInterchange.exportSource(smooks, allResults, writer);
@@ -879,30 +955,42 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
out.write(writer.toString().getBytes(«IF dataInterchange.fileEndpoint.encoding !== null»"«dataInterchange.fileEndpoint.encoding»"«ENDIF»));
String newname = exportPath.getFileName().toString();
renameFile(exportPath, newname.substring(0, newname.length()-3));
+
+ «IF firstEntityBean.markLatestExport»
+ if(allResults != null && !allResults.isEmpty()){
+ CriteriaQuery cq = criteriaBuilder.createQuery();
+ Root<«root.entity.fullyQualifiedName»> fr1 = cq.from(«root.entity.fullyQualifiedName».class);
+ List<String> ids = em.createQuery(cq.select(fr1.get("«root.entity.idAttributeName»"))«IF dataInterchange.exportFilter !== null»«buildAppropriateFilter("fr1", dataInterchange.exportFilter, null, false, false)»«ENDIF»).getResultList();
+ if (log.isDebugEnabled()) log.debug("mark results as latest export");
+ em.setProperty(QueryHints.PESSIMISTIC_LOCK, PessimisticLock.Lock);
+ em.getTransaction().begin();
+ em.createQuery("update «firstEntityBean.entity.name» set «firstEntityBean.latestExpProperty.name» = 1 where «root.entity.idAttributeName» in :ids").setParameter("ids", ids).executeUpdate();
+ if (log.isDebugEnabled()) log.debug("committing mark export");
+ em.getTransaction().commit();
+ }
+ «ENDIF»
+
+ CriteriaQuery<«root.entity.fullyQualifiedName»> query = criteriaBuilder.createQuery(«root.entity.fullyQualifiedName».class);
+ «exportQuery("from", "query", dataInterchange)»
+ query.select(query.from(«root.entity.fullyQualifiedName».class).get("«getBeanEntityIDAttribut(root.entity)»")); //.distinct(true); or the ID column name, respectively
+ TypedQuery<«root.entity.fullyQualifiedName»> idTypedQuery = em.createQuery(query);
+ List<«root.entity.fullyQualifiedName»> all«root.entity.name»Ids = idTypedQuery.getResultList();
+
+ «IF dataInterchange.fileEndpoint instanceof DataInterchangeFileCSV && dataInterchange.path !== null && dataInterchange.path.size >= 2»
+ «dataInterchange.csvMultiFileExport»
+ «ENDIF»
+
+ «IF dataInterchange.hasPostFunction && !dataInterchange.postFunction.afterImport»
+ log.info("post interchange function execution started ... >>> «dataInterchange.postFunction.doExecuteFunction.fullyQualifiedName»");
+ «dataInterchange.postFunction.doExecuteFunction.fullyQualifiedName»(Paths.get(getFileURL().getPath().substring(1)), getFilter(), all«root.entity.name»Ids);
+ log.info("post interchange function execution done ... >>> «dataInterchange.postFunction.doExecuteFunction.fullyQualifiedName»");
+ «ENDIF»
}
else{
deleteFile(exportPath, "EXPORT - ");
log.info("created file ["+exportPath.getFileName()+"] has been deleted for having no data!");
}
- «IF firstEntityBean.markLatestExport»
- if(allResults != null && !allResults.isEmpty()){
- CriteriaQuery cq = criteriaBuilder.createQuery();
- Root<«root.entity.fullyQualifiedName»> fr1 = cq.from(«root.entity.fullyQualifiedName».class);
- List<String> ids = em.createQuery(cq.select(fr1.get("«root.entity.idAttributeName»"))«IF dataInterchange.exportFilter !== null»«buildAppropriateFilter("fr1", dataInterchange.exportFilter, null, false, false)»«ENDIF»).getResultList();
- if (log.isDebugEnabled()) log.debug("mark results as latest export");
- em.setProperty(QueryHints.PESSIMISTIC_LOCK, PessimisticLock.Lock);
- em.getTransaction().begin();
- em.createQuery("update «firstEntityBean.entity.name» set «firstEntityBean.latestExpProperty.name» = 1 where «root.entity.idAttributeName» in :ids").setParameter("ids", ids).executeUpdate();
- if (log.isDebugEnabled()) log.debug("committing mark export");
- em.getTransaction().commit();
- }
- «ENDIF»
log.info("export finished");
- «IF dataInterchange.hasPostFunction && !dataInterchange.postFunction.afterImport»
- log.info("post interchange function execution started ... >>> «dataInterchange.postFunction.doExecuteFunction.fullyQualifiedName»");
- «dataInterchange.postFunction.doExecuteFunction.fullyQualifiedName»(Paths.get(getFileURL().getPath().substring(1)), getFilter());
- log.info("post interchange function execution done ... >>> «dataInterchange.postFunction.doExecuteFunction.fullyQualifiedName»");
- «ENDIF»
}
log.info(getDirection().name()+" of «dataInterchange.name» successfully ended!");
'''
@@ -929,13 +1017,13 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
log.error(getDirection().name()+" Execution of «dataInterchange.name»: failed due to: {}", sw.toString());
setExecutionFailed(true);
log.info("import failed");
- Path filePath = Paths.get(getFileURL().getPath().substring(1));
«IF dataInterchange.hasPostFunction && dataInterchange.postFunction.afterImport»
log.info("post interchange function execution started ... >>> «dataInterchange.postFunction.doExecuteFunction.fullyQualifiedName»");
«dataInterchange.postFunction.doExecuteFunction.fullyQualifiedName»(Paths.get(getFileURL().getPath().substring(1)), getFilter());
log.info("post interchange function execution done ... >>> «dataInterchange.postFunction.doExecuteFunction.fullyQualifiedName»");
«ENDIF»
«IF dataInterchange.isDeleteFileAfterImport»
+ Path filePath = Paths.get(getFileURL().getPath().substring(1));
if(Files.exists(filePath) && getDirection()==WorkerThreadRunnable.Direction.IMPORT) {
renameFile(filePath, "FAILEDIMPORT"+ getFormatter().format(new Date(System.currentTimeMillis())) + filePath.getFileName().toString()+ ".LOCKED");
}
@@ -1633,47 +1721,48 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
/**
* Returns the appropriate count query for the given datainterchange.
*/
- def String countQuery(String rootname, DataInterchange interchange){
+ def String countQuery(String rootname, String queryname, DataInterchange interchange){
if(interchange.hasActionFilter){
return
'''
if(isActionFilterExecutionNeeded()){
- countQuery«IF interchange.hasExportFilter»«buildAppropriateFilter(rootname, if(interchange.hasExportFilter)interchange.exportFilter else null, interchange.actionFilter, false, false)»«ENDIF».select(criteriaBuilder.count(«rootname»));
+ «queryname»«buildAppropriateFilter(rootname, if(interchange.hasExportFilter)interchange.exportFilter else null, interchange.actionFilter, true, false)».select(criteriaBuilder.count(«rootname»));
}else{
- countQuery«IF interchange.hasExportFilter»«buildAppropriateFilter(rootname, if(interchange.hasExportFilter)interchange.exportFilter else null, null, false, false)»«ENDIF».select(criteriaBuilder.count(«rootname»));
+ «queryname»«buildAppropriateFilter(rootname, if(interchange.hasExportFilter)interchange.exportFilter else null, null, false, false)».select(criteriaBuilder.count(«rootname»));
}
'''
}
- return '''countQuery«IF interchange.hasExportFilter»«buildAppropriateFilter(rootname, interchange.exportFilter, null, false, false)»«ENDIF».select(criteriaBuilder.count(«rootname»));'''
+ return '''«queryname»«IF interchange.hasExportFilter»«buildAppropriateFilter(rootname, interchange.exportFilter, null, false, false)»«ENDIF».select(criteriaBuilder.count(«rootname»));'''
}
/**
* Returns the appropriate export query for the given datainterchange bean.
*/
- def String exportQuery(String rootname, DataInterchange interchange){
+ def String exportQuery(String rootname, String queryname, DataInterchange interchange){
if(interchange.hasActionFilter){
var where1 = buildAppropriateFilter(rootname, if(interchange.hasExportFilter) interchange.exportFilter else null, interchange.actionFilter, true, false)
- var log1 = buildAppropriateFilter(rootname, if(interchange.hasExportFilter) interchange.exportFilter else null, interchange.actionFilter, true, true)
+// var log1 = buildAppropriateFilter(rootname, if(interchange.hasExportFilter) interchange.exportFilter else null, interchange.actionFilter, true, true)
var where2 = buildAppropriateFilter(rootname, if(interchange.hasExportFilter) interchange.exportFilter else null, null, false, false)
- var log2 = buildAppropriateFilter(rootname, if(interchange.hasExportFilter) interchange.exportFilter else null, null, false, true)
+// var log2 = buildAppropriateFilter(rootname, if(interchange.hasExportFilter) interchange.exportFilter else null, null, false, true)
return
'''
+ «IF where1 !== null && !where1.empty»
if(isActionFilterExecutionNeeded()){
- «IF where1 !== null && !where1.empty»
- select«where1»;«ENDIF»
- }else{
- «IF where2 !== null && !where2.empty»
- select«where2»;«ENDIF»
- }
+ «queryname»«where1»;
+ }«ENDIF»
+ «IF where2 !== null && !where2.empty»
+ else{
+ «queryname»«where2»;
+ }«ENDIF»
'''
}
if(interchange.hasExportFilter){
var where = buildAppropriateFilter(rootname, interchange.exportFilter, null, false, false)
- var log = buildAppropriateFilter(rootname, interchange.exportFilter, null, false, true)
+// var log = buildAppropriateFilter(rootname, interchange.exportFilter, null, false, true)
if(where !== null && !where.empty){
return
'''
- select«where»;
+ «queryname»«where»;
'''
}
}
@@ -1722,6 +1811,126 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
return results
}
+ def String csvMultiFileExport(DataInterchange dataInterchange){
+ if(dataInterchange !== null){
+ var result = ""
+ val firstBean = dataInterchange.path.get(0)
+ var idx = 0
+ var extract = <String>newArrayList
+ if(dataInterchange.path !== null && dataInterchange.path.size > 1){
+ for(bean : dataInterchange.path){
+ if(bean !== firstBean ){
+ extract.add( dataInterchange.csvCountQueries(firstBean, bean, idx, "####") )
+ idx++
+ }
+ }
+ result = getNextContent(extract)
+
+ }else{
+ result = dataInterchange.csvCountQueries(firstBean, firstBean, idx, "")
+ }
+ return result
+ }
+ return ''''''
+ }
+
+ def String getNextContent( List<String> allcontent){
+ var idx = 0
+ var start = allcontent.get(idx)
+ while( idx+1 < allcontent.size) {
+ start = start.replace("####", ''' «allcontent.get(idx+1)»''')
+ idx++
+ }
+ return start.replace("####", "");
+ }
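csvMultiFileExport collects one generated code fragment per secondary bean, each ending in a "####" placeholder, and getNextContent splices every fragment into the placeholder of the one before it, so each entity's sub-export only runs when its predecessor found data. A small self-contained Java sketch of that splicing technique (fragment contents are illustrative):

    import java.util.Arrays;
    import java.util.List;

    public class PlaceholderNestingDemo {
        // Same idea as getNextContent: fragment i+1 replaces the "####" slot of the
        // string built so far; the final leftover slot is removed.
        static String nest(List<String> fragments) {
            String result = fragments.get(0);
            for (int i = 1; i < fragments.size(); i++) {
                result = result.replace("####", " " + fragments.get(i));
            }
            return result.replace("####", "");
        }

        public static void main(String[] args) {
            List<String> blocks = Arrays.asList(
                    "if(count0 > 0) { /* export A */ #### }",
                    "if(count1 > 0) { /* export B */ #### }");
            // prints: if(count0 > 0) { /* export A */  if(count1 > 0) { /* export B */  } }
            System.out.println(nest(blocks));
        }
    }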
+
+ def String exportQueryForBean(DataInterchangeBean bean, DataInterchange dataInterchange){
+ var firstBean = dataInterchange.path.get(0)
+ var baseEntityName = '''«IF((firstBean.markLatestExport || firstBean.markLatestImport) && dataInterchange.path.size >=2)»all«dataInterchange.path.get(1).entity.name»Ids«ELSE»all«firstBean.entity.name»Ids«ENDIF»''';
+ var refName = getAppropriateRefName(dataInterchange.path.get(0), bean)
+
+ return
+ '''
+ CriteriaQuery<«bean.entity.fullyQualifiedName»> «bean.entity.name.toFirstLower»Query = criteriaBuilder.createQuery(«bean.entity.fullyQualifiedName».class);
+ Root<«bean.entity.fullyQualifiedName»> «bean.entity.name»Root = «bean.entity.name.toFirstLower»Query.from(«bean.entity.fullyQualifiedName».class);
+ CriteriaQuery<«bean.entity.fullyQualifiedName»> «bean.entity.name.toFirstLower»Select = «bean.entity.name.toFirstLower»Query.multiselect(«bean.entity.name»Root);
+ «IF !refName.empty»«bean.entity.name.toFirstLower»Select.where(«bean.entity.name»Root.get("«refName»").in(«baseEntityName»));«ENDIF»
+
+ TypedQuery<«bean.entity.fullyQualifiedName»> «bean.entity.name.toFirstLower»TypedQuery = em.createQuery(«bean.entity.name.toFirstLower»Select);
+ List<«bean.entity.fullyQualifiedName»> all«bean.entity.name.toFirstUpper»Results = «bean.entity.name.toFirstLower»TypedQuery.getResultList();
+
+ «dataInterchange.beanEntityIdList(bean, bean.entity.name+"Root", bean.entity.name+"idQuery2")»
+ '''
+ }
+
+ def String csvCountQueries(DataInterchange dataInterchange, DataInterchangeBean firstBean, DataInterchangeBean bean, int idx, String subpart){
+ var baseEntityName = '''«IF((firstBean.markLatestExport || firstBean.markLatestImport) && dataInterchange.path.size >=2)»all«dataInterchange.path.get(1).entity.name»Ids«ELSE»all«firstBean.entity.name»Ids«ENDIF»''';
+ var refName = getAppropriateRefName(firstBean, bean)
+
+ return
+ '''
+ CriteriaQuery<Long> countQuery«idx» = criteriaBuilder.createQuery(Long.class);
+ Root<«bean.entity.fullyQualifiedName»> from«idx» = countQuery«idx».from(«bean.entity.fullyQualifiedName».class);
+ Long count«idx» = em.createQuery(countQuery«idx»«IF !refName.empty».where(from«idx».get("«refName»").in(«baseEntityName»))«ENDIF».select(criteriaBuilder.count(from«idx»))).getSingleResult();
+ «dataInterchange.csvExports(firstBean, bean, idx, subpart)»
+ '''
+ }
+
+ /**
+ * Determines, from the existing references and any explicitly defined relation,
+ * which reference name (ref_name) to use.
+ */
+ def String getAppropriateRefName(DataInterchangeBean previousBean, DataInterchangeBean bean){
+ //TODO later with explicit selection of which ref we handle count1 // region
+ if(previousBean !== null && bean !== null && previousBean !== bean){
+ for(ref : previousBean.entity.allReferences){
+ if(ref.type == bean.entity){
+ return ref.opposite.name
+ }
+ }
+ }
+ return ""
+ }
+
+ def String csvExports(DataInterchange dataInterchange, DataInterchangeBean firstBean, DataInterchangeBean bean, int idx, String subpart){
+ var filename = if(bean.fileName !== null && !bean.fileName.empty) bean.fileName else dataInterchange.getDataInterchangeFileName.replace(".csv", "_"+firstBean.entity.name+"_"+bean.entity.name+".csv")
+ return
+ '''
+ if(count«idx» > 0) {
+ log.info("sub-export for entity «bean.entity.name» started");
+ «exportQueryForBean(bean, dataInterchange)»
+ //create the new file for «bean.entity.name»
+ Object[] data«idx» = createExportFileComponents("«bean.entity.name»", "«filename»" , false);
+ Path exportPath«idx» = (Path) data«idx»[0];
+ OutputStream file«idx» = (OutputStream) data«idx»[1];
+ OutputStream out«idx» = (OutputStream) data«idx»[2];
+ StringWriter writer«idx» = new StringWriter();
+ if(dataInterchange != null) {
+ dataInterchange.setEntityManager(smooks«idx», em);
+ dataInterchange.exportSource(smooks«idx», all«bean.entity.name.toFirstUpper»Results, writer«idx»);
+ }
+ if(out«idx» != null && file«idx» != null){
+ out«idx».write(writer«idx».toString().getBytes());
+ String newname«idx» = exportPath«idx».getFileName().toString();
+ renameFile(exportPath«idx», newname«idx».substring(0, newname«idx».length()-3));
+ }
+ if(out«idx» != null){
+ out«idx».close();
+ }
+ if(file«idx» != null){
+ file«idx».close();
+ }
+ if(writer«idx» != null){
+ writer«idx».close();
+ }
+ «subpart»
+ }
+ else{
+ log.info("No data to export for «bean.entity.name». No sub-export for entity «bean.entity.name» executed!");
+ }
+ '''
+ }
+
def List<WorkerThreadRunnable.Parameter> getRefAttributes(ReferenceFilter filter){
var results = <WorkerThreadRunnable.Parameter>newArrayList
if(filter !== null && filter.refEntity !== null){
@@ -1735,7 +1944,6 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
results.add(new WorkerThreadRunnable.Parameter(filter.refEntity.name, filter.value))
}
}
-
if(filter.subCondition !== null){
results.addAll(filter.subCondition.refAttributes)
}
@@ -1756,4 +1964,105 @@ class DataDSLJvmModelInferrer extends AbstractModelInferrer {
}
return false
}
+
+ def String getBeanEntityIDAttribut(LEntity entity){
+ if(entity !== null){
+ for(refProperty : entity.allFeatures){
+ if(refProperty.UUID || refProperty.id) {
+ return refProperty.name
+ }
+ }
+ return "id" //as defaultParam
+ }
+ return null
+ }
+
+ def String beanEntityIdList(DataInterchange dataInterchange, DataInterchangeBean bean, String rootname, String queryname){
+ var firstBean = dataInterchange.path.get(0)
+ var preBean = dataInterchange.previousBean(bean)
+ var refName = getAppropriateRefName(preBean, bean)
+ var baseEntityIdList = '''«IF preBean !== null»all«IF preBean === firstBean && ((firstBean.markLatestExport || firstBean.markLatestImport) && dataInterchange.path.size >=2)»«dataInterchange.path.get(1).entity.name»«ELSE»«preBean.entity.name»«ENDIF»«IF preBean === firstBean»Ids«ELSE»ids«ENDIF»«ENDIF»'''
+
+ return
+ '''
+ CriteriaQuery<«bean.entity.fullyQualifiedName»> «queryname» = criteriaBuilder.createQuery(«bean.entity.fullyQualifiedName».class);
+ «queryname»«IF !refName.empty».where(«rootname».get("«refName»").in(«baseEntityIdList»))«ENDIF».select(«queryname».from(«bean.entity.fullyQualifiedName».class).get("«getBeanEntityIDAttribut(bean.entity)»")); //.distinct(true); or the ID column name, respectively
+ TypedQuery<«bean.entity.fullyQualifiedName»> idTypedQuery«queryname» = em.createQuery(«queryname»);
+ List<«bean.entity.fullyQualifiedName»> all«bean.entity.name»ids = idTypedQuery«queryname».getResultList();
+ '''
+ }
+
+ def DataInterchangeBean previousBean(DataInterchange dataInterchange, DataInterchangeBean bean){
+ if(dataInterchange !== null && dataInterchange.path !== null && !dataInterchange.path.empty && bean !== null){
+ var firstBean = dataInterchange.path.get(0)
+
+ if(firstBean === bean){
+ // only for the special cases of markLatestImport or markLatestExport
+ if((firstBean.markLatestExport || firstBean.markLatestImport) && dataInterchange.path.size >=2){
+ return dataInterchange.path.get(1);
+ }
+ return null
+ }
+ for(b : dataInterchange.path){
+ if(bean === b){
+ var idx = dataInterchange.path.indexOf(b)
+ if(idx !== -1 && idx-1 >= 0){
+ return dataInterchange.path.get(idx-1)
+ }
+ }
+ }
+ }
+ return null
+ }
+
+ def String generateJoin(DataInterchange dataInterchange){
+ if(dataInterchange.hasExportJoin){
+ var baseEntity = dataInterchange.path.get(0).entity
+ var joinEntity = dataInterchange.exportFilter.join.joinEntity
+ var attr1 = joinAttributeName(dataInterchange.exportFilter.join.beanAttr1, true)
+ var attr2 = joinAttributeName(dataInterchange.exportFilter.join.beanAttr2, false)
+
+// return
+// '''
+// CriteriaQuery<«bean.entity.fullyQualifiedName»> «bean.entity.name.toFirstLower»Query = criteriaBuilder.createQuery(«bean.entity.fullyQualifiedName».class);
+// Metamodel metamodel = em.getMetamodel();
+// EntityType<«bean.entity.fullyQualifiedName»> «bean.entity.name»EType = metamodel.entity(«bean.entity.fullyQualifiedName».class);
+//
+// Root<«bean.entity.fullyQualifiedName»> «bean.entity.name»Root = «bean.entity.name.toFirstLower»Query.from(«bean.entity.fullyQualifiedName».class);
+//
+// Join<«bean.entity.fullyQualifiedName», «bean1.entity.fullyQualifiedName»> «bean.entity.name»Join = criteriaBuilder.join(«bean.entity.name»EType.«joinattr»);
+//
+// CriteriaQuery<«bean.entity.fullyQualifiedName»> «bean.entity.name.toFirstLower»Select = «bean.entity.name.toFirstLower»Query.multiselect(«bean.entity.name»Root);
+//
+// TypedQuery<«bean.entity.fullyQualifiedName»> «bean.entity.name.toFirstLower»TypedQuery = em.createQuery(«bean.entity.name.toFirstLower»Select);
+// List<«bean.entity.fullyQualifiedName»> all«bean.entity.name.toFirstUpper»Results = «bean.entity.name.toFirstLower»TypedQuery.getResultList();
+// '''
+ return
+ '''
+ CriteriaQuery<«baseEntity.fullyQualifiedName»> beanCriteriaQuery = criteriaBuilder.createQuery(«baseEntity.fullyQualifiedName».class);
+ Root<«baseEntity.fullyQualifiedName»> from = beanCriteriaQuery.from(«baseEntity.fullyQualifiedName».class);
+ ««« CriteriaQuery<«bean1.entity.fullyQualifiedName»> beanSelect = criteriaQuery.multiselect(from);
+ ««« «exportQuery("from", "select", dataInterchange)»
+ ««« TypedQuery<«bean1.entity.fullyQualifiedName»> beanTypedQuery = em.createQuery("SELECT attr1 FROM Region bean1, StateProvince bean2 WHERE bean2.attr2 = attr1", «bean1.entity.fullyQualifiedName».class);
+ TypedQuery<«baseEntity.fullyQualifiedName»> beanTypedQuery = em.createQuery("SELECT «attr1» FROM «baseEntity.fullyQualifiedName» bean1, «joinEntity.fullyQualifiedName» bean2 WHERE bean2.«attr2» = «attr1»", «baseEntity.fullyQualifiedName».class);
+ List<«baseEntity.fullyQualifiedName»> allResults = beanTypedQuery.getResultList();
+
+ '''
+ }
+ return ''''''
+ }
+
+ def boolean hasExportJoin(DataInterchange dataInterchange){
+ if( dataInterchange.hasExportFilter && dataInterchange.exportFilter.join !== null && dataInterchange.exportFilter.join.beanAttr1 !== null
+ && dataInterchange.exportFilter.join.joinEntity !== null && dataInterchange.exportFilter.join.beanAttr2 !== null){
+ return true
+ }
+ return false
+ }
+
+ def String joinAttributeName(DataInterchangeJoinAttr attr, boolean baseAttr){
+ //TODO add logic here
+ return null
+ }
+
}
diff --git a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/jvmmodel/DataDSLModelGenerator.xtend b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/jvmmodel/DataDSLModelGenerator.xtend
index 48a0437..a33c674 100644
--- a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/jvmmodel/DataDSLModelGenerator.xtend
+++ b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/jvmmodel/DataDSLModelGenerator.xtend
@@ -249,9 +249,10 @@ class DataDSLModelGenerator extends I18NModelGenerator {
if(it.fileEndpoint instanceof DataInterchangeFileCSV && it.path !== null && it.path.size > 1){
var firstBean = it.path.get(0)
for(bean : it.path){
- if(bean !== firstBean && bean.fileName !== null && !bean.fileName.empty){
- properties.put('''«it.name»_«bean.entity.name»-«WorkerThreadRunnable.Direction.EXPORT.toString().toLowerCase()»'''.toString, it.produceAppropiateInterchangeURL(bean.fileName))
- properties.put('''«it.name»_«bean.entity.name»-«WorkerThreadRunnable.Direction.IMPORT.toString().toLowerCase()»'''.toString, it.produceAppropiateInterchangeURL(bean.fileName))
+ if(bean !== firstBean){
+ var dataInterChangeFileName = if(bean.fileName !== null && !bean.fileName.empty) bean.fileName else it.getDataInterchangeFileName.replace(".csv", "_"+firstBean.entity.name+"_"+bean.entity.name+".csv")
+ properties.put('''«it.name»_«bean.entity.name»-«WorkerThreadRunnable.Direction.EXPORT.toString().toLowerCase()»'''.toString, it.produceAppropiateInterchangeURL(dataInterChangeFileName))
+ properties.put('''«it.name»_«bean.entity.name»-«WorkerThreadRunnable.Direction.IMPORT.toString().toLowerCase()»'''.toString, it.produceAppropiateInterchangeURL(dataInterChangeFileName))
}
}
}
@@ -271,9 +272,10 @@ class DataDSLModelGenerator extends I18NModelGenerator {
if(it.fileEndpoint instanceof DataInterchangeFileCSV && it.path !== null && it.path.size > 1){
var firstBean = it.path.get(0)
for(bean : it.path){
- if(bean !== firstBean && bean.fileName !== null && !bean.fileName.empty){
- properties.put('''«it.name»_«bean.entity.name»-«WorkerThreadRunnable.Direction.EXPORT.toString().toLowerCase()»'''.toString, it.produceAppropiateInterchangeURL(bean.fileName))
- properties.put('''«it.name»_«bean.entity.name»-«WorkerThreadRunnable.Direction.IMPORT.toString().toLowerCase()»'''.toString, it.produceAppropiateInterchangeURL(bean.fileName))
+ var dataInterChangeFileName = if(bean.fileName !== null && !bean.fileName.empty) bean.fileName else it.getDataInterchangeFileName.replace(".csv", "_"+firstBean.entity.name+"_"+bean.entity.name+".csv")
+ if(bean !== firstBean){
+ properties.put('''«it.name»_«bean.entity.name»-«WorkerThreadRunnable.Direction.EXPORT.toString().toLowerCase()»'''.toString, it.produceAppropiateInterchangeURL(dataInterChangeFileName))
+ properties.put('''«it.name»_«bean.entity.name»-«WorkerThreadRunnable.Direction.IMPORT.toString().toLowerCase()»'''.toString, it.produceAppropiateInterchangeURL(dataInterChangeFileName))
}
}
}
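Both property-generation loops now fall back to a derived per-entity file name whenever a secondary bean does not declare one: the interchange's CSV file name gets the root entity and the bean entity appended. A one-line sketch of that derivation with purely illustrative names:

    // interchange file "orders.csv", root entity Order, secondary entity OrderItem (all names illustrative)
    String derived = "orders.csv".replace(".csv", "_Order_OrderItem.csv");   // -> "orders_Order_OrderItem.csv"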
@@ -397,10 +399,8 @@ class DataDSLModelGenerator extends I18NModelGenerator {
}
}
- def void generateExportConfigStub(IFileSystemAccess fsa, DataInterchange dataInterchange) {
- var body = ""
- dbf.namespaceAware = true
- var document = domImpl.createDocument("http://www.milyn.org/xsd/smooks-1.1.xsd", "smooks-resource-list", null)
+ def Document createEmptyDocumentForExport(){
+ var document = domImpl.createDocument("http://www.milyn.org/xsd/smooks-1.1.xsd", "smooks-resource-list", null)
var config = document.createElement("resource-config")
var selector = document.createAttribute("selector")
@@ -414,9 +414,18 @@ class DataDSLModelGenerator extends I18NModelGenerator {
pEl.appendChild(value)
config.appendChild(pEl)
document.documentElement.appendChild(config)
+
+ return document
+ }
+
+ def void generateExportConfigStub(IFileSystemAccess fsa, DataInterchange dataInterchange) {
+ dbf.namespaceAware = true
+ var Document document = null
+ var csvDocs = <String,Document>newHashMap()
var cartridges = <String,String>newHashMap()
- cartridges.put("xmlns:jb", "http://www.milyn.org/xsd/smooks/javabean-1.4.xsd")
+ cartridges.put("xmlns:jb", "http://www.milyn.org/xsd/smooks/javabean-1.4.xsd")
+
switch (dataInterchange.fileEndpoint) {
DataInterchangeFileCSV: {
var delimiter = ""
@@ -429,37 +438,91 @@ class DataDSLModelGenerator extends I18NModelGenerator {
if(csv.quoteCharacter !== null) {
quote = StringEscapeUtils.unescapeHtml(csv.quoteCharacter)
}
- dataInterchange.generateExportConfig(document, dataInterchange.fileEndpoint, delimiter, quote)
+
+ if(dataInterchange.path !== null && dataInterchange.path.size >= 1){
+ dataInterchange.generateCSVExportConfig(csvDocs, dataInterchange.fileEndpoint, delimiter, quote)
+ }
+
}
DataInterchangeFileXML: {
+ if(document === null){document = createEmptyDocumentForExport}
cartridges.put("xmlns:ftl", "http://www.milyn.org/xsd/smooks/freemarker-1.1.xsd")
dataInterchange.generateExportConfig(document, dataInterchange.fileEndpoint, null, null)
}
DataInterchangeFileEDI: {
+ if(document === null){document = createEmptyDocumentForExport}
}
DataInterchangeFileFixed: {
+ if(document === null){document = createEmptyDocumentForExport}
cartridges.put("xmlns:ftl", "http://www.milyn.org/xsd/smooks/freemarker-1.1.xsd")
dataInterchange.generateExportConfig(document, dataInterchange.fileEndpoint, null, null)
}
}
- for(cdg:cartridges.keySet) {
- document.documentElement.setAttributeNS("http://www.w3.org/2000/xmlns/", cdg, cartridges.get(cdg))
+
+ if(dataInterchange.fileEndpoint instanceof DataInterchangeFileCSV){
+ var firstDoc = csvDocs.values.get(0)
+ for(entityName : csvDocs.keySet){
+ var doc = csvDocs.get(entityName)
+
+ for(cdg:cartridges.keySet) {
+ doc.documentElement.setAttributeNS("http://www.w3.org/2000/xmlns/", cdg, cartridges.get(cdg))
+ }
+
+ var source = new DOMSource(doc)
+ var res = new DataResult()
+ transformer.transform(source, res)
+ var newBody = res.result
+
+ if(doc === firstDoc){
+ fsa.generateFile('''«dataInterchange.name»-«WorkerThreadRunnable.Direction.EXPORT.toString().toLowerCase()».xml''', DSLOutputConfigurationProvider.SMOOKS_OUTPUT_ONCE, newBody)
+ }
+ else{
+ fsa.generateFile('''«dataInterchange.name»_«entityName»-«WorkerThreadRunnable.Direction.EXPORT.toString().toLowerCase()».xml''', DSLOutputConfigurationProvider.SMOOKS_OUTPUT_ONCE, newBody)
+ }
+ }
+
+ }else{
+
+ for(cdg:cartridges.keySet) {
+ document.documentElement.setAttributeNS("http://www.w3.org/2000/xmlns/", cdg, cartridges.get(cdg))
+ }
+ var source = new DOMSource(document)
+ var res = new DataResult()
+ transformer.transform(source, res)
+ var body = res.result
+
+ fsa.generateFile('''«dataInterchange.name»-«WorkerThreadRunnable.Direction.EXPORT.toString().toLowerCase()».xml''', DSLOutputConfigurationProvider.SMOOKS_OUTPUT_ONCE, body)
}
- var source = new DOMSource(document)
- var res = new DataResult()
- transformer.transform(source, res)
- body = res.result
- fsa.generateFile('''«dataInterchange.name»-«WorkerThreadRunnable.Direction.EXPORT.toString().toLowerCase()».xml''', DSLOutputConfigurationProvider.SMOOKS_OUTPUT_ONCE, body)
+ }
+
+ def generateCSVExportConfig(DataInterchange dataInterchange, HashMap<String, Document> csvDocs, DataInterchangeFile endPoint, String delimiter, String quote) {
+
+ for(bean : dataInterchange.path) {
+ var vectorMap = <String,String>newHashMap
+ var fieldList = <LFeature>newArrayList
+ var String rootEntityName = null
+ var entityName = (bean.entity as LAnnotationTarget).toName.toString
+
+ if (rootEntityName === null && !bean.isMarkLatestImport) {
+ rootEntityName = (bean.entity as LAnnotationTarget).toName.toString
+ }
+ var doc = createEmptyDocumentForExport
+// dataInterchange.generateExportConfig(doc, dataInterchange.fileEndpoint, delimiter, quote)
- if(dataInterchange.fileEndpoint instanceof DataInterchangeFileCSV && dataInterchange.path !== null && dataInterchange.path.size > 1){
- var firstBean = dataInterchange.path.get(0)
- for(bean : dataInterchange.path){
- if(bean !== firstBean && bean.fileName !== null && !bean.fileName.empty){
- fsa.generateFile('''«dataInterchange.name»_«bean.entity.name»-«WorkerThreadRunnable.Direction.EXPORT.toString().toLowerCase()».xml''', DSLOutputConfigurationProvider.SMOOKS_OUTPUT_ONCE, body)
- }
+ //TODO for each entity bean the field list must be created individually
+ for (f : bean.entity.allFeatures) {
+ createExportAttribute(dataInterchange, bean, f, fieldList, vectorMap)
+ if(f instanceof LEntityAttribute && f.type instanceof LBean) {
+ var beanAttr = (f as LEntityAttribute).type as LBean
+ for(ba : beanAttr.allFeatures) {
+ createExportAttribute(dataInterchange, bean, ba, fieldList, vectorMap)
+ }
+ }
}
- }
+ createFreemarker(doc, createCsvTemplate(entityName, fieldList, delimiter, quote, dataInterchange.path), endPoint)
+ csvDocs.put(entityName, doc)
+ }
}
def void generateImportConfigStub(IFileSystemAccess fsa, DataInterchange dataInterchange) {
@@ -561,15 +624,6 @@ class DataDSLModelGenerator extends I18NModelGenerator {
transformer.transform(source, res)
body = res.result
fsa.generateFile('''«dataInterchange.name»-«WorkerThreadRunnable.Direction.IMPORT.toString().toLowerCase()».xml''', DSLOutputConfigurationProvider.SMOOKS_OUTPUT_ONCE, body)
-
- if(dataInterchange.fileEndpoint instanceof DataInterchangeFileCSV && dataInterchange.path !== null && dataInterchange.path.size > 1){
- var firstBean = dataInterchange.path.get(0)
- for(bean : dataInterchange.path){
- if(bean !== firstBean && bean.fileName !== null && !bean.fileName.empty){
- fsa.generateFile('''«dataInterchange.name»_«bean.entity.name»-«WorkerThreadRunnable.Direction.IMPORT.toString().toLowerCase()».xml''', DSLOutputConfigurationProvider.SMOOKS_OUTPUT_ONCE, body)
- }
- }
- }
}
def String makePath(String fileURL, String location) {
diff --git a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/scoping/DataDSLScopeProvider.xtend b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/scoping/DataDSLScopeProvider.xtend
index f88721d..16e39e0 100644
--- a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/scoping/DataDSLScopeProvider.xtend
+++ b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/scoping/DataDSLScopeProvider.xtend
@@ -37,6 +37,7 @@ import org.eclipse.osbp.xtext.datainterchange.DataInterchangeBean
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeEntityExpression
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeExportFilter
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeExpose
+import org.eclipse.osbp.xtext.datainterchange.DataInterchangeJoin
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeLookup
import org.eclipse.osbp.xtext.datainterchange.DataInterchangeMarkerEntity
import org.eclipse.osbp.xtext.datainterchange.EntityManagerMode
@@ -101,7 +102,15 @@ class DataDSLScopeProvider extends AbstractDataDSLScopeProvider {
return getScope_keyProperty(context, reference)
} else if (reference == DataDSLPackage.Literals.DATA_INTERCHANGE_BEAN__REF_DATA_SOURCE) {
return getScope_Data_Bean_Entity_ReferenceAttribut(context, reference)
- } else {
+ } else if (reference == DataDSLPackage.Literals.DATA_INTERCHANGE_JOIN_ATTR__REF_PROPERTY) {
+ return getJoinAttributes(context, reference)
+ } else if (reference == DataDSLPackage.Literals.DATA_INTERCHANGE_JOIN_ATTR__REF_ENTITY) {
+ return getJoinRefAttributes(context, reference)
+ } else if (reference == DataDSLPackage.Literals.DATA_INTERCHANGE_JOIN__JOIN_ENTITY) {
+ return getJoinEntities(context, reference)
+ }
+
+ else {
super.getScope(context, reference)
}
}
@@ -517,5 +526,72 @@ class DataDSLScopeProvider extends AbstractDataDSLScopeProvider {
}
return result
}
-
+
+ def getJoinEntities(EObject context, EReference reference) {
+ var result = <IEObjectDescription>newArrayList
+ var eObj = context.eContainer
+ while (!(eObj instanceof DataInterchange)) {
+ eObj = eObj.eContainer
+ }
+ if (eObj !== null) {
+ var interchange = (eObj as DataInterchange)
+ if (interchange !== null) {
+ for (bean : interchange.path) {
+ result.add(EObjectDescription.create(bean.entity.name, bean.entity))
+ }
+ }
+ return MapBasedScope.createScope(IScope.NULLSCOPE, result)
+ }
+ }
+
+ def getJoinAttributes(EObject context, EReference reference) {
+ var result = <IEObjectDescription>newArrayList
+ var eObj = context.eContainer
+ var LEntity rootentity = null
+
+ if(context instanceof DataInterchangeJoin){
+ rootentity = context.joinEntity
+ }
+
+ while (!(eObj instanceof DataInterchange)) {
+ eObj = eObj.eContainer
+ }
+ if (eObj !== null) {
+ if (rootentity === null) {
+ rootentity = (eObj as DataInterchange).path.get(0).entity
+ }
+ for (prop : rootentity.allFeatures) {
+ if (prop instanceof LEntityAttribute) {
+ result.add(EObjectDescription.create((prop as LEntityFeature).name, (prop as LAttribute)))
+ }
+ }
+ return MapBasedScope.createScope(IScope.NULLSCOPE, result)
+ }
+ }
+
+ def getJoinRefAttributes(EObject context, EReference reference) {
+ var result = <IEObjectDescription>newArrayList
+ var eObj = context.eContainer
+ var LEntity rootentity = null
+
+ if(context instanceof DataInterchangeJoin){
+ rootentity = context.joinEntity
+ }
+
+ while (!(eObj instanceof DataInterchange)) {
+ eObj = eObj.eContainer
+ }
+ if (eObj !== null) {
+ if (rootentity === null) {
+ rootentity = (eObj as DataInterchange).path.get(0).entity
+ }
+ for (prop : rootentity.allFeatures) {
+ if (prop instanceof LEntityReference && !prop.toMany) {
+ result.add(EObjectDescription.create((prop as LEntityFeature).name, (prop as LReference)))
+ }
+ }
+ return MapBasedScope.createScope(IScope.NULLSCOPE, result)
+ }
+ }
+
}
diff --git a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/validation/DataDSLValidator.xtend b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/validation/DataDSLValidator.xtend
index 816e8f4..d9df899 100644
--- a/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/validation/DataDSLValidator.xtend
+++ b/org.eclipse.osbp.xtext.datainterchange/src/org/eclipse/osbp/xtext/datainterchange/validation/DataDSLValidator.xtend
@@ -41,10 +41,12 @@ class DataDSLValidator extends AbstractDataDSLValidator {
def checkPostFunctionInterchange(DataInterchangePostFunction post){
if(post !== null && post.doExecuteFunction !== null && post.doExecuteFunction.params !== null){
if(!post.doExecuteFunction.params.empty){
- if(post.doExecuteFunction.params.size >=2){
+ if(post.doExecuteFunction.params.size >=3){
var type1 = post.doExecuteFunction.params.get(0).parameterType.type.qualifiedName
var params = post.doExecuteFunction.params.get(1)
var type2 = params.parameterType.type.qualifiedName
+ var params1 = post.doExecuteFunction.params.get(2)
+ var type3 = params1.parameterType.type.qualifiedName
if(!type1.equals("java.nio.file.Path")){
error("The first parameter of the chosen post function must be from type java.nio.file.Path", post, DataDSLPackage.Literals.DATA_INTERCHANGE_POST_FUNCTION__DO_EXECUTE_FUNCTION)
@@ -53,8 +55,12 @@ class DataDSLValidator extends AbstractDataDSLValidator {
error("The second parameter of the chosen post function must be from type HashMap<String,Parameter>,
\nwith Parameter from org.eclipse.osbp.xtext.datainterchange.common.WorkerThreadRunnable.Parameter .",
post, DataDSLPackage.Literals.DATA_INTERCHANGE_POST_FUNCTION__DO_EXECUTE_FUNCTION)
+ }
+ if(!type3.equals("java.util.List")){
+ error("The third parameter of the chosen post function must be from type java.util.List",
+ post, DataDSLPackage.Literals.DATA_INTERCHANGE_POST_FUNCTION__DO_EXECUTE_FUNCTION)
}else{
- // check the first 2 parameters of the function
+ // check the two type parameters of the HashMap, the second parameter of the post function
var param0 = (params.parameterType.eContents.get(0) as JvmParameterizedTypeReference).qualifiedName // String
var param1 = (params.parameterType.eContents.get(1) as JvmParameterizedTypeReference).qualifiedName // org.eclipse.osbp.xtext.datainterchange.common.WorkerThreadRunnable.Parameter
if(!param0.equals("java.lang.String")){
@@ -68,13 +74,13 @@ class DataDSLValidator extends AbstractDataDSLValidator {
}
}
else{
- error("The chosen post function must have at least two parameters from types java.nio.file.Path and HashMap<String,Parameter> in that order!
+ error("The chosen post function must have at least three parameters from types java.nio.file.Path, HashMap<String,Parameter> and java.util.List in that order!
\nWith Parameter from org.eclipse.osbp.xtext.datainterchange.common.WorkerThreadRunnable.Parameter.",
post, DataDSLPackage.Literals.DATA_INTERCHANGE_POST_FUNCTION__DO_EXECUTE_FUNCTION)
}
}
else{
- error("The chosen post function must have at least two parameters from types java.nio.file.Path and HashMap<String,Parameter> in that order!
+ error("The chosen post function must have at least three parameters from types java.nio.file.Path, HashMap<String,Parameter> and java.util.List in that order!
\nWith Parameter from org.eclipse.osbp.xtext.datainterchange.common.WorkerThreadRunnable.Parameter.",
post, DataDSLPackage.Literals.DATA_INTERCHANGE_POST_FUNCTION__DO_EXECUTE_FUNCTION )
}
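With this change the validator accepts a post-interchange function only if it takes three parameters: a java.nio.file.Path, a HashMap<String, Parameter> (Parameter from org.eclipse.osbp.xtext.datainterchange.common.WorkerThreadRunnable) and a java.util.List, matching the extra ID-list argument the inferrer now passes. A sketch of a signature that would pass the check, written as plain Java with illustrative names (post functions normally live in a function library model, so this is only the expected shape):

    import java.nio.file.Path;
    import java.util.HashMap;
    import java.util.List;

    import org.eclipse.osbp.xtext.datainterchange.common.WorkerThreadRunnable;

    public class OrderPostFunctions {
        // 1st: the interchange file, 2nd: the active filter parameters,
        // 3rd: the ids of the root entities handled by the interchange
        public static void afterOrderExport(Path file,
                HashMap<String, WorkerThreadRunnable.Parameter> filter,
                List<?> handledIds) {
            // react to the finished interchange here (illustrative)
        }
    }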
@@ -273,4 +279,9 @@ class DataDSLValidator extends AbstractDataDSLValidator {
}
}
+// @Check
+// def void checkDateValue(DataInterchange interchange) {
+// // check Date value
+// }
+
}
