Background: We are building a MuleSoft application, and as part of the requirements we have to write a large number of records (roughly 30K) to a CSV file. Before that we extract the data, which comes in the form of XML, from DB2. We then apply some transformation/mapping rules, and finally we write the data to the CSV file and FTP it. I have attached the XML below.
Problem: After processing only about 2,500-2,600 records, the process stalls. It does not throw any error; it just sits there doing nothing. We have tried the following: 1. running the flow as part of a Mule batch job (no difference observed), and 2. setting max failed records = -1, because we found that suggestion in a blog post.
If anyone can offer any suggestions, it would be very helpful. Is there a limit on the number of records that can be written to a file?
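(For reference on option 2: this is how we set it when we tried it, assuming max-failed-records on the batch job is the attribute the blog meant. The config attached below is the version without that change.

<batch:job name="batch2Batch" max-failed-records="-1">
    ...
</batch:job>
)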
<?xml version="1.0" encoding="UTF-8"?>
<mule xmlns:batch="http://www.mulesoft.org/schema/mule/batch" xmlns:db="http://www.mulesoft.org/schema/mule/db"
xmlns:file="http://www.mulesoft.org/schema/mule/file"
xmlns:dw="http://www.mulesoft.org/schema/mule/ee/dw" xmlns:metadata="http://www.mulesoft.org/schema/mule/metadata"
xmlns="http://www.mulesoft.org/schema/mule/core" xmlns:doc="http://www.mulesoft.org/schema/mule/documentation"
xmlns:spring="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.mulesoft.org/schema/mule/db http://www.mulesoft.org/schema/mule/db/current/mule-db.xsd
http://www.mulesoft.org/schema/mule/file http://www.mulesoft.org/schema/mule/file/current/mule-file.xsd
http://www.mulesoft.org/schema/mule/ee/dw http://www.mulesoft.org/schema/mule/ee/dw/current/dw.xsd
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-current.xsd
http://www.mulesoft.org/schema/mule/core http://www.mulesoft.org/schema/mule/core/current/mule.xsd
http://www.mulesoft.org/schema/mule/batch http://www.mulesoft.org/schema/mule/batch/current/mule-batch.xsd">
<db:generic-config name="Generic_Database_Configuration1" url="jdbc:db2://faadbcdd0017:60004/MATIUT:user=mat_adm;password=q1w2e3r4;" driverClassName="com.ibm.db2.jcc.DB2Driver" doc:name="Generic Database Configuration"/>
<file:connector name="File" outputPattern="Carfax.csv" writeToDirectory="C:\opt\CCM\Output\IUT" autoDelete="false" outputAppend="true" streaming="true" validateConnections="true" doc:name="File"/>
<file:connector name="File1" outputPattern="sample.txt" readFromDirectory="C:\opt\CCM" autoDelete="true" streaming="true" validateConnections="true" doc:name="File"/>
<batch:job name="batch2Batch">
<batch:input>
<logger message="Startr>>>>>>>>>>>>>>>>>>>>>>>>>>>>>" level="INFO" doc:name="Logger"/>
<foreach doc:name="For Each">
<db:select config-ref="Generic_Database_Configuration1" doc:name="Database">
<db:parameterized-query><![CDATA[select MSG_ID,TEMPL_ID,MSG_DATA,EMAIL_CHNL_IND,PUSH_CHNL_IND, INSERT_TMSP,UID FROM IUT.message_master WHERE INSERT_TMSP between
(CURRENT TIMESTAMP- HOUR (CURRENT TIMESTAMP) HOURS- MINUTE(CURRENT TIMESTAMP) MINUTES- SECOND(CURRENT TIMESTAMP) SECONDS
- MICROSECOND(CURRENT TIMESTAMP) MICROSECONDS) and ((CURRENT TIMESTAMP- HOUR (CURRENT TIMESTAMP) HOURS
- MINUTE(CURRENT TIMESTAMP) MINUTES- SECOND(CURRENT TIMESTAMP) SECONDS- MICROSECOND(CURRENT TIMESTAMP) MICROSECONDS) + 1 DAY)
and SOURCE_SYS='CSS' and ONLINE_BATCH_IND IN('Y','E') AND APPL_PROCESS_IND = 'N' with UR]]></db:parameterized-query>
</db:select>
</foreach>
<logger message="#[payload]" level="INFO" doc:name="Logger"/>
</batch:input>
<batch:process-records>
<batch:step name="Batch_Step">
<component class="com.mule.object.transformer.Mapper" doc:name="Java"/>
<dw:transform-message metadata:id="9bd2e755-065a-4208-95cf-1277f5643ee9" doc:name="Transform Message">
<dw:input-payload mimeType="application/java"/>
<dw:set-payload><![CDATA[%dw 1.0
%output application/csv separator = "|" , header = false , ignoreEmptyLine = true
---
[{
Timestamp: payload.timeStamp,
NotificationType: payload.notificationType,
UID: payload.UID,
Name: payload.messageData.firstName,
MiddleName: payload.messageData.middleName,
LastName: payload.messageData.lastName,
Email: payload.messageData.email,
HHNumber: payload.messageData.cssDataRequest.householdNumber,
PolicyNumber: payload.messageData.cssDataRequest.policyContractNumber,
SentDate: payload.messageData.cssDataRequest.sendDate,
PinNumber: payload.messageData.cssDataRequest.pin,
AOR: payload.messageData.cssDataRequest.agentOfRecord
}]]]></dw:set-payload>
</dw:transform-message>
<file:outbound-endpoint path="C:\opt\CCM\Output\IUT" connector-ref="File" responseTimeout="10000" doc:name="File"/>
</batch:step>
</batch:process-records>
<batch:on-complete>
<logger message="Batch2 Completed" level="INFO" doc:name="Logger"/>
</batch:on-complete>
</batch:job>
</mule>
Answer 0 (score: 0):
Try using a Batch Commit. Keep a Batch Commit inside your Batch Step; it can be used to accumulate all the records in the batch. Set the attribute streaming="true" on the Batch Commit block, and your File connector should sit inside the Batch Commit. Let me know if this helps.
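A rough sketch of what that could look like, adapted from the config above (untested; the DataWeave mapping stays as it is and only the File endpoint moves inside the commit):

<batch:process-records>
    <batch:step name="Batch_Step">
        <component class="com.mule.object.transformer.Mapper" doc:name="Java"/>
        <dw:transform-message doc:name="Transform Message">
            <!-- same DataWeave mapping as in your flow -->
        </dw:transform-message>
        <!-- streaming="true" accumulates all records of the step before writing -->
        <batch:commit streaming="true" doc:name="Batch Commit">
            <file:outbound-endpoint path="C:\opt\CCM\Output\IUT" connector-ref="File" responseTimeout="10000" doc:name="File"/>
        </batch:commit>
    </batch:step>
</batch:process-records>

With a streaming commit the records are handed to the endpoint as one streamed collection rather than one message per record, which should avoid opening and appending to the file 30K separate times.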