
There seem to be a lot of questions on this subject, but I haven't been able to find the answer. Sorry if this is a dupe!

I have a Job with many steps; at one point I want to run the item processing (read/validate/write) of multiple files in parallel. Before making this code parallel, I ran it synchronously and everything was fine. When I added the batch:split with a task-executor, I started getting this error:

2020-09-01 01:07:12,668 [SimpleAsyncTaskExecutor-1] ERROR AbstractStep - Encountered an error executing step loadBaseFareInfo in job ATPRuleCombined org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'scopedTarget.allAddsCleanupProcessorForBaseFareInfo': Scope 'job' is not active for the current thread; consider defining a scoped proxy for this bean if you intend to refer to it from a singleton; nested exception is java.lang.IllegalStateException: No context holder available for job scope

Here is the relevant code:

    <batch:split id="loadMainTables" next="saveCacheNotification" task-executor="taskExecutor">
        <batch:flow>
            <batch:step id="loadBaseFareInfo">
                <batch:tasklet>
                    <batch:chunk reader="baseFareInfoGroupReader" processor="baseFareInfoGroupValidator"  writer="baseFareInfoGroupWriter"
                                 chunk-completion-policy="baseFareInfoCompletionPolicy">
                        <!--commit-interval="${baseFareInfo.commitInterval}"> -->
                        <!-- skip-policy="alwaysSkipPolicy"> -->
                        <batch:listeners>
                            <batch:listener ref="errorDataFileItemReadListener"/>
                            <batch:listener ref="springContextRepo"/>
                            <batch:listener ref="allAddsCleanupProcessorForBaseFareInfo"/>
                            <batch:listener ref="baseFareInfo.groupMetricListener"/>
                            <batch:listener ref="baseFareInfoGroupWriter"/>
                            <batch:listener ref="baseFareInfoGroupReader"/>
                            <batch:listener ref="baseFareInfoStepLocking"/>
                        </batch:listeners>
                        <batch:streams>
                            <batch:stream ref="baseFareInfoItemReader"/>
                        </batch:streams>
                    </batch:chunk>
                </batch:tasklet>
            </batch:step>
        </batch:flow>
        <batch:flow>
            <batch:step id="loadNegFareSecurity">
                <batch:tasklet>
                    <batch:chunk reader="negFareSecurityGroupReader" processor="negFareSecurityGroupValidator"  writer="negFareSecurityGroupWriter"
                                 chunk-completion-policy="negFareSecurityCompletionPolicy">
                        <!--commit-interval="${negFareSecurity.commitInterval}"> -->
                        <!-- skip-policy="alwaysSkipPolicy"> -->
                        <batch:listeners>
                            <batch:listener ref="errorDataFileItemReadListener"/>
                            <batch:listener ref="springContextRepo"/>
                            <batch:listener ref="allAddsCleanupProcessorForNegFareSecurity"/>
                            <batch:listener ref="negFareSecurity.groupMetricListener"/>
                            <batch:listener ref="negFareSecurityGroupWriter"/>
                            <batch:listener ref="negFareSecurityGroupReader"/>
                            <batch:listener ref="negFareSecurityStepLocking"/>
                        </batch:listeners>
                        <batch:streams>
                            <batch:stream ref="negFareSecurityItemReader"/>
                        </batch:streams>
                    </batch:chunk>
                </batch:tasklet>
            </batch:step>
        </batch:flow>
    </batch:split>

    <bean id="allAddsCleanupProcessorForBaseFareInfo"
      class="com.sabre.aircontent.batch2.rule.basefareinfo.cleanup.AllAddsCleanupProcessorForBaseFareInfo" scope="job">
    <property name="headerRepo" ref="headerRepo" />
    <property name="allAddsCleanupCursor" ref="hibernateCursorForBaseFareInfoAllAddsCleanup" />
    <property name="dao" ref="baseFareInfoDao" />
    <property name="cacheNotificationService" ref="cacheNotificationService"/>
    <property name="resourceName" value="#{jobParameters['job.resource']}" />
    <property name="entityName" value="${baseFareInfo.baseEntityName:#{null}}" />
    <property name="expireThresholdInPercent" value="${basefareinfo.allAdds.expireThresholdInPercent}"/>
    <property name="metricRepo" ref="baseFareInfo.metricRepo"/>
    <property name="jobStartTime"
              value="#{jobExplorer.getJobExecution(jobOperator.getExecutions(jobExecution.jobInstance.instanceId)[jobOperator.getExecutions(jobExecution.jobInstance.instanceId).size()-1]).startTime}"/>
    <property name="additionalCursorParamValues">
        <map>
            <entry key="vendor" value="${vendor}" />
        </map>
    </property>
</bean>
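
The taskExecutor bean referenced by the split is not shown above. Judging by the SimpleAsyncTaskExecutor-1 thread name in the log, it is presumably declared along these lines (a sketch only; the concurrencyLimit value is illustrative, not taken from the question):

    <bean id="taskExecutor" class="org.springframework.core.task.SimpleAsyncTaskExecutor">
        <!-- illustrative limit on how many flows run concurrently -->
        <property name="concurrencyLimit" value="2"/>
    </bean>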

Why is the job scope of the allAddsCleanupProcessorForBaseFareInfo bean not recognized anymore? Is there a workaround for this?

Please add the whole XML with the batch configuration, not only the part that you think is relevant to the question. Thank you. – Danylo Gurianov

1 Answer


Try declaring the bean with step scope instead of job scope:

    <bean id="allAddsCleanupProcessorForBaseFareInfo"
          class="com.sabre.aircontent.batch2.rule.basefareinfo.cleanup.AllAddsCleanupProcessorForBaseFareInfo" scope="step">
        <property name="headerRepo" ref="headerRepo" />
        <property name="allAddsCleanupCursor" ref="hibernateCursorForBaseFareInfoAllAddsCleanup" />
        <property name="dao" ref="baseFareInfoDao" />
        <property name="cacheNotificationService" ref="cacheNotificationService"/>
        <property name="resourceName" value="#{jobParameters['job.resource']}" />
        <property name="entityName" value="${baseFareInfo.baseEntityName:#{null}}" />
        <property name="expireThresholdInPercent" value="${basefareinfo.allAdds.expireThresholdInPercent}"/>
        <property name="metricRepo" ref="baseFareInfo.metricRepo"/>
        <property name="jobStartTime"
                  value="#{jobExplorer.getJobExecution(jobOperator.getExecutions(jobExecution.jobInstance.instanceId)[jobOperator.getExecutions(jobExecution.jobInstance.instanceId).size()-1]).startTime}"/>
        <property name="additionalCursorParamValues">
            <map>
                <entry key="vendor" value="${vendor}" />
            </map>
        </property>
    </bean>
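
A note on why this change helps (not from the original answer): in a batch:split, each flow executes its steps on a task-executor thread, and Spring Batch registers the step context on that thread but not the job context, which stays bound to the thread that launched the job. That is why the job-scoped bean only fails in the parallel version. If allAddsCleanupProcessorForNegFareSecurity in the second flow is also declared with scope="job", it would presumably need the same change; its definition is not shown in the question, so the class name below is only a placeholder:

    <bean id="allAddsCleanupProcessorForNegFareSecurity"
          class="com.example.AllAddsCleanupProcessorForNegFareSecurity" scope="step">
        <!-- same kind of properties as the job-scoped bean shown in the question -->
    </bean>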