Spring Batch Integration: cross-service batch processing

Producer side:

Configuration file:

server: 
  port: 8081

spring:
  datasource:
    type: com.alibaba.druid.pool.DruidDataSource
    username: root
    password: xxxxxxxx
    driver-class-name: com.mysql.cj.jdbc.Driver
    url: jdbc:mysql://localhost:3306/batch?serverTimezone=UTC&useUnicode=true&characterEncoding=utf8
    # create the Spring Batch metadata tables at startup
    schema:
    - classpath:/org/springframework/batch/core/schema-mysql.sql
    druid:
      # Additional connection-pool settings, applied to the data source above
      # Number of physical connections created at initialization
      initial-size: 5
      # Minimum number of pooled connections
      min-idle: 5
      # Maximum number of pooled connections
      max-active: 20
      # Maximum wait time when acquiring a connection, in milliseconds
      max-wait: 60000
      # Interval between eviction runs that detect and close idle connections, in milliseconds
      time-between-eviction-runs-millis: 60000
      # Minimum time a connection must sit idle in the pool before it may be evicted, in milliseconds
      min-evictable-idle-time-millis: 300000
      # SQL used to validate connections; must be a query
      validation-query: SELECT 1 FROM DUAL
      # Recommended: true. Does not affect performance and guarantees safety: on borrow, if a connection
      # has been idle longer than timeBetweenEvictionRunsMillis, validationQuery is run to check it.
      test-while-idle: true
      # Run validationQuery on every borrow; enabling this degrades performance.
      test-on-borrow: false
      # Run validationQuery on every return; enabling this degrades performance.
      test-on-return: false
      # Enable PSCache and set its size per connection
      pool-prepared-statements: true
      # Must be greater than 0 to enable PSCache; a value > 0 automatically forces poolPreparedStatements to true
      max-pool-prepared-statement-per-connection-size: 20
      # Monitoring filters; removing them disables SQL statistics in the console. 'wall' is the firewall filter.
      filters: stat,wall
      use-global-data-source-stat: true
      # Enable mergeSql and slow-SQL logging via connectProperties
      connection-properties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000
      # Monitoring console configuration
      #stat-view-servlet:
        #login-username: admin
        #login-password: 123456
        #reset-enable: false
        #url-pattern: /druid/*
        # IP whitelist
        #allow:
        # IP blacklist; if an address appears in both lists, the blacklist takes precedence
        #deny:
      #web-stat-filter:
        # filter rule
        #url-pattern: /*
        # patterns excluded from filtering
        #exclusions: "*.js,*.gif,*.jpg,*.png,*.css,*.ico,/druid/*"

  # batch configuration
  batch:
    initialize-schema: always
    job:
      # do not auto-run jobs at startup; they are launched by the integration flow below
      enabled: false
      
  kafka:
    bootstrap-servers:
    - xxxxxxx:9092
  
    consumer:
      group-id: siTestGroup
      auto-offset-reset: earliest
      enable-auto-commit: false
      value-deserializer: com.boot.spring.deserializer.CustomDeserializer
      key-deserializer: com.boot.spring.deserializer.CustomDeserializer

    producer:
      batch-size: 16384
      buffer-memory: 33554432
      retries: 0
      value-serializer: com.boot.spring.serializer.CustomSerializer
      key-serializer: com.boot.spring.serializer.CustomSerializer
      
# custom application properties, bound to KafkaAppProperties below
kafka:
  send-topic: si.sTopic
  return-topic: si.rTopic
  messageKey: si.key

Kafka properties class:

@ConfigurationProperties("kafka")
@Data
public class KafkaAppProperties {

	private String sendTopic;

	private String returnTopic;

	private String messageKey;

}
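@ConfigurationProperties alone does not register the class as a bean, so it has to be enabled somewhere. A minimal sketch of one way to do that, assuming a standard Spring Boot entry point (the ProducerApplication class name is hypothetical):

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.context.properties.EnableConfigurationProperties;

@SpringBootApplication
@EnableConfigurationProperties(KafkaAppProperties.class) // binds the kafka.* block above
public class ProducerApplication {

	public static void main(String[] args) {
		SpringApplication.run(ProducerApplication.class, args);
	}

}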

Batch configuration class:

@Configuration
@EnableBatchProcessing // activates Spring Batch and lets this BatchConfigurer take effect
public class CustomBatchConfig extends DefaultBatchConfigurer implements ApplicationContextAware {

	@Autowired
	private JobRegistry jobRegistry;
	
	private ApplicationContext applicationContext;
	

	@Bean
	public JobRegistryBeanPostProcessor jobRegistrarPostProcessor(){
		JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor = new JobRegistryBeanPostProcessor();
		jobRegistryBeanPostProcessor.setBeanFactory(applicationContext.getAutowireCapableBeanFactory());
		jobRegistryBeanPostProcessor.setJobRegistry(jobRegistry);
		return jobRegistryBeanPostProcessor;
	}
	
	@Bean
	public SimpleAsyncTaskExecutor taskExecutor() {
		return new SimpleAsyncTaskExecutor();
	}
	
	@Bean
	public JobOperator jobOperator(){
		SimpleJobOperator jobOperator = new SimpleJobOperator();
		jobOperator.setJobLauncher(getJobLauncher());
		jobOperator.setJobExplorer(getJobExplorer());
		jobOperator.setJobRepository(getJobRepository());
		jobOperator.setJobParametersConverter(new DefaultJobParametersConverter());
		jobOperator.setJobRegistry(jobRegistry);
		return jobOperator;
	}
	

	@Override
	public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
		this.applicationContext = applicationContext;
	}
	
	
}
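The JobOperator bean is not used in the snippets shown here, but together with the JobRegistry it allows the registered job to be started by name. A minimal usage sketch (the injection point and the parameter value are hypothetical):

	@Autowired
	private JobOperator jobOperator;

	public void launchByName() throws Exception {
		// start the registered job by name, passing parameters as key=value pairs
		Long executionId = jobOperator.start("remoteJob", "fileName=C:/data/sample.csv");
		System.out.println("started execution " + executionId);
	}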

Kafka wiring:

@Configuration
public class KafkaConfig {
	
	@Autowired
	private QueueChannel replies;
	

	@Bean
	public ProducerFactory<String, String> kafkaProducerFactory(KafkaProperties properties) {
		Map<String, Object> producerProperties = properties.buildProducerProperties();
		producerProperties.put(ProducerConfig.LINGER_MS_CONFIG, 1);
		
		return new DefaultKafkaProducerFactory<>(producerProperties);
	}
	
	@Bean
	public KafkaTemplate<String, String> kafkaTemplate(ProducerFactory<String, String> kafkaProducerFactory) {
		// inject the factory instead of calling the @Bean method with a null argument
		KafkaTemplate<String, String> template = new KafkaTemplate<>(kafkaProducerFactory);
		//template.setMessageConverter(new StringJsonMessageConverter());
		return template;
	}
	
	@Bean
	public DefaultKafkaHeaderMapper mapper() {
	    return new DefaultKafkaHeaderMapper();
	}

	// outbound: messages arriving on the "requests" channel are published to the send-topic
	@ServiceActivator(inputChannel = "requests")
	@Bean
	public MessageHandler handler(KafkaTemplate<String, String> kafkaTemplate, KafkaAppProperties properties) {
		KafkaProducerMessageHandler<?, ?> handler =
				new KafkaProducerMessageHandler<>(kafkaTemplate);
		handler.setTopicExpression(new LiteralExpression(properties.getSendTopic()));
		handler.setMessageKeyExpression(new LiteralExpression(properties.getMessageKey()));
		return handler;
	}

	@Bean
	public ConsumerFactory<?, ?> kafkaConsumerFactory(KafkaProperties properties) {
		Map<String, Object> consumerProperties = properties
				.buildConsumerProperties();
		consumerProperties.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, 15000);
		return new DefaultKafkaConsumerFactory<>(consumerProperties);
	}

	// inbound: listens on partition 0 of the return-topic for replies coming back from workers
	@Bean
	public KafkaMessageListenerContainer<String, String> container(
			ConsumerFactory<String, String> kafkaConsumerFactory, KafkaAppProperties properties) {
		ContainerProperties containerProperties = new ContainerProperties(new TopicPartitionOffset(properties.getReturnTopic(), 0));
		containerProperties.setMissingTopicsFatal(false);
		return new KafkaMessageListenerContainer<>(kafkaConsumerFactory, containerProperties);
	}

	// bridges records from the listener container onto the "replies" channel
	@Bean
	public KafkaMessageDrivenChannelAdapter<String, String>
				adapter(KafkaMessageListenerContainer<String, String> container) {
		KafkaMessageDrivenChannelAdapter<String, String> kafkaMessageDrivenChannelAdapter =
				new KafkaMessageDrivenChannelAdapter<>(container);
		kafkaMessageDrivenChannelAdapter.setOutputChannel(replies);
		return kafkaMessageDrivenChannelAdapter;
	}

}
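Taken together, this class forms the Kafka leg of the remote chunking loop: the "requests" channel defined in the job configuration below feeds the KafkaProducerMessageHandler, which publishes chunk requests to the send-topic, while the listener container consumes worker replies from the return-topic and the adapter drops them onto the "replies" QueueChannel that the manager step polls.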

Outbound job configuration (manager side):

@Configuration
@EnableBatchIntegration // provides RemoteChunkingManagerStepBuilderFactory
public class RemoteChunkingConfig {
	
	@Autowired
	private JobBuilderFactory jobBuilderFactory;
	
    @Autowired
    private RemoteChunkingManagerStepBuilderFactory masterStepBuilderFactory;

	@Bean
	public Job remoteJob() {

		return jobBuilderFactory.get("remoteJob")
				.start(masterStep())
				.build();		
	}
	
    @Bean
    public TaskletStep masterStep() {
        return  masterStepBuilderFactory.get("masterStep")
                   .chunk(100)
                   .reader(sampleReader(null))
                   .inputChannel(replies())   // replies received from workers
                   .outputChannel(requests()) // requests sent to workers
                   .build();
    }

    
    /*
     * Configure inbound flow (replies coming from workers)
     */
    @Bean
    public QueueChannel replies() {
        return new QueueChannel();
    }
    
    
    /*
     * Configure outbound flow (requests going to workers)
     */
    @Bean
    public DirectChannel requests() {
        return new DirectChannel();
    }

	@Bean
	@StepScope
	public FlatFileItemReader<String> sampleReader(@Value("#{jobParameters['fileName']}") String resource) {
		System.out.println(resource);
		FlatFileItemReader<String> flatFileItemReader = new FlatFileItemReader<>();
		flatFileItemReader.setEncoding("gbk");
		flatFileItemReader.setResource(new FileSystemResource(resource));
		flatFileItemReader.setLineMapper(new PassThroughLineMapper());
		return flatFileItemReader;
	}
	

}

FileMessageToJobRequest (turns an incoming file message into a JobLaunchRequest):

public class FileMessageToJobRequest {
    private Job job;
    private String fileParameterName;

    public void setFileParameterName(String fileParameterName) {
        this.fileParameterName = fileParameterName;
    }

    public void setJob(Job job) {
        this.job = job;
    }

    @Transformer
    public JobLaunchRequest toRequest(Message<File> message) {
        JobParametersBuilder jobParametersBuilder =
            new JobParametersBuilder().addDate("date", new Date());

        jobParametersBuilder.addString(fileParameterName,
            message.getPayload().getAbsolutePath());

        return new JobLaunchRequest(job, jobParametersBuilder.toJobParameters());
    }
}
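To illustrate what the transformer produces, a hypothetical hand-driven example (MessageBuilder is org.springframework.messaging.support.MessageBuilder; the file path is made up):

	FileMessageToJobRequest transformer = new FileMessageToJobRequest();
	transformer.setJob(remoteJob);                 // the "remoteJob" bean defined above
	transformer.setFileParameterName("fileName");

	JobLaunchRequest request = transformer.toRequest(
			MessageBuilder.withPayload(new File("C:/data/sample.csv")).build());
	// request now carries remoteJob plus JobParameters {date=<now>, fileName=C:/data/sample.csv}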

 

Starting the flow:

@Configuration
public class IntegrationConfig {
	
	@Autowired
	@Qualifier("remoteJob")
	private Job job;
	
	@Bean
	public MessageChannel re() {
		return new PublishSubscribeChannel();
	}
	
	@Bean
	public PollableChannel exeQueue() {
		return new QueueChannel();
	}
	
	@Bean
	public MessageChannel stepExecutionsChannel() {
		return new PublishSubscribeChannel();
	}
	
	@Bean
	public MessageChannel stepContinue() {
		return new PublishSubscribeChannel();
	}
	
	/**
	 * Receives messages from the StepExecutionListener.
	 *
	 * @return
	 */
	@Bean
	@ServiceActivator(inputChannel = "stepExecutionsChannel")
	public LoggingHandler loggingHandler() {
	    LoggingHandler adapter = new LoggingHandler(LoggingHandler.Level.WARN);
	    adapter.setLoggerName("TEST_LOGGER");
	    adapter.setLogExpressionString("headers.id + ': ' + payload");
	    return adapter;
	}
	
	
	
	@Bean
	public FileMessageToJobRequest fileMessageToJobRequest() {
	    FileMessageToJobRequest fileMessageToJobRequest = new FileMessageToJobRequest();
	    // must match the job-parameter name used by sampleReader
	    fileMessageToJobRequest.setFileParameterName("fileName");
	    fileMessageToJobRequest.setJob(job);
	    return fileMessageToJobRequest;
	}

	/**
	 * Polls the queue channel and launches the job.
	 *
	 * @param jobLauncher
	 * @return
	 */
	@Bean
	@ServiceActivator(inputChannel = "exeQueue", poller = @Poller(fixedRate = "1000"))
	public JobLaunchingGateway jobLaunchingGateway(JobLauncher jobLauncher) {
		// run the job synchronously so the gateway's reply reflects the finished execution
		((SimpleJobLauncher) jobLauncher).setTaskExecutor(new SyncTaskExecutor());
	    JobLaunchingGateway jobLaunchingGateway = new JobLaunchingGateway(jobLauncher);
	    // send the resulting JobExecution to the "re" channel
	    jobLaunchingGateway.setOutputChannel(re());
	    return jobLaunchingGateway;
	}
	
	@Bean
	@ServiceActivator(inputChannel = "re")
	public MessageHandler m() {
	    return m -> {
	    	System.out.println("re:" + m.getHeaders());
	    	System.out.println("re:" + m.getPayload());
	    };
	}

	/**
	 * File-reading flow: watches a directory and turns each new file into a job launch request.
	 *
	 * @return
	 */
	@Bean
	public IntegrationFlow integrationFlow() {
	    return IntegrationFlows.from(Files.inboundAdapter(new File("C:\\Users\\yaoqiang\\Desktop\\123"))
	                    .filter(new SimplePatternFileListFilter("*.csv")),
	            c -> c.poller(Pollers.fixedRate(1000).maxMessagesPerPoll(1)))
	            // wrap the file into a JobLaunchRequest for the outbound trip
	            .handle(fileMessageToJobRequest())
	            // send it to the queue channel; a gateway could also be used directly
	            .channel(exeQueue())
	            .log(LoggingHandler.Level.WARN, "headers.id + ': ' + payload")
	            .get();
	}

}
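With both sides running, dropping a *.csv file into C:\Users\yaoqiang\Desktop\123 drives the whole pipeline: the inbound adapter picks the file up, the transformer builds a JobLaunchRequest, the gateway launches remoteJob, and the manager step streams chunks to the workers over Kafka.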

 

That covers the producer configuration. The serializers simply use Spring utility classes, so they are not reproduced here. The consumer is mostly identical to the producer; only the parts that differ are shown below:
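For reference, a minimal sketch of what such a serializer pair might look like, assuming plain JDK serialization via org.springframework.util.SerializationUtils and kafka-clients 2.1+ (where configure/close have default implementations); the ChunkRequest/ChunkResponse messages exchanged by remote chunking are Serializable. This is an illustration, not the author's actual classes:

import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serializer;
import org.springframework.util.SerializationUtils;

public class CustomSerializer implements Serializer<Object> {

	@Override
	public byte[] serialize(String topic, Object data) {
		// JDK-serialize the payload; ChunkRequest/ChunkResponse implement Serializable
		return SerializationUtils.serialize(data);
	}

}

class CustomDeserializer implements Deserializer<Object> {

	@Override
	public Object deserialize(String topic, byte[] data) {
		return SerializationUtils.deserialize(data);
	}

}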

Message handling configuration:

@Configuration
@EnableBatchIntegration // provides RemoteChunkingWorkerBuilder
public class WorkerConfiguration {
	
	@Autowired
	private JobBuilderFactory jobBuilderFactory;
	
	@Autowired
	private StepBuilderFactory stepBuilderFactory;
	
    @Autowired
    private RemoteChunkingWorkerBuilder<String,String>  workerBuilder;

	
    @Bean
    public IntegrationFlow workerFlow() {
        return this.workerBuilder
                   .itemProcessor(asyncProcessor())
                   .itemWriter(asyncWriter())
                   .inputChannel(replies())   // chunk requests received from the manager
                   .outputChannel(requests()) // chunk replies sent back to the manager
                   .build();
    }
    
    /*
     * Outbound flow: chunk replies going back to the manager.
     * The channel bean names mirror the producer side so the shared Kafka wiring applies unchanged.
     */
    @Bean
    public DirectChannel requests() {
        return new DirectChannel();
    }

    /*
     * Inbound flow: chunk requests arriving from the manager
     */
    @Bean
    public QueueChannel replies() {
        return new QueueChannel();
    }

	
	@SuppressWarnings({ "rawtypes", "unchecked" })
	@Bean
	public ItemProcessor asyncProcessor() {
		// AsyncItemProcessor wraps each result in a Future; the AsyncItemWriter below unwraps it.
		// Raw types are kept because the worker builder is typed RemoteChunkingWorkerBuilder<String, String>.
		ItemProcessor<String, String> itemProcessor = s -> {
			System.out.println("processor" + s);
			return s;
		};
		AsyncItemProcessor asyncItemProcessor = new AsyncItemProcessor();
		asyncItemProcessor.setTaskExecutor(new SyncTaskExecutor());
		asyncItemProcessor.setDelegate(itemProcessor);
		return asyncItemProcessor;
	}
	

	@SuppressWarnings({ "rawtypes", "unchecked" })
	@Bean
	public AsyncItemWriter asyncWriter() {
		// unwraps the Futures produced by asyncProcessor() and delegates to a simple console writer
		ItemWriter<String> itemWriter = items -> System.out.println("writer" + items);
		AsyncItemWriter asyncItemWriter = new AsyncItemWriter();
		asyncItemWriter.setDelegate(itemWriter);
		return asyncItemWriter;
	}
	


}

Note the topic configuration on the worker side: send-topic and return-topic are mirrored relative to the producer, so chunk requests flow manager → worker on si.sTopic and replies flow worker → manager on si.rTopic:

kafka:    
  send-topic: si.rTopic
  return-topic: si.sTopic
  messageKey: si.key

 
