1. Kafka version: kafka_2.11-2.2.1.
2. The problem: the gap between produced offsets differs depending on whether producer transactions are enabled or disabled.
The code is as follows:
/*
* Copyright 2018-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example;
import com.common.Foo2;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.ApplicationRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.core.task.SimpleAsyncTaskExecutor;
import org.springframework.core.task.TaskExecutor;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.support.converter.RecordMessageConverter;
import org.springframework.kafka.support.converter.StringJsonMessageConverter;
import java.util.HashMap;
import java.util.Map;
/**
* Sample shows use of a dead letter topic.
*
* @author Gary Russell
* @since 2.2.1
*
*/
@SpringBootApplication
public class Application {

    private final Logger logger = LoggerFactory.getLogger(Application.class);

    private final TaskExecutor exec = new SimpleAsyncTaskExecutor();

    public static void main(String[] args) {
        SpringApplication.run(Application.class, args).close();
    }

    @Bean
    public Map<String, Object> producerConfig() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "10.60.153.252:9092,10.60.153.253:9093");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.springframework.kafka.support.serializer.JsonSerializer");
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.springframework.kafka.support.serializer.JsonSerializer");
        props.put(ProducerConfig.PARTITIONER_CLASS_CONFIG, "com.MyPartitioner");
        props.put(ProducerConfig.ACKS_CONFIG, "all");
        props.put(ProducerConfig.RETRIES_CONFIG, 1);
        props.put(ProducerConfig.LINGER_MS_CONFIG, 10);
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, 524880);
        props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, "gzip");
        return props;
    }
    @Bean
    public ProducerFactory<Object, Object> producerFactory() {
        DefaultKafkaProducerFactory<Object, Object> producerFactory =
                new DefaultKafkaProducerFactory<>(producerConfig());
        // setting a transactionIdPrefix is what makes the factory transaction-capable
        producerFactory.setTransactionIdPrefix("trans-");
        return producerFactory;
    }

    /*@Bean
    public ProducerFactory<Object, Object> producerFactory() {
        return new DefaultKafkaProducerFactory<>(producerConfig());
    }*/
    @Bean
    public KafkaTemplate<Object, Object> kafkaTemplate(ProducerFactory<Object, Object> producerFactory) {
        return new KafkaTemplate<>(producerFactory);
    }

    @Bean
    public RecordMessageConverter converter() {
        return new StringJsonMessageConverter();
    }

    @KafkaListener(id = "wxyGroup", topics = "topic1")
    public void listen(Foo2 foo) {
        logger.info("Received: " + foo);
        if (foo.getFoo().startsWith("fail")) {
            throw new RuntimeException("failed");
        }
        this.exec.execute(() -> System.out.println("Hit Enter to terminate..."));
    }

    @KafkaListener(id = "wxyGroup2", topics = "wxy_test")
    public void dltListen(String in) {
        logger.info("Received from wxy_test: " + in);
        this.exec.execute(() -> System.out.println("Hit Enter to terminate..."));
    }

    @Bean
    public NewTopic topic() {
        return new NewTopic("topic1", 1, (short) 1);
    }

    @Bean
    public NewTopic dlt() {
        return new NewTopic("topic1.DLT", 1, (short) 1);
    }

    @Bean
    public NewTopic newTopic() {
        return new NewTopic("wxy_test", 3, (short) 1);
    }

    @Bean
    public ApplicationRunner runner() {
        return args -> {
            System.out.println("Hit Enter to terminate...");
            System.in.read();
        };
    }

}
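For context, the question does not show the sending code or the KafkaSendResultHandler that prints the "sends successfully" lines. Below is a minimal sketch of how the messages were presumably sent through the transactional template; the SendRunner class and the logging format are hypothetical stand-ins, while executeInTransaction and the ListenableFuture callback are standard spring-kafka 2.2 APIs.

package com.example;

import org.springframework.kafka.core.KafkaTemplate;

// Hypothetical sender: not part of the original code, only illustrates how a
// transaction-capable template is typically used.
public class SendRunner {

    private final KafkaTemplate<Object, Object> template;

    public SendRunner(KafkaTemplate<Object, Object> template) {
        this.template = template;
    }

    public void send(String payload) {
        // with a transaction-capable factory, sends must run inside a transaction,
        // e.g. via executeInTransaction (or @Transactional, or a listener container)
        this.template.executeInTransaction(t -> {
            t.send("wxy_test", payload).addCallback(
                    result -> System.out.printf(
                            "Kafka Message of topic:%s/size:%d/partition:%d/offset:%d sends successfully!%n",
                            result.getRecordMetadata().topic(),
                            result.getRecordMetadata().serializedValueSize(),
                            result.getRecordMetadata().partition(),
                            result.getRecordMetadata().offset()),
                    ex -> ex.printStackTrace());
            return true;
        });
    }
}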
The produced offsets then look like this:
Kafka Message of topic:wxy_test/size:6/partition:1/offset:231 sends successfully at time:20200612 19:35:50-318!
Kafka Message of topic:wxy_test/size:6/partition:0/offset:133 sends successfully at time:20200612 19:35:53-140!
Received from wxy_test: aaaa
Kafka Message of topic:wxy_test/size:6/partition:0/offset:135 sends successfully at time:20200612 19:35:54-365!
Received from wxy_test: aaaa
Kafka Message of topic:wxy_test/size:6/partition:2/offset:136 sends successfully at time:20200612 19:35:55-667!
Received from wxy_test: aaaa
Kafka Message of topic:wxy_test/size:6/partition:1/offset:233 sends successfully at time:20200612 19:43:53-018!
Received from wxy_test: aaaa
Kafka Message of topic:wxy_test/size:6/partition:0/offset:137 sends successfully at time:20200612 19:43:56-295!
Received from wxy_test: aaaa
Kafka Message of topic:wxy_test/size:6/partition:0/offset:139 sends successfully at time:20200612 19:43:57-380!
Received from wxy_test: aaaa
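For anyone reproducing this: one way to check whether anything is consumable at the skipped offsets (e.g. 134 on partition 0, between the 133 and 135 above) is to seek a plain consumer straight into the gap. This is a sketch using the stock kafka-clients consumer; the GapProbe class name is made up, and the broker address is taken from the config above.

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;

// Hypothetical probe: seeks into one of the offset gaps and prints what poll() returns.
public class GapProbe {

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "10.60.153.252:9092");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            TopicPartition tp = new TopicPartition("wxy_test", 0);
            consumer.assign(Collections.singletonList(tp));
            consumer.seek(tp, 134L); // the "missing" offset between 133 and 135
            for (ConsumerRecord<String, String> rec : consumer.poll(Duration.ofSeconds(5))) {
                // if offset 134 holds no consumable record, the first record printed is 135
                System.out.println(rec.offset() + " -> " + rec.value());
            }
        }
    }
}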
When I swap the producerFactory() method above for the commented-out, non-transactional version, the only change to the class is the following method (the rest is identical):
    @Bean
    public ProducerFactory<Object, Object> producerFactory() {
        // no transactionIdPrefix is set, so this factory is not transaction-capable
        return new DefaultKafkaProducerFactory<>(producerConfig());
    }
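With no transactionIdPrefix, the template can now send outside a transaction. Under the same hypothetical SendRunner sketch as above, the send presumably simplifies to:

    // Hypothetical counterpart to the sketch above: with a non-transactional
    // factory a plain send() is enough; in spring-kafka 2.2 the same call on a
    // transaction-capable template would fail with "No transaction is in process".
    public void send(String payload) {
        this.template.send("wxy_test", payload).addCallback(
                result -> System.out.printf(
                        "Kafka Message of topic:%s/partition:%d/offset:%d sends successfully!%n",
                        result.getRecordMetadata().topic(),
                        result.getRecordMetadata().partition(),
                        result.getRecordMetadata().offset()),
                ex -> ex.printStackTrace());
    }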
This time the log looks like this:
2020-06-12 19:56:26.660 INFO 19108 --- [ad | producer-1] com.example.KafkaSendResultHandler : Kafka Message of topic:wxy_test/size:4/partition:0/offset:144 sends successfully at time:20200612 19:56:26-641!
2020-06-12 19:57:31.308 INFO 19108 --- [ad | producer-1] com.example.KafkaSendResultHandler : Kafka Message of topic:wxy_test/size:4/partition:0/offset:145 sends successfully at time:20200612 19:57:31-306!
In short, each partition's offsets are now consecutive. So with transactions enabled the offset advances by 2 per message, and with transactions disabled by 1. This doesn't affect the end result, but I simply can't figure out why. Could someone take a look? This is my first question on orcHome, and I hope someone can explain it.