0

我有一个 Kafka logback appender,其中包含一个基本的 logback.xml,用于将日志事件推送到 Kafka。我把这个 Kafka logback appender 做成了一个 Spring Boot starter(启动器)库,这样只要把此 jar 添加到任何客户端应用程序,就能将日志事件推送到客户端 application.properties 中定义的 Kafka 主题。

组件:1)kafka-logback-starter 2)客户端应用程序

在 kafka-logback-starter 中,我正在从客户端应用程序中读取应用程序属性。

这是我的配置:

1) 在 kafka-logback-starter 中添加 spring.factories

2)

 @Configuration
 @EnableConfigurationProperties(MyKafkaProperties.class)
 public abstract class KafkaAppenderConfig<E> extends 
 UnsynchronizedAppenderBase<E> implements AppenderAttachable<E> {

 @Autowired
 private MyKafkaProperties myKafkaProperties;

 @Bean
 public KafkaConfig getKafkaConfig() {
    KafkaConfig kf = new KafkaConfig();
    kafka.put("Topic",sreKafkaProperties.getTopicName()); 
  **I see the values from client application.properties.**
    return kf;

 }

public void addProducerConfig(String keyValue) {
    System.out.println(getKafkaConfig().get("Topic")); //returns null
    System.out.println(myKafkaProperties.getTopic()); //returns null

}

超类代码:

import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.Appender;
import ch.qos.logback.core.spi.AppenderAttachableImpl;
import 
com.github.danielwegener.logback.kafka.delivery.FailedDeliveryCallback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.serialization.ByteArraySerializer;

import java.util.HashMap;
import java.util.Iterator;
import java.util.concurrent.ConcurrentLinkedQueue;


/**
 * Logback appender that delivers encoded logging events to a Kafka topic.
 *
 * <p>Events produced by the Kafka client libraries themselves are deferred to an
 * in-memory queue and drained on the next non-Kafka append, to avoid the
 * infinite recursion that would occur if the appender logged through itself.
 * Delivery failures are routed to any attached fallback appenders.
 *
 * <p>Configuration fields such as {@code topic}, {@code partition},
 * {@code encoder}, {@code keyingStrategy}, {@code deliveryStrategy} and
 * {@code producerConfig} are inherited from {@link KafkaAppenderConfig}.
 */
public class KafkaAppender<E> extends KafkaAppenderConfig<E> {

    /**
     * Kafka clients use this prefix for their slf4j logging.
     * This appender defers appends of any Kafka logs since it could cause
     * harmful infinite recursion/self feeding effects.
     */
    private static final String KAFKA_LOGGER_PREFIX =
            KafkaProducer.class.getPackage().getName().replaceFirst("\\.producer$", "");

    // Producer is created lazily on first append so that start() stays cheap
    // and a misconfigured broker does not block appender startup.
    private LazyProducer lazyProducer = null;

    // Fallback appenders that receive events Kafka could not deliver.
    private final AppenderAttachableImpl<E> aai = new AppenderAttachableImpl<E>();

    // Holds Kafka-client-originated events until they can be safely appended.
    private final ConcurrentLinkedQueue<E> queue = new ConcurrentLinkedQueue<E>();

    private final FailedDeliveryCallback<E> failedDeliveryCallback = new FailedDeliveryCallback<E>() {
        @Override
        public void onFailedDelivery(E evt, Throwable throwable) {
            // Route undeliverable events to the attached fallback appenders.
            aai.appendLoopOnAppenders(evt);
        }
    };

    public KafkaAppender() {
        // setting these as config values sidesteps an unnecessary warning (minor bug in KafkaProducer)
        addProducerConfigValue(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
        addProducerConfigValue(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
    }

    @Override
    public void doAppend(E e) {
        // First flush anything the Kafka client logged since the last append.
        ensureDeferredAppends();
        if (e instanceof ILoggingEvent && ((ILoggingEvent) e).getLoggerName().startsWith(KAFKA_LOGGER_PREFIX)) {
            // Kafka's own log events must not pass through this appender
            // synchronously — queue them to break the feedback loop.
            deferAppend(e);
        } else {
            super.doAppend(e);
        }
    }

    @Override
    public void start() {
        // only error free appenders should be activated
        if (!checkPrerequisites()) return;

        // Negative partitions are meaningless to Kafka; treat as "unassigned".
        if (partition != null && partition < 0) {
            partition = null;
        }

        lazyProducer = new LazyProducer();

        super.start();
    }

    @Override
    public void stop() {
        super.stop();
        // Only close a producer that was actually created; close() on a live
        // producer may throw, which must not prevent appender shutdown.
        if (lazyProducer != null && lazyProducer.isInitialized()) {
            try {
                lazyProducer.get().close();
            } catch (KafkaException e) {
                this.addWarn("Failed to shut down kafka producer: " + e.getMessage(), e);
            }
            lazyProducer = null;
        }
    }

    @Override
    public void addAppender(Appender<E> newAppender) {
        aai.addAppender(newAppender);
    }

    @Override
    public Iterator<Appender<E>> iteratorForAppenders() {
        return aai.iteratorForAppenders();
    }

    @Override
    public Appender<E> getAppender(String name) {
        return aai.getAppender(name);
    }

    @Override
    public boolean isAttached(Appender<E> appender) {
        return aai.isAttached(appender);
    }

    @Override
    public void detachAndStopAllAppenders() {
        aai.detachAndStopAllAppenders();
    }

    @Override
    public boolean detachAppender(Appender<E> appender) {
        return aai.detachAppender(appender);
    }

    @Override
    public boolean detachAppender(String name) {
        return aai.detachAppender(name);
    }

    /**
     * Encodes the event and hands it to the delivery strategy; on a missing
     * producer the event goes straight to the fallback appenders.
     */
    @Override
    protected void append(E e) {
        final byte[] payload = encoder.encode(e);
        final byte[] key = keyingStrategy.createKey(e);

        final Long timestamp = isAppendTimestamp() ? getTimestamp(e) : null;

        final ProducerRecord<byte[], byte[]> record =
                new ProducerRecord<>(topic, partition, timestamp, key, payload);

        final Producer<byte[], byte[]> producer = lazyProducer.get();
        if (producer != null) {
            // Fix: reuse the producer fetched above instead of calling
            // lazyProducer.get() a second time.
            deliveryStrategy.send(producer, record, e, failedDeliveryCallback);
        } else {
            // Producer creation failed — divert the event to the fallbacks.
            failedDeliveryCallback.onFailedDelivery(e, null);
        }
    }

    /**
     * Returns the event's own timestamp when available, otherwise "now".
     */
    protected Long getTimestamp(E e) {
        if (e instanceof ILoggingEvent) {
            return ((ILoggingEvent) e).getTimeStamp();
        } else {
            return System.currentTimeMillis();
        }
    }

    /**
     * Creates the underlying KafkaProducer from a defensive copy of the
     * inherited producer configuration. Overridable for testing.
     */
    protected Producer<byte[], byte[]> createProducer() {
        return new KafkaProducer<>(new HashMap<>(producerConfig));
    }

    private void deferAppend(E event) {
        queue.add(event);
    }

    // drains queue events to super
    private void ensureDeferredAppends() {
        E event;

        while ((event = queue.poll()) != null) {
            super.doAppend(event);
        }
    }

    /**
     * Thread-safe, lazily-initialized holder for the Kafka producer using
     * double-checked locking on a volatile field. If initialization fails the
     * producer stays {@code null} and a later call will retry.
     */
    private class LazyProducer {

        private volatile Producer<byte[], byte[]> producer;

        public Producer<byte[], byte[]> get() {
            Producer<byte[], byte[]> result = this.producer;
            if (result == null) {
                synchronized (this) {
                    result = this.producer;
                    if (result == null) {
                        this.producer = result = this.initialize();
                    }
                }
            }

            return result;
        }

        protected Producer<byte[], byte[]> initialize() {
            Producer<byte[], byte[]> producer = null;
            try {
                producer = createProducer();
            } catch (Exception e) {
                // Surface the failure via logback's status system; append()
                // treats a null producer as a failed delivery.
                addError("error creating producer", e);
            }
            return producer;
        }

        public boolean isInitialized() { return producer != null; }
    }

}

我看到 @Autowired 在 addProducerConfig 方法执行后被调用,因此应用程序属性值在 addProducerConfig 中不可用。

我如何确保在 KafkaAppenderConfig 上执行任何方法之前,@Autowired 注入已经完成?

注意:KafkaAppenderConfig 是一个抽象类。

4

1 回答 1

0

您可以实现 InitializingBean 接口,并在 afterPropertiesSet 方法中完成您的生产者配置:

public class KafkaAppender<E> extends KafkaAppenderConfig<E> implements InitializingBean {

    /**
     * Called by Spring after all dependency injection on this bean has
     * completed, so the producer configuration runs with properties bound.
     */
    @Override
    public void afterPropertiesSet() throws Exception {
        registerDefaultSerializers();
    }

    // Pre-registering the byte-array serializers as plain config values
    // sidesteps an unnecessary warning (minor bug in KafkaProducer).
    private void registerDefaultSerializers() {
        addProducerConfigValue(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
        addProducerConfigValue(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
    }
}
于 2018-09-16T02:50:45.937 回答