The Kafka Appender was used to store logs directly in Kafka queues. With K8s, all logs will be printed to standard out. The JSON format and the Kafka Appender are no longer used.
Some implementation partners want to use Kafka as a messaging platform and run into library conflicts with our Kafka Appender. Therefore, the Kafka Appender was removed.
In case the Kafka Appender is used in projects, please add the following configuration to a custom cartridge:
dependencies { runtime 'com.github.danielwegener:logback-kafka-appender' } — or, with an explicit version: dependencies { runtime 'com.github.danielwegener:logback-kafka-appender:0.1.0' }
Original logback extension for the Kafka Appender in folder staticfiles/cartridge/logback:
<?xml version="1.0" encoding="UTF-8" ?>
<!--
  In order to log into Kafka, use the properties
    intershop.logging.configurationfile.main=logback-kafka.xml
    intershop.logging.configurationfile.dbinit=logback-kafka.xml
-->
<included>
  <!-- Shared layout pattern. NOTE: the quotes around %thread are escaped as
       &quot; because a literal " is not allowed inside a double-quoted
       attribute value (the original was not well-formed XML here). -->
  <property name="intershop.CICLayout"
            value="[%date{yyyy-MM-dd HH:mm:ss.SSS Z}] %-5level ${intershop.HostName} ${intershop.InstallationID} ${intershop.ServerName} [%mdc{requestsite}] [%mdc{requestapplication}] %logger [%marker] [%mdc{request.type}] [%mdc{session.id}] [%mdc{request.uuid}] &quot;%thread&quot; %msg %ex%n%mdc{request.info}" />

  <!-- Kafka appender definition -->
  <appender name="kafkaAppender" class="com.github.danielwegener.logback.kafka.KafkaAppender">
    <!-- Default encoder: encodes every log message to a UTF-8 string -->
    <encoder class="com.github.danielwegener.logback.kafka.encoding.LayoutKafkaMessageEncoder">
      <layout class="ch.qos.logback.classic.PatternLayout">
        <!-- Literal quotes are legal in element text, so no escaping is needed here -->
        <pattern>[%date{yyyy-MM-dd HH:mm:ss.SSS Z}] %-5level ${intershop.HostName} ${intershop.InstallationID} ${intershop.ServerName} [%mdc{requestsite}] [%mdc{requestapplication}] %logger [%marker] [%mdc{request.type}] [%mdc{session.id}] [%mdc{request.uuid}] "%thread" %msg %ex%n%mdc{request.info}</pattern>
      </layout>
    </encoder>
    <topic>${intershop.logging.property.kafka.topic}</topic>
    <keyingStrategy class="com.github.danielwegener.logback.kafka.keying.RoundRobinKeyingStrategy" />
    <deliveryStrategy class="com.github.danielwegener.logback.kafka.delivery.AsynchronousDeliveryStrategy" />
    <!-- Each <producerConfig> translates to a regular kafka-client config entry (format: key=value). -->
    <!-- Producer configs are documented here: https://kafka.apache.org/documentation.html#newproducerconfigs -->
    <!-- bootstrap.servers is the only mandatory producerConfig. -->
    <!-- NOTE(review): trailing whitespace removed from the element text below -
         it would otherwise become part of the bootstrap.servers value. -->
    <producerConfig>bootstrap.servers=${intershop.logging.property.kafka.bootstrap.servers}</producerConfig>
  </appender>

  <!-- Define default appenders as no-ops because they might be used as references -->
  <appender name="Error" class="ch.qos.logback.core.helpers.NOPAppender" />
  <appender name="Warn" class="ch.qos.logback.core.helpers.NOPAppender" />
  <appender name="ImpexError" class="ch.qos.logback.core.helpers.NOPAppender" />
  <appender name="Job" class="ch.qos.logback.core.helpers.NOPAppender" />

  <!-- Log everything using the kafkaAppender -->
  <root level="INFO">
    <appender-ref ref="kafkaAppender" />
  </root>

  <logger name="org">
    <level value="INFO" />
  </logger>
  <logger name="javax">
    <level value="INFO" />
  </logger>
  <logger name="java.awt">
    <level value="INFO" />
  </logger>
  <logger name="tomcat">
    <level value="INFO" />
  </logger>
  <logger name="sun">
    <level value="INFO" />
  </logger>
  <logger name="com.sun">
    <level value="INFO" />
  </logger>
  <logger name="org.apache.catalina.startup.Catalina">
    <level value="INFO" />
  </logger>
  <logger name="org.apache.jasper.compiler.Compiler" additivity="false">
    <level value="INFO" />
  </logger>
  <!-- Avoid error messages from Sandesha startup:
       "Could not load module policies. Using default values."
       http://issues.apache.org/jira/browse/SANDESHA2-187 -->
  <logger name="org.apache.sandesha2.SandeshaModule" additivity="false">
    <level value="OFF" />
  </logger>
</included>