SpringBoot日志记录-每个日志语句中的Stacktrace

时间:2018-10-31 19:15:01

标签: java spring-boot slf4j

我一直在用 Spring Boot 开发一个电子邮件服务,并使用 slf4j 来记录日志。应用程序整体按预期工作,唯独日志部分有问题。在我自己的电脑上运行时,日志记录一切正常。但一旦部署到服务器上,日志记录只在第一次异常发生之前是正常的;之后,每一条日志语句中都会打印出一个 "stacktrace" 字段。这个 stacktrace 来自最近一次发生的异常,并且在下一次异常发生之前都不会改变。无论日志语句的级别是 INFO、WARN 还是 ERROR 都是如此。由于我无法在本地重现,也不知道是什么原因导致了这个问题,这让我非常沮丧。我搜索过 Google 和 Stack Overflow,但没有找到任何与此问题相关的帖子。以下是我在应用程序中使用的一些日志记录配置以及实际的日志输出。

Logback.xml

<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<!-- Spring Boot's logback defaults; this defines the CONSOLE_LOG_PATTERN and
     FILE_LOG_PATTERN properties referenced by the encoders below. -->
<include resource="org/springframework/boot/logging/logback/base.xml"/>
<property name="PROJECT_ID" value="email-service-consumer"/>
<!-- Plain console output using Spring Boot's default console pattern. -->
<appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<charset>UTF-8</charset>
<Pattern>${CONSOLE_LOG_PATTERN}</Pattern>
</encoder>
</appender>

<!-- Debug log file: rolled daily and additionally at 50MB per file,
     30 days of history kept. -->
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${CATALINA_HOME}/logs/${PROJECT_ID}_debug.log</file>
<append>true</append>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- daily rollover. Make sure the path matches the one in the file element or else
the rollover logs are placed in the working directory. -->
<fileNamePattern>${CATALINA_HOME}/logs/${PROJECT_ID}_debug.%d{yyyy-MM-dd}.%i.log</fileNamePattern>

<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>50MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<!-- keep 30 days' worth of history -->
<maxHistory>30</maxHistory>
</rollingPolicy>

<encoder>
<charset>UTF-8</charset>
<pattern>${FILE_LOG_PATTERN}</pattern>
</encoder>
</appender>

<!-- Splunk-formatted log file rendered by a custom project layout.
     NOTE(review): the reported symptom (a stale "stacktrace" field repeated
     on every subsequent log event after an exception, regardless of level)
     most plausibly originates inside com.example.core.logging.CcSplunkLogLayout,
     e.g. a throwable cached in an instance field that is never cleared - the
     layout source is not visible here, so verify this against that class. -->
<appender name="SPLUNK_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${CATALINA_HOME}/logs/${PROJECT_ID}_splunk.log</file>
<append>true</append>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- daily rollover. Make sure the path matches the one in the file element or else
the rollover logs are placed in the working directory. -->
<fileNamePattern>${CATALINA_HOME}/logs/${PROJECT_ID}_splunk.%d{yyyy-MM-dd}.%i.log</fileNamePattern>

<timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
<maxFileSize>5MB</maxFileSize>
</timeBasedFileNamingAndTriggeringPolicy>
<!-- keep 30 days' worth of history -->
<maxHistory>30</maxHistory>
</rollingPolicy>

<encoder class="ch.qos.logback.core.encoder.LayoutWrappingEncoder">
<charset>UTF-8</charset>
<layout class="com.example.core.logging.CcSplunkLogLayout" />
</encoder>
</appender>

<!-- Asynchronous fan-out to the three appenders above.
     discardingThreshold=0 disables event discarding even when the 500-entry
     queue fills, so logging callers may block instead of losing events. -->
<appender name="ASYNC_DEFAULTS" class="ch.qos.logback.classic.AsyncAppender">
<queueSize>500</queueSize>
<discardingThreshold>0</discardingThreshold>
<appender-ref ref="FILE"/>
<appender-ref ref="CONSOLE"/>
<appender-ref ref="SPLUNK_FILE"/>
</appender>

<!-- Non-production: everything at INFO through the async fan-out. -->
<springProfile name="!production">
<logger name="com.example" additivity="false" level="INFO">
<appender-ref ref="ASYNC_DEFAULTS"/>
</logger>
<logger name="org.hibernate" additivity="false" level="INFO">
<appender-ref ref="ASYNC_DEFAULTS"/>
</logger>
<logger name="org.springframework" additivity="false" level="INFO">
<appender-ref ref="ASYNC_DEFAULTS"/>
</logger>
<root level="INFO">
<appender-ref ref="ASYNC_DEFAULTS"/>
</root>
</springProfile>

<!-- Production: synchronous writes to the Splunk file only;
     framework loggers restricted to ERROR. -->
<springProfile name="production">
<logger name="com.example" additivity="false" level="INFO">
<appender-ref ref="SPLUNK_FILE"/>
</logger>
<logger name="org.hibernate" additivity="false" level="ERROR">
<appender-ref ref="SPLUNK_FILE"/>
</logger>
<logger name="org.springframework" additivity="false" level="ERROR">
<appender-ref ref="SPLUNK_FILE"/>
</logger>
<root level="ERROR">
<appender-ref ref="SPLUNK_FILE"/>
</root>
</springProfile>

<!-- Docker dev: app logs to the Splunk file, framework logs to the debug file,
     root at TRACE for maximum visibility. -->
<springProfile name="docker-dev">
<logger name="com.example" additivity="false" level="INFO">
<appender-ref ref="SPLUNK_FILE"/>
</logger>
<logger name="org.hibernate" additivity="false" level="INFO">
<appender-ref ref="FILE"/>
</logger>
<logger name="org.springframework" additivity="false" level="INFO">
<appender-ref ref="FILE"/>
</logger>
<root level="TRACE">
<appender-ref ref="FILE"/>
</root>
</springProfile>
</configuration>

我还编写了一个日志工具类(LoggingUtil),其中包含针对不同日志级别的包装方法。

/**
 * Static helpers that enrich SLF4J log statements with the request / message
 * identifiers (orderId, templateName, sessionId, storeId, ...) so every log
 * line carries the same searchable context fields.
 *
 * <p>The caller passes its own {@link Logger} instance so the emitted log
 * events are attributed to the originating class rather than to this utility.
 */
public class LoggingUtil {

    /** Utility class: static methods only, never instantiated. */
    private LoggingUtil() {
    }

    /**
     * Logs a WARN-level message followed by the request identifiers extracted
     * from the email request.
     *
     * @param message      base message; a trailing period is appended if missing
     * @param emailRequest source of the identifiers (may be {@code null})
     * @param logger       the caller's SLF4J logger
     */
    public static void logWarn(String message, EmailRequest emailRequest, Logger logger) {
        Map<String, String> identifierMap = getRequestIdentifiers(emailRequest);
        // Local, single-threaded buffer: StringBuilder instead of the
        // needlessly synchronized StringBuffer (matches the rest of the class).
        StringBuilder warnMessage = new StringBuilder(checkPunctuation(message));
        warnMessage.append(Constants.REQUEST_IDENTIFIERS);

        logger.warn(warnMessage.toString(), identifierMap.get(Constants.ORDERID),
                identifierMap.get(Constants.TEMPLATENAME),
                identifierMap.get(Constants.SESSIONID),
                identifierMap.get(Constants.STOREID));
    }

    /**
     * Logs an INFO-level message followed by the request identifiers extracted
     * from the email request.
     *
     * @param message      base message; a trailing period is appended if missing
     * @param emailRequest source of the identifiers (may be {@code null})
     * @param logger       the caller's SLF4J logger
     */
    public static void logInfo(String message, EmailRequest emailRequest, Logger logger) {
        Map<String, String> identifierMap = getRequestIdentifiers(emailRequest);
        StringBuilder infoMessage = new StringBuilder(checkPunctuation(message));

        infoMessage.append(Constants.REQUEST_IDENTIFIERS);

        logger.info(infoMessage.toString(), identifierMap.get(Constants.ORDERID),
                identifierMap.get(Constants.TEMPLATENAME),
                identifierMap.get(Constants.SESSIONID),
                identifierMap.get(Constants.STOREID));
    }

    /**
     * Logs an INFO-level message followed by the Kafka message header
     * identifiers (sessionId, clientOriginationId, eventId, eventName,
     * timestamp). All identifiers default to {@code null} when the message or
     * its header is absent, so the placeholders still render as "null".
     *
     * @param message      base message; a trailing period is appended if missing
     * @param kafkaMessage message whose header supplies the identifiers (may be {@code null})
     * @param logger       the caller's SLF4J logger
     */
    public static void logInfo(String message, KafkaMessage kafkaMessage, Logger logger) {
        String sessionId = null;
        String clientOriginationId = null;
        String eventId = null;
        String eventName = null;
        String timeStamp = null;

        StringBuilder stringBuilder = new StringBuilder(checkPunctuation(message));
        stringBuilder.append(Constants.KAFKA_MESSAGE_IDENTIFIERS);

        if(kafkaMessage != null && kafkaMessage.getHeader() != null) {
            Header header = kafkaMessage.getHeader();
            sessionId = header.getSessionId();
            clientOriginationId = header.getClientOriginationId();
            eventId = header.getEventId();
            eventName = header.getEventName();
            timeStamp = header.getTimestamp();
        }

        logger.info(stringBuilder.toString(), sessionId, clientOriginationId, eventId, eventName, timeStamp);
    }

    /**
     * Logs an ERROR-level message followed by the request identifiers and the
     * exception. Because the throwable is passed as the final argument, SLF4J
     * prints its stack trace in addition to filling the message placeholders.
     *
     * @param message      base message; a trailing period is appended if missing
     * @param emailRequest source of the identifiers (may be {@code null})
     * @param e            the triggering exception (may be {@code null})
     * @param logger       the caller's SLF4J logger
     */
    public static void logError(String message, EmailRequest emailRequest, Exception e, Logger logger) {
        Map<String, String> identifierMap = getRequestIdentifiers(emailRequest);

        StringBuilder errorMessage = new StringBuilder(checkPunctuation(message));
        errorMessage.append(Constants.REQUEST_IDENTIFIERS);

        // Null-safe: previously e.getMessage() threw an NPE inside the logging
        // utility whenever no exception object was supplied.
        logger.error(errorMessage.toString(), identifierMap.get(Constants.ORDERID),
                identifierMap.get(Constants.TEMPLATENAME),
                identifierMap.get(Constants.SESSIONID),
                identifierMap.get(Constants.STOREID),
                e != null ? e.getMessage() : null, e);
    }

    /**
     * Extracts the request identifiers from the email request object. Missing
     * pieces (null request, null email vars) yield {@code null} map values so
     * callers can log them unconditionally.
     *
     * @param emailRequest request to read, may be {@code null}
     * @return map keyed by the Constants identifier names; values may be {@code null}
     */
    private static Map<String, String> getRequestIdentifiers(EmailRequest emailRequest) {
        String templateName = emailRequest != null ? emailRequest.getTemplateName() : null;
        String orderId = null;
        String sessionId = null;
        String storeId = null;
        if(emailRequest != null && emailRequest.getEmailVars() != null && emailRequest.getEmailVars().getTemplateVarsList() != null) {
            EmailVars emailVars = emailRequest.getEmailVars();
            orderId = emailVars.getTemplateVariable(Constants.ORDERID);
            sessionId = emailVars.getTemplateVariable(Constants.SESSIONID);
            storeId = emailVars.getTemplateVariable(Constants.STOREID);
        }
        // HashMap (not Map.of) on purpose: values are allowed to be null.
        Map<String, String> identifierMap = new HashMap<>();
        identifierMap.put(Constants.TEMPLATENAME, templateName);
        identifierMap.put(Constants.ORDERID, orderId);
        identifierMap.put(Constants.SESSIONID, sessionId);
        identifierMap.put(Constants.STOREID, storeId);

        return identifierMap;
    }

    /**
     * Trims the message and guarantees it ends with a period. Null or blank
     * input collapses to the empty string.
     *
     * @param message raw message, may be {@code null} or blank
     * @return "" for null/blank input, otherwise the trimmed message ending
     *         with "." (a ". " separator is appended when the period is missing)
     */
    private static String checkPunctuation(String message) {
        if(message == null || message.trim().isEmpty()) {
            return "";
        }
        //Removing any trailing white spaces
        message = message.trim();

        //Append a period plus separating space only when one is missing.
        return message.endsWith(".") ? message : message + ". ";
    }
}

日志输出示例:

  

2018-10-31 11:45:33.770000 -0700 LOG_LEVEL="INFO" apiCall="com.example.email.service.KafkaConsumer" stacktrace="java.lang.Exception: 无效的收件人地址
    at com.example.email.service.KafkaConsumer.handleMessages(KafkaConsumer.java:84)
    at sun.reflect.GeneratedMethodAccessor134.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.springframework.messaging.handler.invocation.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:181)
    at org.springframework.messaging.handler.invocation.InvocableHandlerMethod.invoke(InvocableHandlerMethod.java:114)
    at org.springframework.cloud.stream.binding.StreamListenerMessageHandler.handleRequestMessage(StreamListenerMessageHandler.java:55)
    at org.springframework.integration.handler.AbstractReplyProducingMessageHandler.handleMessageInternal(AbstractReplyProducingMessageHandler.java:109)
    at org.springframework.integration.handler.AbstractMessageHandler.handleMessage(AbstractMessageHandler.java:158)
    at org.springframework.integration.dispatcher.AbstractDispatcher.tryOptimizedDispatch(AbstractDispatcher.java:116)
    at org.springframework.integration.dispatcher.UnicastingDispatcher.doDispatch(UnicastingDispatcher.java:132)
    at org.springframework.integration.dispatcher.UnicastingDispatcher.dispatch(UnicastingDispatcher.java:105)
    at org.springframework.integration.channel.AbstractSubscribableChannel.doSend(AbstractSubscribableChannel.java:73)
    at org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:445)
    at org.springframework.integration.channel.AbstractMessageChannel.send(AbstractMessageChannel.java:394)
    at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:181)
    at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:160)
    at org.springframework.messaging.core.GenericMessagingTemplate.doSend(GenericMessagingTemplate.java:47)
    at org.springframework.messaging.core.AbstractMessageSendingTemplate.send(AbstractMessageSendingTemplate.java:108)
    at org.springframework.integration.endpoint.MessageProducerSupport.sendMessage(MessageProducerSupport.java:203)
    at org.springframework.integration.kafka.inbound.KafkaMessageDrivenChannelAdapter.access$300(KafkaMessageDrivenChannelAdapter.java:70)
    at org.springframework.integration.kafka.inbound.KafkaMessageDrivenChannelAdapter$IntegrationRecordMessageListener.onMessage(KafkaMessageDrivenChannelAdapter.java:387)
    at org.springframework.integration.kafka.inbound.KafkaMessageDrivenChannelAdapter$IntegrationRecordMessageListener.onMessage(KafkaMessageDrivenChannelAdapter.java:364)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeRecordListener(KafkaMessageListenerContainer.java:1071)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeWithRecords(KafkaMessageListenerContainer.java:1051)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeRecordListener(KafkaMessageListenerContainer.java:998)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeListener(KafkaMessageListenerContainer.java:866)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:724)
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
    at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    at java.lang.Thread.run(Thread.java:748)" message="花费时间处理消息为2265ms。对于orderId=6207168, templateName=TERMS_CONDITIONS, sessionId=77337650, storeId=1892。errorMessage={}"

我怀疑包装器方法可能是罪魁祸首,因为我将记录器实例从发生异常的类传递给包装器方法。我不确定。感谢您的帮助,谢谢。

0 个答案:

没有答案