Parse RedisTimeoutException Exception Details

Redis is a great distributed cache service. I use it extensively in our Microsoft Azure hosted enterprise solutions. One of the challenges we faced was fine-tuning the client application to ensure that read and write operations do not time out. In our application we catch the exceptions related to all read and write operations. If you have read or write operations timing out, you may see exceptions like the one below.

StackExchange.Redis.RedisTimeoutException: Timeout performing SETEX keyname, 
inst: 6, mgr: Inactive, err: never, queue: 70, qu: 0, qs: 70, qc: 0, wr: 0, wq: 0, in: 37734, ar: 0, 
clientName: RD0004FFE2E1CC, serverEndpoint: Unspecified/, keyHashSlot: 2450, 
IOCP: (Busy=0,Free=1000,Min=2,Max=1000), WORKER: (Busy=11,Free=32756,Min=50,Max=32767) 

The exception detail provides a lot of information about the inner state of the Redis operation. The following are great articles that explain the different values shown in this exception.

In this post, I am not going to talk about what these values are and what they mean; I will cover that in another post. Here, I am going to talk about how to collect this information in a form that is easy to use. Every time I opened a ticket with the Microsoft team to talk about performance tuning, the first question was always whether we could get a list of all the errors. And when we gave them the list, the next thing I would see was the Microsoft team manually going through each exception to look at the values. It was such a painful process.

At the end of the day, performance tuning of Redis came down to reviewing the values of parameters like qu, in, qs, etc. So I decided to write a parser that extracts these values from each exception and presents them in a CSV file.

Regular Expressions To Parse RedisTimeoutException Details

There are a lot of values in the exception details. I wrote a regular expression that extracts all of these values, and a small Python script that uses the regular expression to generate an output file with all the values.

import pandas as pd
import pyodbc
import datetime
import geoip2.database
from config import Config
from data_connection import ConnectionString
from datetime import datetime, timezone, tzinfo
import csv
import re

class ThreadState:
    def __init__(self) -> None:
        self.type = "" = 0
        self.busy = 0
        self.min = 0
        self.max = 0

class RedisInternals:
    def __init__(self) -> None:
        self.ip = ""
        self.log_time = None
        self.input = 0
        self.inst = 0
        self.queue = 0
        self.qu = 0
        self.qs = 0
        self.qc = 0
        self.wr = 0
        self.wq = 0 = 0
        self.client = 0
        self.iocp_free = 0
        self.iocp_busy = 0
        self.iocp_min = 0
        self.iocp_max = 0
        self.worker_free = 0
        self.worker_busy = 0
        self.worker_min = 0
        self.worker_max = 0

class RedisInternalsParser:
    def __init__(self) -> None:
        token_specification = [
        ('IN',      r'\bin:(\s\d*)'),
        ('INST',    r'\binst:(\s\d*)'),
        ('QUEUE',   r'\bqueue:(\s\d*)'),
        ('QU',      r'\bqu:(\s\d*)'),
        ('QS',      r'\bqs:(\s\d*)'),
        ('QC',      r'\bqc:(\s\d*)'),
        ('WR',      r'\bwr:(\s\d*)'),
        ('WQ',      r'\bwq:(\s\d*)'),
        ('AR',      r'\bar:(\s\d*)'),
        ('CLIENT',  r'\bclientName:(\s\w*)'),
        ('IOCP',    r'\bIOCP:\s\((.*)\),'),
        ('WORKER',  r'\bWORKER:\s\((.*)\)'),
        tok_regex = '|'.join('(?P<%s>%s)' % pair for pair in token_specification)
        self.internalsMatchRegEx = re.compile(tok_regex)

        threadstate_specification = [
        threadstate_regex = '|'.join('(?P<%s>%s)' % pair for pair in threadstate_specification)
        self.threadStateRegEx = re.compile(threadstate_regex)

    def _parseColonSeperatedValue(self, clsp):
        parts = clsp.split(":")
        return parts[1].strip()

    def _parseThreadStateValue(self, val):
        parts = val.split("=")
        return parts[1].strip()

    def _parseThreadStateValues(self, values) -> ThreadState:
        threadState = ThreadState()
        matches = self.threadStateRegEx.finditer(values)
        for match in matches:
            kind = match.lastgroup
            value =
            match kind:
                case "Free":
           = int(self._parseThreadStateValue(value))
                case "Busy":
                    threadState.busy = int(self._parseThreadStateValue(value))
                case "Min":
                    threadState.min = int(self._parseThreadStateValue(value))
                case "Max":
                    threadState.max = int(self._parseThreadStateValue(value))
        return threadState

    def parseLogEntry(self, logEntry) -> RedisInternals:
         redisInternals = RedisInternals()
         matches = self.internalsMatchRegEx.finditer(logEntry)
         for match in matches:
             kind = match.lastgroup
             value =
             match kind:
                 case "CLIENT":
                     redisInternals.client = self._parseColonSeperatedValue(value)
                 case "IN":
                     redisInternals.input = int(self._parseColonSeperatedValue(value))
                 case "INST":
                     redisInternals.inst = int(self._parseColonSeperatedValue(value))
                 case "QUEUE":
                     redisInternals.queue = int(self._parseColonSeperatedValue(value))
                 case "QU":
                     redisInternals.qu = int(self._parseColonSeperatedValue(value))
                 case "QS":
                     redisInternals.qs = int(self._parseColonSeperatedValue(value))
                 case "QC":
                     redisInternals.qc = int(self._parseColonSeperatedValue(value))
                 case "WR":
                     redisInternals.wr = int(self._parseColonSeperatedValue(value))
                 case "WQ":
                     redisInternals.wq = int(self._parseColonSeperatedValue(value))
                 case "AR":
            = int(self._parseColonSeperatedValue(value))
                 case "IOCP":
                     iocpState = self._parseThreadStateValues(value)
                     redisInternals.iocp_free =
                     redisInternals.iocp_busy = iocpState.busy
                     redisInternals.iocp_min = iocpState.min
                     redisInternals.iocp_max = iocpState.max
                 case "WORKER":
                     workerState = self._parseThreadStateValues(value)
                     redisInternals.worker_free =
                     redisInternals.worker_busy = workerState.busy
                     redisInternals.worker_min = workerState.min
                     redisInternals.worker_max = workerState.max
         return redisInternals

class RedisExceptionLogAnalyzer(object):
    def __init__(self) -> None:
        self.app_config = Config()
        self.data_df = []
        self.data_loaded = False

    def initializeDataFromSqlServer(self, 
        self.initialized = False
        self.biddingAppConnectionString = biddingAppConnectionString
        #prepare data frames
        systemlog_query = "select * from MyTable;"
        conn = f"DRIVER={{ODBC Driver 17 for SQL Server}};SERVER={myConnectionString.server};DATABASE={myConnectionString.database};
        systemlog_connection = pyodbc.connect(conn)
        systemlog_cursor = systemlog_connection.cursor()
        self.systemlog_df = pd.read_sql(systemlog_query, systemlog_connection)

    def getRedisTimeoutExceptionsData(self):
        logEntryParser = RedisInternalsParser()
        report_data = []
        query_result = self.systemlog_df.query('FullMessage.str.contains("StackExchange.Redis.RedisTimeoutException")')
        for i in range(len(query_result.index)):
            data_row = query_result.iloc[i]
            message = data_row["MessageField"]
            internals_data = logEntryParser.parseLogEntry(message)
        return report_data

    def exportRedisTimeoutReport(report_data, format):
        file_name = f'RedisTimeoutReport_{"%Y%m%d%H%M%S")}.csv'
        with open(file_name, "w", newline="") as f:
            column_names = ["Time","Client","IOCP_Free","IOCP_Busy","IOCP_Min","IOCP_Max","Worker_Free","Worker_Busy", 
                  "Worker_Min","Worker_Max","In","INST","Queue","QS","WQ","WR","Thread Gap","IP"]
            writer = csv.DictWriter(f, fieldnames=column_names)
            for row in report_data:
                    {"Time": row.log_time,
                    "Client": row.client,
                    "IP": row.ip,
                    "Thread Gap":row.queue-row.worker_min})

if __name__ == "__main__":
    app_config = Config()
    # NOTE(review): the ConnectionString arguments were truncated in the
    # original post; supply the remaining fields from your configuration.
    clientSideDataConnectionString = ConnectionString(
        app_config.configuration.app_connection_string.server,
        app_config.configuration.app_connection_string.database)

    systemLogAnalyzer = RedisExceptionLogAnalyzer()
    # Bug fix: the data frame must be loaded before it can be queried.
    systemLogAnalyzer.initializeDataFromSqlServer(clientSideDataConnectionString)
    reportData = systemLogAnalyzer.getRedisTimeoutExceptionsData()
    RedisExceptionLogAnalyzer.exportRedisTimeoutReport(reportData, "csv")

The above code may not work for you as is because I have removed some parts of the code to protect internal details. But the regular expression is the most important part of this implementation. This should give you a good starting point for preparing the data in a usable form and help you make decisions regarding performance tuning of the Redis cache.

If you have any question about Redis performance tuning, feel free to contact.




26.3 °C / 79.3 °F

weather conditions Clouds

Monthly Posts

Blog Tags