At a get-together over the National Day holiday, someone brought up life ambitions. What I want is "status, standing, and love!", which really boils down to money, face, and pretty girls. O.O

Main text

In September, production incidents hit us on two consecutive days (call them day A and day B), with the Redis monitoring curves spiking instantly.

Business context: 10 million UV per day (an average of roughly 115 requests per second, with peaks well above that).

Redis monitoring curves

(Editing the images is too much of a hassle, so just picture the curves~):

[Charts, all spiking at the same moment: redis-clients-num, redis-command, redis-key, redis-read, redis-write]

Redis logs

[Screenshot of the Redis log output, 2015-10-08]

Business code

<?php
(self::$_debug) ? Helper_Log::writeApplog('newFeatureOfMixedFeed', "Initializing ratio data.") : null;
if (self::$_redisDb == null) {
    self::$_redisDb = new Comm_Redisdb(self::REDIS_SERVER);
}
/** Check whether cached data exists **/
$total = self::$_redisDb->lSize(Conf_Sailmixfeed::REDIS_CONF_RATIO_KEY, array());
if ($total) {
    /** Cache hit **/
    $tmp = self::$_redisDb->lRange(self::REDIS_CONF_RATIO_KEY, array(), 0, 0);
    self::$_ratio = $tmp[0];
    (self::$_debug) ? Helper_Log::writeApplog('newFeatureOfMixedFeed', "Ratio data found in the Redis cache: " . var_export(self::$_ratio, true)) : null;
} else {
    (self::$_debug) ? Helper_Log::writeApplog('newFeatureOfMixedFeed', "No ratio data in Redis") : null;
    /** Cache miss => rebuild the cache **/
    /** Read from the database **/
    $results = ModelMixed_Feeds_Ratio::getList(array('where' => '1'));
    if ($results && is_array($results)) {
        foreach ($results as $result) {
            self::$_ratio[intval($result['category'])] = intval($result['ratio']) / 100;
        }
    }
    if (!isset(self::$_ratio) || !is_array(self::$_ratio)) {
        (self::$_debug) ? Helper_Log::writeApplog('newFeatureOfMixedFeed', "DB read failed => using the default ratios") : null;
        /** DB read failed => fall back to the default ratios **/
        self::$_ratio[self::RATIO_24HOUR] = self::PERCENT_24HOUR;
        self::$_ratio[self::RATIO_HOUR] = self::PERCENT_HOUR;
        self::$_ratio[self::RATIO_LOCAL] = self::PERCENT_LOCAL;
        self::$_ratio[self::RATIO_REC] = self::PERCENT_REC;
        self::$_ratio[self::RATIO_TAG] = self::PERCENT_TAG;
        self::$_ratio[self::RATIO_UVE] = self::PERCENT_UVE;
    }
    (self::$_debug) ? Helper_Log::writeApplog('newFeatureOfMixedFeed', "Writing ratio data to the Redis cache: " . var_export(array(0 => self::$_ratio), true)) : null;
    /** Repopulate the cache. Note: del + rPush is not atomic, and every request
        that saw an empty list runs this branch concurrently **/
    self::$_redisDb->del(Conf_Sailmixfeed::REDIS_CONF_RATIO_KEY, array());
    self::$_redisDb->rPush(Conf_Sailmixfeed::REDIS_CONF_RATIO_KEY, array(), array(0 => self::$_ratio));
}


The problem

Putting the business code and the Redis logs side by side: in the normal case there should be one delete and one write, followed by a long run of reads. The logs, however, show a flood of writes. This is a classic cache stampede: under high concurrency, many requests saw an empty cache at the same moment and all tried to repopulate it; the write volume was apparently large enough that subsequent reads failed as well, which triggered still more writes and finally dragged Redis down. A minimal reproduction of the race is sketched below.
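To make the race concrete, here is a small self-contained reproduction (my own sketch, not the production code). It assumes the phpredis and pcntl extensions, CLI PHP on a Unix-like system, and a Redis server on 127.0.0.1:6379; the key names are made up. Every forked worker passes the emptiness check before any of them writes, so the delete + push pair runs once per worker instead of once in total:

<?php
// Cache-stampede reproduction: N workers race the same check-then-populate logic.
const KEY = 'stampede_demo_ratio';

$redis = new Redis();
$redis->connect('127.0.0.1', 6379);
$redis->del(KEY);              // start from a cold cache
$redis->del(KEY . ':writes');  // reset the write counter

$workers = 10;
for ($i = 0; $i < $workers; $i++) {
    if (pcntl_fork() === 0) {              // child process = one "request"
        $r = new Redis();
        $r->connect('127.0.0.1', 6379);
        if ($r->lLen(KEY) == 0) {          // every child sees an empty list...
            usleep(100000);                // ...simulate the slow DB read...
            $r->del(KEY);                  // ...then every child deletes
            $r->rPush(KEY, serialize(array('ratio' => 1)));  // ...and writes
            $r->incr(KEY . ':writes');
        }
        exit(0);
    }
}
while (pcntl_waitpid(-1, $status) > 0);    // wait for all children

// Typically prints "writes: 10", not the single write you would expect.
echo "writes: " . $redis->get(KEY . ':writes') . "\n";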

The fix

  1. If the cache doesn't need list operations such as sorting or range slicing, switch it to Memcached (a sketch with the stock Memcached extension follows this list)
  2. Under high concurrency, wrap the Redis delete + write in a transaction
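For option 1, the fixed production code further down goes through an in-house Comm_Mc wrapper. For readers without that wrapper, a rough equivalent with the stock PHP Memcached extension might look like this; the server address, key name, TTL, and the stub DB loader are illustrative assumptions, not the original code:

<?php
// Option 1 sketch: a plain key/value cache instead of a Redis list.
function loadRatiosFromDb()
{
    // Stand-in for ModelMixed_Feeds_Ratio::getList(); returns category => ratio.
    return array(1 => 0.4, 2 => 0.3, 3 => 0.3);
}

$mc = new Memcached();
$mc->addServer('127.0.0.1', 11211);

$ratio = $mc->get('mixedfeed:conf:ratio');
if ($ratio === false) {
    $ratio = loadRatiosFromDb();
    // set() is a single atomic command: concurrent rebuilders simply overwrite
    // each other with identical data, unlike the racy del + rPush pair above.
    $mc->set('mixedfeed:conf:ratio', $ratio, 300);
}

The point is that a plain key/value set collapses the two-step del + rPush into one command, removing the non-atomic window entirely.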

Redis transaction

<?php
$redis_db->watch(Conf_Sailmixfeed2::REDIS_CONF_KEY, $args);  // abort the transaction if the key is modified meanwhile
$redis_db->multi();                                          // start queueing commands
$redis_db->del(Conf_Sailmixfeed2::REDIS_CONF_KEY, $args);
$ss = $redis_db->rPush(Conf_Sailmixfeed2::REDIS_CONF_KEY, $args, $mix_data);
$redis_db->exec();                                           // del + rPush execute as one atomic unit
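One caveat the snippet above glosses over: when the WATCHed key is modified by another client, EXEC aborts without raising an error, so the write is silently skipped. With the stock phpredis client (the snippet uses the in-house Comm_Redisdb wrapper), a defensive version checks exec()'s return value and retries a bounded number of times; the host, key name, payload, and retry count here are illustrative assumptions:

<?php
// WATCH/MULTI/EXEC with an explicit retry loop (stock phpredis extension).
$redis = new Redis();
$redis->connect('127.0.0.1', 6379);

$key     = 'mixedfeed:conf:ratio';
$payload = serialize(array(0 => array('ratio' => 1)));  // stand-in for the ratio array

for ($attempt = 0; $attempt < 3; $attempt++) {
    $redis->watch($key);           // EXEC will abort if $key changes before it runs
    $result = $redis->multi()
        ->del($key)
        ->rPush($key, $payload)
        ->exec();                  // returns FALSE when the transaction was aborted
    if ($result !== false) {
        break;                     // committed: del + rPush ran atomically
    }
    // Another client repopulated the key first; its data is equivalent to ours,
    // so retrying (or simply giving up) is safe here.
}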

Business code after the fix

<?php
/** Return the static variable directly if it is already populated **/
if (isset(self::$_ratio) && !empty(self::$_ratio)) return self::$_ratio;
/** Initialize **/
$mc = Comm_Mc::init(Comm_Mc::BASIC);
$mc->setUseL0(false);
self::log("Initializing ratio data.");
/** Check whether cached data exists **/
$results = $mc->getData(self::REDIS_CONF_RATIO_KEY);
if ($results) {
    /** Cache hit **/
    self::$_ratio = $results;
    self::log("Cache hit: " . var_export(self::$_ratio, true));
} else {
    /** Cache miss **/
    /** Read from the database **/
    $_results = ModelMixed_Feeds_Ratio::getList(array('where' => '1'));
    if ($_results && is_array($_results)) {
        foreach ($_results as $_result) {
            self::$_ratio[intval($_result['category'])] = intval($_result['ratio']) / 100;
        }
    }
    /** DB read failed **/
    if (empty(self::$_ratio)) {
        /** DB read failed => fall back to the default ratios **/
        self::$_ratio[self::RATIO_24HOUR] = self::PERCENT_24HOUR;
        self::$_ratio[self::RATIO_HOUR] = self::PERCENT_HOUR;
        self::$_ratio[self::RATIO_LOCAL] = self::PERCENT_LOCAL;
        self::$_ratio[self::RATIO_REC] = self::PERCENT_REC;
        self::$_ratio[self::RATIO_TAG] = self::PERCENT_TAG;
        self::$_ratio[self::RATIO_UVE] = self::PERCENT_UVE;
        self::log("DB read failed => falling back to the default ratios");
    }
    $mc->setData(self::REDIS_CONF_RATIO_KEY, array(), self::$_ratio);
    self::log("Cache miss, repopulated with: " . var_export(self::$_ratio, true));
}
return self::$_ratio;
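Even with this version, every request that hits an expired key still goes to the database at the same time. If that ever becomes a problem, one common extra guard (not in the original code; sketched here with the stock Memcached extension, with made-up key names and TTLs) is to let only the one request that wins an atomic add() rebuild the cache, while the others briefly serve the defaults:

<?php
// Stampede guard: Memcached::add() succeeds for exactly one client
// while the lock key is absent.
$mc = new Memcached();
$mc->addServer('127.0.0.1', 11211);

$ratio = $mc->get('mixedfeed:conf:ratio');
if ($ratio === false) {
    if ($mc->add('mixedfeed:conf:ratio:lock', 1, 10)) {
        // Won the 10-second lock: rebuild from the DB and repopulate the cache.
        $ratio = array(1 => 0.4, 2 => 0.3, 3 => 0.3);  // stand-in for the DB read
        $mc->set('mixedfeed:conf:ratio', $ratio, 300);
        $mc->delete('mixedfeed:conf:ratio:lock');
    } else {
        // Someone else is rebuilding: fall back to the default ratios this time.
        $ratio = array(1 => 0.4, 2 => 0.3, 3 => 0.3);
    }
}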

Note: the cover image is a picture from WeChat Moments.
