Skip to content

Commit

Permalink
[fix] Failed to import gptcache due to missing redis dependency (#522)
Browse files Browse the repository at this point in the history
* [mod] move RedisCacheEviction to a separate module to avoid `import_redis` call

Signed-off-by: Anurag Wagh <a9raag@gmail.com>

* [fix] add `redis_eviction` module

Signed-off-by: Anurag Wagh <a9raag@gmail.com>

* [mod] pylint: disable wrong import position

Signed-off-by: Anurag Wagh <a9raag@gmail.com>

---------

Signed-off-by: Anurag Wagh <a9raag@gmail.com>
  • Loading branch information
a9raag authored Aug 26, 2023
1 parent 790e9f4 commit f9456aa
Show file tree
Hide file tree
Showing 3 changed files with 85 additions and 79 deletions.
77 changes: 0 additions & 77 deletions gptcache/manager/eviction/distributed_cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,13 +2,8 @@
from abc import ABC, abstractmethod
from typing import List

from gptcache.utils import import_redis
from gptcache.manager.eviction.base import EvictionBase

import_redis()
import redis
from redis_om import get_redis_connection


class DistributedEviction(EvictionBase, ABC):
"""
Expand All @@ -29,78 +24,6 @@ def policy(self) -> str:
pass


class RedisCacheEviction(DistributedEviction, ABC):
    """eviction: Distributed Cache Eviction Strategy using Redis.

    Keys are stored as ``<global_key_prefix>:evict:<key>`` markers; actual
    eviction is delegated to Redis' own maxmemory policies.

    :param host: the host of redis
    :type host: str
    :param port: the port of redis
    :type port: int
    :param policy: eviction strategy policy of redis such as allkeys-lru,
        volatile-lru, allkeys-random, volatile-random, etc.
        refer https://redis.io/docs/reference/eviction/ for more information.
    :type policy: str
    :param maxmemory: the maxmemory of redis
    :type maxmemory: str
    :param global_key_prefix: the global key prefix
    :type global_key_prefix: str
    :param ttl: the ttl of the cache data
    :type ttl: int
    :param maxmemory_samples: Number of keys to sample when evicting keys
    :type maxmemory_samples: int
    :param kwargs: extra arguments forwarded to ``get_redis_connection``
    :type kwargs: Any
    """

    def __init__(self,
                 host="localhost",
                 port=6379,
                 maxmemory: str = None,
                 policy: str = None,
                 global_key_prefix="gptcache",
                 ttl: int = None,
                 maxmemory_samples: int = None,
                 **kwargs):
        self._redis = get_redis_connection(host=host, port=port, **kwargs)
        if maxmemory:
            self._redis.config_set("maxmemory", maxmemory)
        if maxmemory_samples:
            self._redis.config_set("maxmemory-samples", maxmemory_samples)
        if policy:
            self._redis.config_set("maxmemory-policy", policy)
        # Always assign _policy: previously it was only set when a policy was
        # given, so the `policy` property raised AttributeError for policy=None.
        self._policy = policy.lower() if policy else None

        self._global_key_prefix = global_key_prefix
        self._ttl = ttl

    def _create_key(self, key: str) -> str:
        """Namespace *key* under the global prefix to avoid collisions."""
        return f"{self._global_key_prefix}:evict:{key}"

    def put(self, objs: List[str], expire=False):
        """Record *objs* in Redis; attach the configured TTL when *expire* is True."""
        ttl = self._ttl if expire else None
        for key in objs:
            self._redis.set(self._create_key(key), "True", ex=ttl)

    def get(self, obj: str):
        """Return the stored marker for *obj*, refreshing its TTL on access.

        Returns ``None`` (after printing a message) on any Redis error.
        """
        try:
            value = self._redis.get(self._create_key(obj))
            # update key expire time when accessed
            if self._ttl:
                self._redis.expire(self._create_key(obj), self._ttl)
            return value
        except redis.RedisError:
            print(f"Error getting key {obj} from cache")
            return None

    @property
    def policy(self) -> str:
        """The configured Redis maxmemory eviction policy (lower-cased), or None."""
        return self._policy


class NoOpEviction(EvictionBase):
"""eviction: No Op Eviction Strategy. This is used when Eviction is managed internally
by the Databases such as Redis or memcached and no eviction is required to perform.
Expand Down
4 changes: 2 additions & 2 deletions gptcache/manager/eviction/manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,13 +34,13 @@ def get(
)
return eviction_base
if name == "redis":
from gptcache.manager.eviction.distributed_cache import RedisCacheEviction
from gptcache.manager.eviction.redis_eviction import RedisCacheEviction
if policy == "LRU":
policy = None
eviction_base = RedisCacheEviction(policy=policy, **kwargs)
return eviction_base
if name == "no_op_eviction":
from gptcache. manager.eviction.distributed_cache import NoOpEviction
from gptcache.manager.eviction.distributed_cache import NoOpEviction
eviction_base = NoOpEviction()
return eviction_base

Expand Down
83 changes: 83 additions & 0 deletions gptcache/manager/eviction/redis_eviction.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
# pylint: disable=wrong-import-position
from abc import ABC
from typing import List

from gptcache.manager.eviction.distributed_cache import DistributedEviction
from gptcache.utils import import_redis


import_redis()
import redis
from redis_om import get_redis_connection


class RedisCacheEviction(DistributedEviction, ABC):
    """eviction: Distributed Cache Eviction Strategy using Redis.

    Keys are stored as ``<global_key_prefix>:evict:<key>`` markers; actual
    eviction is delegated to Redis' own maxmemory policies.

    :param host: the host of redis
    :type host: str
    :param port: the port of redis
    :type port: int
    :param policy: eviction strategy policy of redis such as allkeys-lru,
        volatile-lru, allkeys-random, volatile-random, etc.
        refer https://redis.io/docs/reference/eviction/ for more information.
    :type policy: str
    :param maxmemory: the maxmemory of redis
    :type maxmemory: str
    :param global_key_prefix: the global key prefix
    :type global_key_prefix: str
    :param ttl: the ttl of the cache data
    :type ttl: int
    :param maxmemory_samples: Number of keys to sample when evicting keys
    :type maxmemory_samples: int
    :param kwargs: extra arguments forwarded to ``get_redis_connection``
    :type kwargs: Any
    """

    def __init__(self,
                 host="localhost",
                 port=6379,
                 maxmemory: str = None,
                 policy: str = None,
                 global_key_prefix="gptcache",
                 ttl: int = None,
                 maxmemory_samples: int = None,
                 **kwargs):
        self._redis = get_redis_connection(host=host, port=port, **kwargs)
        if maxmemory:
            self._redis.config_set("maxmemory", maxmemory)
        if maxmemory_samples:
            self._redis.config_set("maxmemory-samples", maxmemory_samples)
        if policy:
            self._redis.config_set("maxmemory-policy", policy)
        # Always assign _policy: previously it was only set when a policy was
        # given, so the `policy` property raised AttributeError for policy=None.
        self._policy = policy.lower() if policy else None

        self._global_key_prefix = global_key_prefix
        self._ttl = ttl

    def _create_key(self, key: str) -> str:
        """Namespace *key* under the global prefix to avoid collisions."""
        return f"{self._global_key_prefix}:evict:{key}"

    def put(self, objs: List[str], expire=False):
        """Record *objs* in Redis; attach the configured TTL when *expire* is True."""
        ttl = self._ttl if expire else None
        for key in objs:
            self._redis.set(self._create_key(key), "True", ex=ttl)

    def get(self, obj: str):
        """Return the stored marker for *obj*, refreshing its TTL on access.

        Returns ``None`` (after printing a message) on any Redis error.
        """
        try:
            value = self._redis.get(self._create_key(obj))
            # update key expire time when accessed
            if self._ttl:
                self._redis.expire(self._create_key(obj), self._ttl)
            return value
        except redis.RedisError:
            print(f"Error getting key {obj} from cache")
            return None

    @property
    def policy(self) -> str:
        """The configured Redis maxmemory eviction policy (lower-cased), or None."""
        return self._policy

0 comments on commit f9456aa

Please sign in to comment.