BigSnarf blog

Infosec FTW

Redis Analytics

linkedHashMap

I LOVE LUCY Chocolate Factory

Scala Redis LinkedHashMap implementation


/*
 * Implements a queue/cache that holds key/value pairs and flushes a pair to
 * Redis once it has been updated `flush_interval` times.
 * Uses LinkedHashMap-like semantics: a Redis hash for the values plus a
 * sorted set ("flush_counter") holding a per-key update counter.
 *
 * Example usage:
 *   val items = Map("foo" -> "bar", "yin" -> "yang", "One" -> "1", "some_key" -> "some_value")
 *   RedisLinkedHashMap.bulk(items)
 */
object RedisLinkedHashMap {
  import com.redis._

  // Shared Redis connection.
  // NOTE(review): the original referenced `r` but only defined it inside a
  // comment block, which cannot compile; it is now a real member.
  val r = new RedisClient("localhost", 6379)

  val capacity = 5000       // max number of keys (not yet enforced)
  val flush_interval = 1000 // flush a key on every `flush_interval`-th update

  // On JVM shutdown, spawn a thread that flushes all tracked keys to Redis.
  Runtime.getRuntime.addShutdownHook(new Thread { override def run { flushAll() } })

  /** Random UUID string, usable as a unique key. */
  def uuid = java.util.UUID.randomUUID.toString

  /** Store `key` -> `value` in the hash and increment its update counter. */
  def _update(key: String, value: String) {
    r.hset("map_collection_queue", key, value)
    val count = r.zincrby("flush_counter", 1, key)
    println(key)
    println(value)
    println(count)
  }

  /**
   * Insert `key` if absent, otherwise update it; when the key's counter
   * reaches `flush_interval` the key is flushed to Redis and restarted.
   */
  def getOrUpdate(key: String, value: String) {
    val existing = r.hget("map_collection_queue", key)
    if (existing.isEmpty) {
      println("new entry")
      _update(key, value)
    } else if (r.zrank("flush_counter", key).isEmpty) {
      // Key is in the hash but has no counter yet.
      // Fixed: the original called `update`, which does not exist.
      _update(key, value)
    } else {
      // Fixed: missing score no longer crashes via `.get`; threshold now uses
      // `flush_interval` instead of the hard-coded 10.0 the comment contradicted.
      val score = r.zscore("flush_counter", key).getOrElse(0.0)
      if (score >= flush_interval.toDouble) {
        println("flushing")
        flush(key)
      }
      _update(key, value)
    }
  }

  /**
   * Snapshot of the hash as an immutable Map, iterated in counter-rank order.
   * Keys whose hash entry vanished between the zrange and hget are skipped
   * instead of crashing on `Option.get`.
   */
  def getMap: Map[String, String] = {
    val orderedKeys = r.zrange("flush_counter", 0, -1)
    orderedKeys.getOrElse(Nil).flatMap { key =>
      r.hget("map_collection_queue", key).map(key -> _)
    }.toMap
  }

  /**
   * Snapshot of key -> update-count. A key with no score defaults to 0.0.
   * Fixed: the original's `if` branch produced Unit, making the list element
   * type Any and the subsequent tuple destructuring uncompilable.
   */
  def getMapCounter: Map[String, Double] = {
    val orderedKeys = r.zrange("flush_counter", 0, -1)
    orderedKeys.getOrElse(Nil).map { zKey =>
      zKey -> r.zscore("flush_counter", zKey).getOrElse(0.0)
    }.toMap
  }

  /** Bulk-load items into the hash plus an insertion-order sorted set. */
  def bulk(itemz: Map[String, String]) {
    // foreach, not map: these calls are pure side effects.
    itemz.foreach { case (key, value) =>
      val count = r.incr("hash_counter").get.toDouble // insertion position of this element
      r.hset("map_collection_queue", key, value)      // generic unordered map
      r.zadd("hash_order", count, key)
    }
  }

  /** Remove `key` from both the value hash and its flush counter. */
  def flush(key: String) {
    r.hdel("map_collection_queue", key)
    r.zrem("flush_counter", key)
    println("flushing " + key)
  }

  /** Flush every tracked key; invoked by the shutdown hook (was an empty stub). */
  def flushAll() {
    r.zrange("flush_counter", 0, -1).getOrElse(Nil).foreach(flush)
  }
}

view raw

gistfile1.txt

hosted with ❤ by GitHub


/*
 * Implements a queue/cache that holds key/value pairs and flushes a pair to
 * Redis once it has been updated `flush_interval` times.
 * Uses LinkedHashMap-like semantics: a Redis hash for the values plus a
 * sorted set ("flush_counter") holding a per-key update counter.
 *
 * Example usage:
 *   val items = Map("foo" -> "bar", "yin" -> "yang", "One" -> "1", "some_key" -> "some_value")
 *   RedisLinkedHashMap.bulk(items)
 */
object RedisLinkedHashMap {
  import com.redis._

  // Shared Redis connection.
  // NOTE(review): the original referenced `r` but only defined it inside a
  // comment block, which cannot compile; it is now a real member.
  val r = new RedisClient("localhost", 6379)

  val capacity = 5000       // max number of keys (not yet enforced)
  val flush_interval = 1000 // flush a key on every `flush_interval`-th update

  // On JVM shutdown, spawn a thread that flushes all tracked keys to Redis.
  Runtime.getRuntime.addShutdownHook(new Thread { override def run { flushAll() } })

  /** Random UUID string, usable as a unique key. */
  def uuid = java.util.UUID.randomUUID.toString

  /** Store `key` -> `value` in the hash and increment its update counter. */
  def _update(key: String, value: String) {
    r.hset("map_collection_queue", key, value)
    val count = r.zincrby("flush_counter", 1, key)
    println(key)
    println(value)
    println(count)
  }

  /**
   * Insert `key` if absent, otherwise update it; when the key's counter
   * reaches `flush_interval` the key is flushed to Redis and restarted.
   */
  def getOrUpdate(key: String, value: String) {
    val existing = r.hget("map_collection_queue", key)
    if (existing.isEmpty) {
      println("new entry")
      _update(key, value)
    } else if (r.zrank("flush_counter", key).isEmpty) {
      // Key is in the hash but has no counter yet.
      // Fixed: the original called `update`, which does not exist.
      _update(key, value)
    } else {
      // Fixed: missing score no longer crashes via `.get`; threshold now uses
      // `flush_interval` instead of the hard-coded 10.0 the comment contradicted.
      val score = r.zscore("flush_counter", key).getOrElse(0.0)
      if (score >= flush_interval.toDouble) {
        println("flushing")
        flush(key)
      }
      _update(key, value)
    }
  }

  /**
   * Snapshot of the hash as an immutable Map, iterated in counter-rank order.
   * Keys whose hash entry vanished between the zrange and hget are skipped
   * instead of crashing on `Option.get`.
   */
  def getMap: Map[String, String] = {
    val orderedKeys = r.zrange("flush_counter", 0, -1)
    orderedKeys.getOrElse(Nil).flatMap { key =>
      r.hget("map_collection_queue", key).map(key -> _)
    }.toMap
  }

  /**
   * Snapshot of key -> update-count. A key with no score defaults to 0.0.
   * Fixed: the original's `if` branch produced Unit, making the list element
   * type Any and the subsequent tuple destructuring uncompilable.
   */
  def getMapCounter: Map[String, Double] = {
    val orderedKeys = r.zrange("flush_counter", 0, -1)
    orderedKeys.getOrElse(Nil).map { zKey =>
      zKey -> r.zscore("flush_counter", zKey).getOrElse(0.0)
    }.toMap
  }

  /** Bulk-load items into the hash plus an insertion-order sorted set. */
  def bulk(itemz: Map[String, String]) {
    // foreach, not map: these calls are pure side effects.
    itemz.foreach { case (key, value) =>
      val count = r.incr("hash_counter").get.toDouble // insertion position of this element
      r.hset("map_collection_queue", key, value)      // generic unordered map
      r.zadd("hash_order", count, key)
    }
  }

  /** Remove `key` from both the value hash and its flush counter. */
  def flush(key: String) {
    r.hdel("map_collection_queue", key)
    r.zrem("flush_counter", key)
    println("flushing " + key)
  }

  /** Flush every tracked key; invoked by the shutdown hook (was an empty stub). */
  def flushAll() {
    r.zrange("flush_counter", 0, -1).getOrElse(Nil).foreach(flush)
  }
}

view raw

gistfile1.txt

hosted with ❤ by GitHub

 

Links

Leave a Reply

Fill in your details below or click an icon to log in:

WordPress.com Logo

You are commenting using your WordPress.com account. Log Out /  Change )

Google photo

You are commenting using your Google account. Log Out /  Change )

Twitter picture

You are commenting using your Twitter account. Log Out /  Change )

Facebook photo

You are commenting using your Facebook account. Log Out /  Change )

Connecting to %s

%d bloggers like this: