Project author: IMSEONGJUN

Project description: A Custom Swift Cache Object using LRU Algorithm
Language: Swift
Repository: git://github.com/IMSEONGJUN/SwiftyLRUCache.git
Created: 2020-09-05T17:15:53Z
Project community: https://github.com/IMSEONGJUN/SwiftyLRUCache

License:



SwiftyLRUCache

  • Custom Swift cache using the LRU (Least Recently Used) algorithm
  • Built on a doubly linked list and a hash map
  • The hash map (Dictionary) provides fast lookup of a node by key
  • The doubly linked list serves as the storage, offering cheaper insertion than an Array and fast deletion of nodes in the middle of the list
  • Most importantly, the doubly linked list makes the least recently used node easy to find: it is always the tail node (see the usage sketch below)
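A minimal usage sketch, not part of the original repository, showing the eviction behavior; the capacity, keys, and values are illustrative, and the list-order comments follow the implementation shown after this sketch.

    // Create a cache that holds at most two entries.
    let cache = SwiftyLRUCache<String, Int>(capacity: 2)

    cache.setValue(value: 1, forKey: "a")    // list: a
    cache.setValue(value: 2, forKey: "b")    // list: b, a

    // Reading "a" moves it to the head, so "b" becomes the least recently used entry.
    _ = cache.getValue(forKey: "a")          // list: a, b

    // Inserting a third key evicts the tail node ("b").
    cache.setValue(value: 3, forKey: "c")    // list: c, a

    print(cache.getValue(forKey: "b") as Any)   // nil (evicted)
    print(cache.getValue(forKey: "a") as Any)   // Optional(1)

The full implementation follows.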
public final class SwiftyLRUCache<Key: Hashable, Value> where Key: Comparable {

    /// Node of the doubly linked list.
    private class ListNode {
        var key: Key?
        var value: Value?
        var prevNode: ListNode?
        var nextNode: ListNode?

        init(key: Key? = nil, value: Value? = nil) {
            self.key = key
            self.value = value
        }
    }

    /// Dictionary for fast node lookup by key.
    private var nodeDictionary = [Key: ListNode]()
    /// Maximum number of entries the cache may hold.
    private var capacity = 0
    /// Sentinel node; head.nextNode is the actual first node in the doubly linked list.
    private var head = ListNode()
    /// Sentinel node; tail.prevNode is the actual last node in the doubly linked list.
    private var tail = ListNode()

    /// Initializes the cache with an empty doubly linked list.
    public init(capacity: Int) {
        self.capacity = capacity
        head.nextNode = tail
        tail.prevNode = head
    }

    /// Removes a node from the doubly linked list and from the dictionary.
    private func remove(node: ListNode) {
        node.prevNode?.nextNode = node.nextNode
        node.nextNode?.prevNode = node.prevNode
        guard let key = node.key else { return }
        nodeDictionary.removeValue(forKey: key)
    }

    /// Insertion always happens on the head side.
    private func insertToHead(node: ListNode) {
        head.nextNode?.prevNode = node
        node.nextNode = head.nextNode
        node.prevNode = head
        head.nextNode = node
        guard let key = node.key else { return }
        nodeDictionary.updateValue(node, forKey: key)
    }

    /// On a cache hit, remove the node and re-insert it on the head side.
    public func getValue(forKey key: Key) -> Value? {
        guard let node = nodeDictionary[key] else { return nil }
        remove(node: node)
        insertToHead(node: node)
        return node.value
    }

    /// Stores a value. If a node with the same key already exists, it is removed first.
    /// Otherwise, if the cache is full, the least recently used node is evicted before the new node is inserted.
    public func setValue(value: Value, forKey key: Key) {
        let newNode = ListNode(key: key, value: value)
        if let oldNode = nodeDictionary[key] {
            remove(node: oldNode)
        } else if nodeDictionary.count >= capacity,
                  let tailNode = tail.prevNode {
            remove(node: tailNode) // evict the least recently used node
        }
        insertToHead(node: newNode)
    }

    /// Prints the cached values sorted by key.
    public func description() {
        let values = nodeDictionary.sorted(by: { $0.0 < $1.0 }).map { $0.value }
        values.forEach {
            print($0.value.debugDescription)
        }
    }
}
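One more hedged sketch, also not from the repository: it shows that writing to an existing key refreshes its position (so a different entry is evicted), and how description() prints the remaining values. The expected output assumes Swift's default Optional debug description.

    let cache = SwiftyLRUCache<String, Int>(capacity: 2)
    cache.setValue(value: 10, forKey: "x")
    cache.setValue(value: 20, forKey: "y")

    // Updating an existing key replaces its node and moves it to the head,
    // so "x" becomes the least recently used entry.
    cache.setValue(value: 99, forKey: "y")

    cache.setValue(value: 30, forKey: "z")   // evicts "x"

    cache.description()
    // Expected output (values sorted by key, printed as optionals):
    // Optional(99)
    // Optional(30)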