/* LeetCode 146. LRU Cache Here's a focused 1-hour Bloomberg-style HackerRank interview simulation for LRU Cache (LeetCode 146 equivalent) in C++. I'll act as the interviewer. We'll go through explanation, coding, edge cases, and follow-ups exactly as in a real 60-minute session. Interviewer (me): Good morning. Today we'll design and implement an LRU Cache (Least Recently Used Cache). Problem Statement (5 minutes): Design a data structure that follows the constraints of a Least Recently Used (LRU) cache. Implement the LRUCache class: LRUCache(int capacity): Initialize the LRU cache with positive integer capacity. int get(int key): Return the value of the key if the key exists in the cache, otherwise return -1. void put(int key, int value): Update the value of the key if the key exists. Otherwise, add the key-value pair to the cache. If the number of keys exceeds the capacity, evict the least recently used key. Constraints: All operations (get and put) must run in O(1) average time complexity. 1 ≤ capacity ≤ 3000 0 ≤ key ≤ 10^4 0 ≤ value ≤ 10^9 At most 2 * 10^5 calls will be made to get and put. Example: LRUCache lRUCache = new LRUCache(2); lRUCache.put(1, 1); // cache is {1=1} lRUCache.put(2, 2); // cache is {1=1, 2=2} lRUCache.get(1); // return 1 lRUCache.put(3, 3); // LRU key was 2, evicts key 2, cache is {1=1, 3=3} lRUCache.get(2); // returns -1 (not found) lRUCache.put(4, 4); // LRU key was 1, evicts key 1, cache is {3=3, 4=4} lRUCache.get(1); // return -1 (not found) lRUCache.get(3); // return 3 lRUCache.get(4); // return 4 Optimal approach (what Bloomberg expects): std::unordered_map for O(1) key → node access. Doubly Linked List (manual nodes with prev/next) to maintain usage order: Head (dummy) → Most Recently Used (MRU) Tail (dummy) → Least Recently Used (LRU) On get or put (if key exists): move the node to the head (right after dummy head) in O(1) using prev/next pointers. On put when full: remove the node before dummy tail, erase from map. 
This gives O(1) for get/put. Space: O(capacity) */ #include #include using namespace std; struct Node { int key; int value; Node* prev; Node* next; Node(int k = 0, int v = 0) : key(k), value(v) , prev(nullptr), next(nullptr) { } }; class LRUCache { public: unordered_map nodeCache; Node* head; Node* tail; size_t capacity; // Helper: remove node from doubly linked list (O(1)) void removeNode(Node* node) { // prev node next node // ---- <- ---- <- ---- // | | -> |node| -> | | // ---- ---- ---- node->prev->next = node->next; node->next->prev = node->prev; } // Helper: add node right after head (most recent) void addToFront(Node* newNode) { // dummy // ---- <- ---- <- ---- -> // |head| -> |new | -> |prior| // ---- ---- ---- Node* prior = head->next; prior->prev = newNode; newNode->next = prior; newNode->prev = head; head->next = newNode; } // Helper: move existing node to front void moveToFront(Node* node) { removeNode(node); addToFront(node); } public: LRUCache(size_t capacity) : capacity(capacity) { head = new Node(); tail = new Node(); head->next = tail; tail->prev = head; } // Return the value of the key if the key exists in the cache, otherwise return -1. int get(int key) { if (nodeCache.find(key) == nodeCache.end()) return -1; Node* node = nodeCache[key]; moveToFront(node); return node->value; } // Update the value of the key if the key exists. Otherwise, add the key-value pair to the // cache. If the number of keys exceeds the capacity, evict the least recently used key. 
void put(int key, int value) { // update if exists auto search = nodeCache.find(key); if (search != nodeCache.end()) // key exists, update and make node MRU { Node* node = search->second; node->value = value; moveToFront(node); } else // add key-value pair, check capacity and evict lru key if needed { if (nodeCache.size() == capacity) { Node* lruNode = tail->prev; removeNode(lruNode); // remove from map and delete nodeCache.erase(lruNode->key); delete lruNode; } // add Node* node = new Node(key, value); nodeCache[key] = node; addToFront(node); } } ~LRUCache() { // delete nodes } }; void printCache(LRUCache* cache) { for (const auto& [key, node]: cache->nodeCache) cout << format(" key: {} node: {} value: {}\n", key, static_cast(node), node->value); } int main(void) { vector, vector>, vector> >> testCases = { { {"LRUCache", "put", "put", "get", "put", "get", "put", "get", "get", "get"}, { {2,0}, {1,1}, {2,2}, {1,0}, {3,3}, {2,0}, {4,4}, {1,0}, {3,0}, {4,0}}, // output { {2}, // init cache capacity 2 {1,1}, // put 1,1 -> map 1,1 {2,2,1,1}, // put 2,2 -> map 1,1 2,2 {1}, // get 1 -> 1 {3,3,1,1}, // put 3,3 -> evict key 2, cache is 1,1 3,3 {-1}, // get 2 -> -1 not found {4,4,3,3}, // put 4,4 -> evict key 1, cache is 3,3 4,4 {-1}, // get 1 -> -1 {3}, // get 3 -> 3 {4} // get 4 -> 4 } }, }; [[maybe_unused]] LRUCache* cache; for (auto& tc: testCases) { size_t n = get<0>(tc).size(); for (size_t i = 0; i < n; i++) { vector commands = get<0>(tc); vector> inputs = get<1>(tc); vector> expected = get<2>(tc); if (commands.at(i) == "LRUCache") { size_t capacity = inputs.at(i).first; cout << "LRUCache capacity=" << capacity << endl; cache = new LRUCache(capacity); printCache(cache); vector result{(int)cache->capacity}; EXPECT_EQ(expected[i], result); } else if (commands.at(i) == "put") { int key = inputs.at(i).first; int value = inputs.at(i).second; cout << "put(" << key << "," << value << ")" << endl; cache->put(key,value); printCache(cache); vector result; for (auto& [k,node] : 
cache->nodeCache) { result.push_back(k); result.push_back(node->value); } EXPECT_EQ(expected[i], result); } else if (commands.at(i) == "get") { int key = inputs.at(i).first; cout << "get(" << key << ") --> "; vector result; result.push_back(cache->get(key)); cout << result[0] << endl; printCache(cache); EXPECT_EQ(expected[i], result); } else cout << "Error\n"; } } return 0; }