You are viewing a single comment's thread. Return to all comments →
Very nice solution. Thanks! Here is a C++ version:
#include <iostream>
#include <string>
#include <unordered_map>
#include <vector>
using namespace std;

// Returns every non-empty prefix of `word`, e.g. "abc" -> {"a", "ab", "abc"}.
vector<string> tokenize(string word){
    int n = word.length();
    vector<string> tokens(n);
    for (int i = 0; i < n; i++){
        tokens[i] = word.substr(0, i + 1);
    }
    return tokens;
}

// Tries: Contacts — counts, for every prefix, how many added contacts
// start with it; "find <p>" prints that count (0 if never seen).
int main(){
    // Bulk stdin/stdout: drop C-stdio sync and cin/cout tie for speed.
    ios_base::sync_with_stdio(false);
    cin.tie(nullptr);

    unordered_map<string,int> contact_indices;  // prefix -> #contacts with that prefix
    int n;
    cin >> n;
    for (int a0 = 0; a0 < n; a0++){
        string op, contact;
        cin >> op >> contact;
        if (op == "add"){
            // Bump the counter for every prefix of the new contact.
            // const& avoids copying each prefix string in the loop.
            for (const string& token : tokenize(contact)){
                contact_indices[token]++;
            }
        } else if (op == "find"){
            // Use find() instead of operator[]: the latter default-inserts a
            // zero entry (node allocation + map growth) for every unseen prefix.
            auto it = contact_indices.find(contact);
            cout << (it == contact_indices.end() ? 0 : it->second) << '\n';  // '\n' — endl would flush each line
        }
    }
    return 0;
}
Tries: Contacts
You are viewing a single comment's thread. Return to all comments →
Very nice solution. Thanks! Here is a C++ version: