-
Notifications
You must be signed in to change notification settings - Fork 1.7k
Expand file tree
/
Copy pathkth-smallest-path-xor-sum.py
More file actions
95 lines (84 loc) · 3.11 KB
/
kth-smallest-path-xor-sum.py
File metadata and controls
95 lines (84 loc) · 3.11 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
# Time: O(n * (logn)^2 + qlogn)
# Space: O(n + q)
from sortedcontainers import SortedList
# iterative dfs, small-to-large merging, sorted list
class Solution(object):
    def kthSmallest(self, par, vals, queries):
        """
        Answer queries[i] = [u, k]: the k-th smallest distinct root-to-node
        XOR value among nodes in u's subtree, or -1 if fewer than k exist.

        :type par: List[int]      parent array; par[root] == -1
        :type vals: List[int]     node values (XORed along the root path)
        :type queries: List[List[int]]  each query is [node, k] with k 1-based
        :rtype: List[int]
        """
        def small_to_large_merge(sl1, sl2):  # Total Time: O(n * (logn)^2)
            # Always pour the smaller list into the larger one, so any single
            # value is moved at most O(logn) times across the whole traversal.
            if len(sl1) < len(sl2):
                sl1, sl2 = sl2, sl1
            for x in sl2:  # each node is merged at most O(logn) times
                if x not in sl1:  # keep only distinct XOR values
                    sl1.add(x)  # each add costs O(logn)
            return sl1

        def iter_dfs():
            # sl[u] accumulates the distinct root-to-node XOR values inside
            # u's subtree once u's post-order step (step 2) has run.
            sl = [SortedList() for _ in range(len(adj))]
            result = [-1]*len(queries)
            # Stack entries are (step, (node, xor_of_path_to_parent)):
            # step 1 = pre-order visit, step 2 = post-order merge/answer.
            stk = [(1, (0, 0))]
            while stk:
                step, (u, curr) = stk.pop()
                if step == 1:
                    curr ^= vals[u]
                    sl[u].add(curr)
                    stk.append((2, (u, curr)))
                    # reversed() so children are processed in original order.
                    for v in reversed(adj[u]):
                        stk.append((1, (v, curr)))
                elif step == 2:
                    # All children finished: fold their sets into sl[u].
                    for v in adj[u]:
                        sl[u] = small_to_large_merge(sl[u], sl[v])
                    for i in lookup[u]:  # Total Time: O(qlogn)
                        # queries[i][1] is k (1-based); leave -1 when the
                        # subtree has fewer than k distinct XOR values.
                        if queries[i][1]-1 < len(sl[u]):
                            result[i] = sl[u][queries[i][1]-1]
            return result

        # Build the children adjacency list from the parent array.
        # NOTE: xrange was replaced by range so this also runs on Python 3;
        # the loops only iterate, so behavior is identical on Python 2.
        adj = [[] for _ in range(len(par))]
        for u, p in enumerate(par):
            if p != -1:
                adj[p].append(u)
        # lookup[u] = indices of the queries anchored at node u.
        lookup = [[] for _ in range(len(adj))]
        for i, (u, _) in enumerate(queries):
            lookup[u].append(i)
        return iter_dfs()
# Time: O(n * (logn)^2 + qlogn)
# Space: O(n + q)
from sortedcontainers import SortedList
# dfs, small-to-large merging, sorted list
class Solution2(object):
    def kthSmallest(self, par, vals, queries):
        """
        Recursive variant: answer queries[i] = [u, k], the k-th smallest
        distinct root-to-node XOR value in u's subtree, or -1 if absent.

        :type par: List[int]      parent array; par[root] == -1
        :type vals: List[int]     node values (XORed along the root path)
        :type queries: List[List[int]]  each query is [node, k] with k 1-based
        :rtype: List[int]
        """
        def small_to_large_merge(sl1, sl2):  # Total Time: O(n * (logn)^2)
            # Always pour the smaller list into the larger one, so any single
            # value is moved at most O(logn) times across the whole traversal.
            if len(sl1) < len(sl2):
                sl1, sl2 = sl2, sl1
            for x in sl2:  # each node is merged at most O(logn) times
                if x not in sl1:  # keep only distinct XOR values
                    sl1.add(x)  # each add costs O(logn)
            return sl1

        def dfs(u, curr):
            # Returns the SortedList of distinct XOR values in u's subtree,
            # answering any queries anchored at u along the way.
            curr ^= vals[u]
            sl = SortedList([curr])
            for v in adj[u]:
                sl = small_to_large_merge(sl, dfs(v, curr))
            for i in lookup[u]:  # Total Time: O(qlogn)
                # queries[i][1] is k (1-based); result stays -1 when the
                # subtree has fewer than k distinct XOR values.
                if queries[i][1]-1 < len(sl):
                    result[i] = sl[queries[i][1]-1]
            return sl

        # Build the children adjacency list from the parent array.
        # NOTE: xrange was replaced by range so this also runs on Python 3;
        # the loops only iterate, so behavior is identical on Python 2.
        adj = [[] for _ in range(len(par))]
        for u, p in enumerate(par):
            if p != -1:
                adj[p].append(u)
        # lookup[u] = indices of the queries anchored at node u.
        lookup = [[] for _ in range(len(adj))]
        for i, (u, _) in enumerate(queries):
            lookup[u].append(i)
        result = [-1]*len(queries)
        dfs(0, 0)
        return result