We are given a list of skills and a list of points, one per skill. The problem is to compute the maximum score of a given string, where the score is the sum of the points of the matched skills in a segmentation. For example, say the skills list is {a, ab, abd, cd}, the points are {1, 5, 2, 3}, and the given string is "abdecdab". Then the maximum score is 13, achieved by ab(matched) + d(unmatched) + e(unmatched) + cd(matched) + ab(matched) = 5 + 0 + 0 + 3 + 5 = 13. The string can also be segmented as abd(matched) + e(unmatched) + c(unmatched) + d(unmatched) + ab(matched), or as abd(matched) + e(unmatched) + c(unmatched) + d(unmatched) + a(matched) + b(unmatched), but those do not give the maximum score. I think this is a dynamic programming problem. How can the maximum score of the given string be computed?
from collections import defaultdict
class TrieNode:
    """One node of the skill trie: a child map plus a terminal score."""

    def __init__(self):
        # score is 0 unless a whole skill ends exactly at this node.
        self.score = 0
        # defaultdict auto-creates a fresh child node on first access,
        # so insertion can walk and extend a path in a single pass.
        self.children = defaultdict(TrieNode)


class Trie:
    """Prefix tree mapping each skill string to its score."""

    def __init__(self):
        self.root = TrieNode()

    def insert(self, word, score):
        """Store *score* at the node reached by walking *word* from the root."""
        cur = self.root
        for ch in word:
            cur = cur.children[ch]  # autovivifies any missing child
        cur.score = score
def solution(skills, scores, pressed):
    """Return the maximum total score obtainable from the string *pressed*.

    Every position either starts a match of some skill (earning that
    skill's score) or is left unmatched (earning 0 — the original code
    wrongly awarded 1 point per unmatched character, via the
    ``range(len(pressed), 0, -1)`` initialisation and the ``dp[i+1] + 1``
    transition, returning 15 instead of 13 on the worked example).

    dp[i] holds the best score for the suffix pressed[i:], with the
    sentinel dp[n] = 0 so dp[j + 1] is always a valid index.
    """
    trie = Trie()
    for skill, score in zip(skills, scores):
        trie.insert(skill, score)

    n = len(pressed)
    dp = [0] * (n + 1)  # dp[n] = 0: the empty suffix scores nothing
    for i in range(n - 1, -1, -1):
        # Option 1: leave pressed[i] unmatched — worth nothing.
        dp[i] = dp[i + 1]
        # Option 2: match some skill starting at i; walk the trie
        # along the suffix and try every skill that ends at j.
        node = trie.root
        for j in range(i, n):
            if pressed[j] not in node.children:
                break  # no skill has this prefix; longer matches impossible
            node = node.children[pressed[j]]
            if node.score:
                # pressed[i..j] is a complete skill: take its score
                # plus the best score of the remaining suffix.
                dp[i] = max(dp[i], node.score + dp[j + 1])
    return dp[0]
if __name__ == "__main__":
    # Worked example from the problem statement: the best segmentation is
    # ab + d + e + cd + ab = 5 + 0 + 0 + 3 + 5 = 13.
    skills = ["a", "ab", "abd", "cd"]
    scores = [1, 5, 2, 3]
    pressed = "abdecdab"
    print(solution(skills, scores, pressed))  # expected: 13