Add task module, and pipeline the task and every parser (#49)
@@ -67,4 +67,6 @@ def tokenize(d, t, eng):
         d["content_ltks"] = " ".join([stemmer.stem(w) for w in word_tokenize(t)])
     else:
         d["content_ltks"] = huqie.qie(t)
-        d["content_sm_ltks"] = huqie.qieqie(d["content_ltks"])
+    d["content_sm_ltks"] = huqie.qieqie(d["content_ltks"])
+
+
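For reference, a minimal self-contained sketch of how the tokenize helper reads after this hunk. It assumes the indented assignment is the removed line and the dedented one is the added line (so fine-grained tokens are built on both branches), and the import paths for huqie and the stemmer are assumptions, since they are not shown in the hunk.

# Sketch of tokenize() after this change; import paths are assumed, not taken from the hunk.
from nltk.stem import PorterStemmer      # assumption: `stemmer` is an NLTK Porter stemmer
from nltk.tokenize import word_tokenize

from rag.nlp import huqie                # assumption: project-internal Chinese tokenizer module

stemmer = PorterStemmer()

def tokenize(d, t, eng):
    if eng:
        # English text: split into words and stem each token.
        d["content_ltks"] = " ".join([stemmer.stem(w) for w in word_tokenize(t)])
    else:
        # Non-English text: coarse tokenization via huqie.
        d["content_ltks"] = huqie.qie(t)
    # After this patch the fine-grained tokens are derived for both branches,
    # not only on the non-English path.
    d["content_sm_ltks"] = huqie.qieqie(d["content_ltks"])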