KoichiYasuoka
committed on
Commit
·
b0801ed
1
Parent(s):
9b97257
algorithm improved
Browse files
ud.py
CHANGED
@@ -1,3 +1,4 @@
|
|
|
|
1 |
from transformers import TokenClassificationPipeline
|
2 |
|
3 |
class UniversalDependenciesPipeline(TokenClassificationPipeline):
|
@@ -7,24 +8,25 @@ class UniversalDependenciesPipeline(TokenClassificationPipeline):
|
|
7 |
with torch.no_grad():
|
8 |
e=self.model(input_ids=torch.tensor([v[0:i]+[self.tokenizer.mask_token_id]+v[i+1:]+[j] for i,j in enumerate(v[1:-1],1)],device=self.device))
|
9 |
return {"logits":e.logits[:,1:-2,:],**model_inputs}
|
|
|
|
|
10 |
def postprocess(self,model_outputs,**kwargs):
|
11 |
-
import numpy
|
12 |
if "logits" not in model_outputs:
|
13 |
return "".join(self.postprocess(x,**kwargs) for x in model_outputs)
|
14 |
e=model_outputs["logits"].numpy()
|
15 |
r=[1 if i==0 else -1 if j.endswith("|root") else 0 for i,j in sorted(self.model.config.id2label.items())]
|
16 |
-
e+=numpy.where(numpy.add.outer(numpy.identity(e.shape[0]),r)==0,0
|
17 |
g=self.model.config.label2id["X|_|goeswith"]
|
18 |
r=numpy.tri(e.shape[0])
|
19 |
for i in range(e.shape[0]):
|
20 |
for j in range(i+2,e.shape[1]):
|
21 |
-
r[i,j]=r[i,j-1] if numpy.
|
22 |
-
e[:,:,g]+=numpy.where(r==0,0
|
23 |
-
m,p=numpy.
|
24 |
h=self.chu_liu_edmonds(m)
|
25 |
z=[i for i,j in enumerate(h) if i==j]
|
26 |
if len(z)>1:
|
27 |
-
k,h=z[numpy.
|
28 |
m[:,z]+=[[0 if j in z and (i!=j or i==k) else h for i in z] for j in range(m.shape[0])]
|
29 |
h=self.chu_liu_edmonds(m)
|
30 |
v=[(s,e) for s,e in model_outputs["offset_mapping"][0].tolist() if s<e]
|
@@ -35,14 +37,31 @@ class UniversalDependenciesPipeline(TokenClassificationPipeline):
|
|
35 |
h=[b if i>b else b-1 for a,b in enumerate(h) if i!=a]
|
36 |
v[i-1]=(v[i-1][0],v.pop(i)[1])
|
37 |
q.pop(i)
|
|
|
|
|
|
|
|
|
38 |
t=model_outputs["sentence"].replace("\n"," ")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
39 |
u="# text = "+t+"\n"
|
40 |
for i,(s,e) in enumerate(v):
|
41 |
u+="\t".join([str(i+1),t[s:e],"_",q[i][0],"_","|".join(q[i][1:-1]),str(0 if h[i]==i else h[i]+1),q[i][-1],"_","_" if i+1<len(v) and e<v[i+1][0] else "SpaceAfter=No"])+"\n"
|
42 |
return u+"\n"
|
43 |
def chu_liu_edmonds(self,matrix):
|
44 |
-
|
45 |
-
h=numpy.nanargmax(matrix,axis=0)
|
46 |
x=[-1 if i==j else j for i,j in enumerate(h)]
|
47 |
for b in [lambda x,i,j:-1 if i not in x else x[i],lambda x,i,j:-1 if j<0 else x[j]]:
|
48 |
y=[]
|
@@ -53,10 +72,10 @@ class UniversalDependenciesPipeline(TokenClassificationPipeline):
|
|
53 |
if max(x)<0:
|
54 |
return h
|
55 |
y,x=[i for i,j in enumerate(x) if j==max(x)],[i for i,j in enumerate(x) if j<max(x)]
|
56 |
-
z=matrix-numpy.
|
57 |
-
m=numpy.block([[z[x,:][:,x],numpy.
|
58 |
-
k=[j if i==len(x) else x[j] if j<len(x) else y[numpy.
|
59 |
h=[j if i in y else k[x.index(i)] for i,j in enumerate(h)]
|
60 |
-
i=y[numpy.
|
61 |
h[i]=x[k[-1]] if k[-1]<len(x) else i
|
62 |
return h
|
|
|
1 |
+
import numpy
|
2 |
from transformers import TokenClassificationPipeline
|
3 |
|
4 |
class UniversalDependenciesPipeline(TokenClassificationPipeline):
|
|
|
8 |
with torch.no_grad():
|
9 |
e=self.model(input_ids=torch.tensor([v[0:i]+[self.tokenizer.mask_token_id]+v[i+1:]+[j] for i,j in enumerate(v[1:-1],1)],device=self.device))
|
10 |
return {"logits":e.logits[:,1:-2,:],**model_inputs}
|
11 |
+
def check_model_type(self,supported_models):
|
12 |
+
pass
|
13 |
def postprocess(self,model_outputs,**kwargs):
|
|
|
14 |
if "logits" not in model_outputs:
|
15 |
return "".join(self.postprocess(x,**kwargs) for x in model_outputs)
|
16 |
e=model_outputs["logits"].numpy()
|
17 |
r=[1 if i==0 else -1 if j.endswith("|root") else 0 for i,j in sorted(self.model.config.id2label.items())]
|
18 |
+
e+=numpy.where(numpy.add.outer(numpy.identity(e.shape[0]),r)==0,0,-numpy.inf)
|
19 |
g=self.model.config.label2id["X|_|goeswith"]
|
20 |
r=numpy.tri(e.shape[0])
|
21 |
for i in range(e.shape[0]):
|
22 |
for j in range(i+2,e.shape[1]):
|
23 |
+
r[i,j]=r[i,j-1] if numpy.argmax(e[i,j-1])==g else 1
|
24 |
+
e[:,:,g]+=numpy.where(r==0,0,-numpy.inf)
|
25 |
+
m,p=numpy.max(e,axis=2),numpy.argmax(e,axis=2)
|
26 |
h=self.chu_liu_edmonds(m)
|
27 |
z=[i for i,j in enumerate(h) if i==j]
|
28 |
if len(z)>1:
|
29 |
+
k,h=z[numpy.argmax(m[z,z])],numpy.min(m)-numpy.max(m)
|
30 |
m[:,z]+=[[0 if j in z and (i!=j or i==k) else h for i in z] for j in range(m.shape[0])]
|
31 |
h=self.chu_liu_edmonds(m)
|
32 |
v=[(s,e) for s,e in model_outputs["offset_mapping"][0].tolist() if s<e]
|
|
|
37 |
h=[b if i>b else b-1 for a,b in enumerate(h) if i!=a]
|
38 |
v[i-1]=(v[i-1][0],v.pop(i)[1])
|
39 |
q.pop(i)
|
40 |
+
elif v[i-1][1]>v[i][0]:
|
41 |
+
h=[b if i>b else b-1 for a,b in enumerate(h) if i!=a]
|
42 |
+
v[i-1]=(v[i-1][0],v.pop(i)[1])
|
43 |
+
q.pop(i)
|
44 |
t=model_outputs["sentence"].replace("\n"," ")
|
45 |
+
for i,(s,e) in reversed(list(enumerate(v))):
|
46 |
+
w=t[s:e]
|
47 |
+
if w.startswith(" "):
|
48 |
+
j=len(w)-len(w.lstrip())
|
49 |
+
w=w.lstrip()
|
50 |
+
v[i]=(v[i][0]+j,v[i][1])
|
51 |
+
if w.endswith(" "):
|
52 |
+
j=len(w)-len(w.rstrip())
|
53 |
+
w=w.rstrip()
|
54 |
+
v[i]=(v[i][0],v[i][1]-j)
|
55 |
+
if w.strip()=="":
|
56 |
+
h=[b if i>b else b-1 for a,b in enumerate(h) if i!=a]
|
57 |
+
v.pop(i)
|
58 |
+
q.pop(i)
|
59 |
u="# text = "+t+"\n"
|
60 |
for i,(s,e) in enumerate(v):
|
61 |
u+="\t".join([str(i+1),t[s:e],"_",q[i][0],"_","|".join(q[i][1:-1]),str(0 if h[i]==i else h[i]+1),q[i][-1],"_","_" if i+1<len(v) and e<v[i+1][0] else "SpaceAfter=No"])+"\n"
|
62 |
return u+"\n"
|
63 |
def chu_liu_edmonds(self,matrix):
|
64 |
+
h=numpy.argmax(matrix,axis=0)
|
|
|
65 |
x=[-1 if i==j else j for i,j in enumerate(h)]
|
66 |
for b in [lambda x,i,j:-1 if i not in x else x[i],lambda x,i,j:-1 if j<0 else x[j]]:
|
67 |
y=[]
|
|
|
72 |
if max(x)<0:
|
73 |
return h
|
74 |
y,x=[i for i,j in enumerate(x) if j==max(x)],[i for i,j in enumerate(x) if j<max(x)]
|
75 |
+
z=matrix-numpy.max(matrix,axis=0)
|
76 |
+
m=numpy.block([[z[x,:][:,x],numpy.max(z[x,:][:,y],axis=1).reshape(len(x),1)],[numpy.max(z[y,:][:,x],axis=0),numpy.max(z[y,y])]])
|
77 |
+
k=[j if i==len(x) else x[j] if j<len(x) else y[numpy.argmax(z[y,x[i]])] for i,j in enumerate(self.chu_liu_edmonds(m))]
|
78 |
h=[j if i in y else k[x.index(i)] for i,j in enumerate(h)]
|
79 |
+
i=y[numpy.argmax(z[x[k[-1]],y] if k[-1]<len(x) else z[y,y])]
|
80 |
h[i]=x[k[-1]] if k[-1]<len(x) else i
|
81 |
return h
|