remove image tokens from chatglm-6b
modeling_chatglm.py CHANGED (+1, -1)
@@ -28,7 +28,7 @@ from transformers.utils import logging
 from transformers.generation.logits_process import LogitsProcessor
 from transformers.generation.utils import LogitsProcessorList, StoppingCriteriaList, GenerationConfig
 
-from
+from configuration_chatglm import ChatGLMConfig
 
 # flags required to enable jit fusion kernels
 torch._C._jit_set_profiling_mode(False)
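For context, modeling_chatglm.py and configuration_chatglm.py ship inside the model repository and are loaded as custom code with trust_remote_code, which is why the import is a flat "from configuration_chatglm import ChatGLMConfig" rather than a package-relative one. A minimal usage sketch follows; the THUDM/chatglm-6b checkpoint name, half precision, and CUDA device are assumptions, not part of this commit:

from transformers import AutoModel, AutoTokenizer

# Sketch only: loading the checkpoint pulls in the repo's custom
# modeling_chatglm.py / configuration_chatglm.py, where the changed
# import above lives. Checkpoint name and device setup are assumed.
tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)
model = AutoModel.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True).half().cuda()
model = model.eval()

# The custom model class exposes a chat() helper on top of generate().
response, history = model.chat(tokenizer, "Hello", history=[])
print(response)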