use tokenizer fix only for str + remote code
ShaderEval.py +4 -3
CHANGED
@@ -76,9 +76,10 @@ class ReturnGenerationEvaluator(evaluate.TextGenerationEvaluator):
             or isinstance(model_or_pipeline, transformers.PreTrainedModel)
             or isinstance(model_or_pipeline, transformers.TFPreTrainedModel)
         ):
-            # load tokenizer manually, since the pipeline sometimes fails to do so; needed for bigcode/santacoder, for example.
-            tokenizer = AutoTokenizer.from_pretrained(model_or_pipeline, trust_remote_code=True)
-
+            if isinstance(model_or_pipeline, str):
+                # load tokenizer manually, since the pipeline sometimes fails to do so; needed for bigcode/santacoder, for example.
+                tokenizer = AutoTokenizer.from_pretrained(model_or_pipeline, trust_remote_code=True)
+
             pipe = pipeline(
                 self.task,
                 model=model_or_pipeline,
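For context, a minimal, self-contained sketch of the pattern this commit settles on. The helper name build_generation_pipe, the task default, and the explicit tokenizer hand-off to pipeline() are assumptions for illustration; only the isinstance(model_or_pipeline, str) guard and the AutoTokenizer.from_pretrained(..., trust_remote_code=True) call come from the hunk above.

    from transformers import AutoTokenizer, pipeline

    def build_generation_pipe(model_or_pipeline, task="text-generation"):
        tokenizer = None
        if isinstance(model_or_pipeline, str):
            # Load the tokenizer manually: pipeline() sometimes fails to resolve it
            # on its own for custom-code checkpoints such as bigcode/santacoder.
            tokenizer = AutoTokenizer.from_pretrained(model_or_pipeline, trust_remote_code=True)

        # tokenizer=None leaves resolution to pipeline() as usual; the manually
        # loaded instance only takes over in the string/model-id case above.
        return pipeline(task, model=model_or_pipeline, tokenizer=tokenizer)

Called as build_generation_pipe("bigcode/santacoder"), this loads the remote-code tokenizer up front instead of relying on pipeline() to find it, while leaving already-constructed model objects untouched, which is the intent stated in the commit title.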