remove handler
Browse files
- .gitignore +2 -1
- __pycache__/handler.cpython-38.pyc +0 -0
- handler.py +2 -1
.gitignore
CHANGED
@@ -1,3 +1,4 @@
 test_handler.py
 audio1.wav
-test_online_inference.py
+test_online_inference.py
+handler_tmp.py
__pycache__/handler.cpython-38.pyc
CHANGED
Binary files a/__pycache__/handler.cpython-38.pyc and b/__pycache__/handler.cpython-38.pyc differ
|
|
handler.py
CHANGED
@@ -42,7 +42,8 @@ class EndpointHandler():
         """
 
         inputs = data.pop("inputs", data)
-        with torch.cuda.amp.autocast():
+        # with torch.cuda.amp.autocast():
+        with torch.no_grad():
             prediction = self.pipeline(inputs, generate_kwargs={"forced_decoder_ids": self.forced_decoder_ids}, max_new_tokens=255)
             # prediction = self.pipeline(inputs, return_timestamps=False)
             prediction['text'] = prediction['text'] + '????'