Spaces:
Runtime error
Canstralian
committed on
Update app.py
app.py CHANGED
@@ -7,13 +7,17 @@ model_path = "Canstralian/pentest_ai"  # Replace with your model path if needed
 model = AutoModelForCausalLM.from_pretrained(model_path)
 tokenizer = AutoTokenizer.from_pretrained(model_path)
 
+# Confirm successful loading
+print(f"Model and Tokenizer loaded from {model_path}")
+
 # Function to handle user inputs and generate responses
 def generate_text(instruction):
     # Encode the input text to token IDs
     inputs = tokenizer.encode(instruction, return_tensors='pt', truncation=True, max_length=512)
+    print(f"Encoded input: {inputs}")
 
     # Generate the output text
-    outputs = model.generate(inputs, max_length=150, num_beams=5,
+    outputs = model.generate(inputs, max_length=150, num_beams=5, do_sample=True)  # Adjust if needed
 
     # Decode the output and return the response
     output_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
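For reference, a minimal sketch of how the generation flow in app.py reads end to end after this commit, assuming the standard transformers API. The return statement, the __main__ guard, and the example prompt below do not appear in the hunk and are assumptions; any UI wiring the Space uses is also omitted.

from transformers import AutoModelForCausalLM, AutoTokenizer

model_path = "Canstralian/pentest_ai"  # Replace with your model path if needed

model = AutoModelForCausalLM.from_pretrained(model_path)
tokenizer = AutoTokenizer.from_pretrained(model_path)

# Confirm successful loading
print(f"Model and Tokenizer loaded from {model_path}")

# Function to handle user inputs and generate responses
def generate_text(instruction):
    # Encode the input text to token IDs
    inputs = tokenizer.encode(instruction, return_tensors='pt', truncation=True, max_length=512)
    print(f"Encoded input: {inputs}")

    # Generate the output text
    outputs = model.generate(inputs, max_length=150, num_beams=5, do_sample=True)

    # Decode the output and return the response
    output_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return output_text  # assumption: the decoded text is returned to the caller

if __name__ == "__main__":
    # Illustrative call only; the prompt string is not part of the commit
    print(generate_text("Explain what a port scan is."))

Note on the generate call: with num_beams=5 and do_sample=True, transformers performs beam-search multinomial sampling rather than plain greedy beam search; the "# Adjust if needed" comment in the hunk suggests these values (and max_length=150) are meant to be tuned for the Space rather than fixed.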