Darka001 committed on
Commit
61edd3a
·
verified ·
1 Parent(s): 97d9b1c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -64,7 +64,7 @@ if compute_dtype == torch.float16 and use_4bit:
64
 
65
  model = AutoModelForCausalLM.from_pretrained(
66
  model_name, quantization_config=bnb_config)
67
- stop_list = [" \n\nAnswer:", " \n\n"]
68
  stop_token_ids = [tokenizer(x, return_tensors='pt', add_special_tokens=False)['input_ids'] for x in stop_list]
69
  stop_token_ids = [torch.LongTensor(x).to("cuda") for x in stop_token_ids]
70
 
@@ -97,8 +97,8 @@ db3 = Chroma(persist_directory="chroma/", embedding_function=instructor_embeddin
97
 
98
 
99
 
100
- retriever = db3.as_retriever(search_type="similarity_score_threshold",
101
- search_kwargs={"score_threshold": .8,
102
  "k": 20})
103
 
104
  #retriever = db3.as_retriever(search_kwargs={"k":15})
 
64
 
65
  model = AutoModelForCausalLM.from_pretrained(
66
  model_name, quantization_config=bnb_config)
67
+ stop_list = [" \n\nAnswer:", " \n ", " \n\n"]
68
  stop_token_ids = [tokenizer(x, return_tensors='pt', add_special_tokens=False)['input_ids'] for x in stop_list]
69
  stop_token_ids = [torch.LongTensor(x).to("cuda") for x in stop_token_ids]
70
 
 
97
 
98
 
99
 
100
+ retriever = db3.as_retriever(search_type="mmr",
101
+ search_kwargs={
102
  "k": 20})
103
 
104
  #retriever = db3.as_retriever(search_kwargs={"k":15})