Description
from llama_cpp import Llama

# Load the local GGUF model with logits enabled so log-probabilities can be returned.
llm = Llama(
    model_path="/home/zadmin/.cache/lm-studio/models/TheBloke/MythoMax-L2-13B-GGUF/mythomax-l2-13b.Q8_0.gguf",
    logits_all=True,
    chat_format="chatml",
    n_ctx=10000,
)

def mytho_extraction():
    source_sentence = "That is a happy person"
    sentences = [
        "That is a very happy dog",
        "That is a very happy person",
        "Today is a sunny day",
    ]
    user_message_content = (
        f"Source Sentence: {source_sentence}\n"
        f"Sentences to Match: {' | '.join(sentences)}\n"
        "Please provide the sentence from the list that best matches the source sentence."
    )
    completion = llm.create_chat_completion(
        model="local-model",
        messages=[
            {"role": "system", "content": "Return the sentence that best matches the source sentence."},
            {"role": "user", "content": user_message_content},
        ],
        temperature=0.7,
        logprobs=True,
    )
    generated_sentence = completion
    print(generated_sentence)

if __name__ == "__main__":
    mytho_extraction()
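For reference, printing the whole completion dumps the full response dict. If only the matched sentence and the token log-probabilities are of interest, they can be read from the completion object instead. This is a minimal sketch, assuming the OpenAI-compatible response shape returned by create_chat_completion (the "logprobs" field may be None depending on the installed llama-cpp-python version); print_match_and_logprobs is a hypothetical helper, not part of the library:

def print_match_and_logprobs(completion):
    # Assumes an OpenAI-style chat completion dict: choices[0] holds the message.
    choice = completion["choices"][0]
    print("Matched sentence:", choice["message"]["content"])
    # Per-token log-probabilities, if the installed version populates them for chat completions.
    print("Token logprobs:", choice.get("logprobs"))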