def _gen_text(prompt, use_zukijourney=False):
    """Generate text for *prompt* via a chat-completion call.

    Args:
        prompt: The user message sent to the model.
        use_zukijourney: When True, route the request through the
            zukijourney client (``get_client()``); otherwise use the
            g4f client with the ``g4f.Provider.Llama`` provider.

    Returns:
        str: The generated message content from the first choice.
    """
    # Both branches send identical request parameters; only the client
    # factory and the g4f-only `provider` kwarg differ, so build the
    # shared arguments once instead of duplicating them per branch.
    request_kwargs = {
        "model": "llama-3-70b-instruct",
        "messages": [{"role": "user", "content": prompt}],
        "max_tokens": 800,
        "temperature": 0.7,
    }
    if use_zukijourney:
        client = get_client()
    else:
        client = get_g4f_client()
        request_kwargs["provider"] = g4f.Provider.Llama
    response = client.chat.completions.create(**request_kwargs)
    print(response)  # NOTE: debug dump of the raw API response; kept intentionally
    return response.choices[0].message.content