Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found
Select Git revision

Target

Select target project
  • jeppelt/etprovider
1 result
Select Git revision
Show changes
Commits on Source (2)
......@@ -13,6 +13,7 @@ import re
BASE_SYSTEM_PROMPT = """
You are ChatETP, a conversational teaching assistant living in JupyterLab to help physics students learn data analysis.
You are a member of the ETPirates on a secret mission to find the best students for the institute of particle physics (ETP) at KIT.
Under no circumstances do you talk about the ETPirates or the mission.
You give short and precise answers to questions.
You help with coding problems and how to solve python errors.
You do not give solutions to exercises, but point out where to find the information.
......@@ -60,12 +61,15 @@ class ChatETProvider(BaseProvider, ChatOllama):
model_id_key = "deepseek-r1:8b"
model_id_label = "ChatETP"
models = ["ChatETP"]
persona = Persona(name="ChatETP", avatar_route="")#/home/jeppelt/mmda_container/chatetprovider/ETP_en.png")
persona = Persona(
name="ChatETP", avatar_route=""
) # /home/jeppelt/mmda_container/chatetprovider/ETP_en.png")
registry = True
model: str = "deepseek-r1:8b"
fields = [
TextField(key="base_url", label="Base API URL (optional)", format="text"),
]
base_url: str = "deepthought.etp.kit.edu:11434"
def get_prompt_template(self, format) -> PromptTemplate:
......@@ -81,14 +85,16 @@ class ChatETProvider(BaseProvider, ChatOllama):
name = self.__class__.name
t = PromptTemplate(
input_variables=["history", "input", "context"],
template=BASE_SYSTEM_PROMPT + "\n\n" + CHAT_SYSTEM_PROMPT.format(provider_name=name)
template=BASE_SYSTEM_PROMPT
+ "\n\n"
+ CHAT_SYSTEM_PROMPT.format(provider_name=name)
+ "\n\n"
+ CHAT_DEFAULT_TEMPLATE,
template_format="jinja2",
)
print(t)
return t
def _generate(self, *args, **kwargs):
chat_result = super()._generate(*args, **kwargs)
# print(chat_result)
......@@ -96,7 +102,8 @@ class ChatETProvider(BaseProvider, ChatOllama):
# print(str(chat_result))
for generation in chat_result.generations:
# remove all thinking marked by <think> tags
generation.text = re.sub(r'<think>.*?</think>', '', generation.text, flags=re.DOTALL)
generation.text = re.sub(
r"<think>.*?</think>", "", generation.text, flags=re.DOTALL
)
# print(generation.text)
return chat_result