How to use custom system prompts in LMEval
Feb 21, 2025 - ⧖ 2 min

We start by creating a DataScienceCluster. Then, we deploy an LMEvalJob that uses a custom system prompt:
# LMEvalJob that evaluates google/flan-t5-base on the WNLI card using a
# custom Unitxt template (tp_0) and a custom system prompt (sp_0).
apiVersion: trustyai.opendatahub.io/v1alpha1
kind: LMEvalJob
metadata:
  name: custom-card-template
  namespace: tas
spec:
  # Allow the job to download the model/dataset from the internet.
  allowOnline: true
  # NOTE(review): code execution is enabled here; confirm the chosen
  # template/postprocessors actually require it, otherwise drop it.
  allowCodeExecution: true
  model: hf
  modelArgs:
    - name: pretrained
      value: google/flan-t5-base
  taskList:
    taskRecipes:
      - template:
          # References the custom template defined below under custom.templates.
          ref: tp_0
        systemPrompt:
          # References the custom system prompt defined below under custom.systemPrompts.
          ref: sp_0
        card:
          name: "cards.wnli"
    custom:
      templates:
        - name: tp_0
          # Unitxt input/output template, embedded as a JSON literal block scalar.
          value: |
            {
              "__type__": "input_output_template",
              "input_format": "{text_a_type}: {text_a}\n{text_b_type}: {text_b}",
              "output_format": "{label}",
              "target_prefix": "The {type_of_relation} class is ",
              "instruction": "Given a {text_a_type} and {text_b_type} classify the {type_of_relation} of the {text_b_type} to one of {classes}.",
              "postprocessors": [
                "processors.take_first_non_empty_line",
                "processors.lower_case_till_punc"
              ]
            }
      systemPrompts:
        - name: sp_0
          value: "Be concise. At every point give the shortest acceptable answer."
  # Include per-sample inputs/outputs in the results for inspection.
  logSamples: true