chore: switch to chatgpt #4

Merged
ssdd merged 2 commits from chore/switch-to-chatgpt into main 2024-10-13 14:24:07 +00:00
3 changed files with 23 additions and 10 deletions

@@ -1,13 +1,28 @@
Based on the attached text/link below, answer questions accordingly after I write "question:".
Based on the attached markdown/link below, answer questions accordingly after I write "question:".
And you should focus on mentioning the link for the source if possible.
Search the markdown below, answer according to it, and do not entertain unethical questions.
Be specific and to the point while answering the questions.
Before answering,
refer to the markdown on the website and check if any keyword matches it; otherwise say that you cannot answer that question.
Make sure that you are not answering any unethical questions.
If there are link paths like `04_HandlingData/` then add a prefix of
`https://icds-docs.readthedocs.io/en/latest/` so it becomes `https://icds-docs.readthedocs.io/en/latest/04_HandlingData/`.
Try to answer in brief and do not provide any irrelevant information until the question is asked.
Lastly, do not provide any images; just respond in text in your own words.
text:
markdown:
{{input}}
question:
{{question}}
Make sure to respond in markdown format.
Make sure to respond in json which contains content in markdown format.
The response format must be {"response": "your response"}.
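
Since the updated template asks the model for a {"response": "..."} JSON envelope, the caller presumably has to unwrap it before rendering the markdown. A minimal sketch of that step, assuming serde_json and anyhow are available; the helper name and error handling are illustrative and not part of this PR:

use serde_json::Value;

// Hypothetical helper: pull the markdown answer out of the {"response": "..."}
// envelope requested by the prompt above.
fn extract_response(raw: &str) -> anyhow::Result<String> {
    let value: Value = serde_json::from_str(raw)?;
    let text = value
        .get("response")
        .and_then(Value::as_str)
        .ok_or_else(|| anyhow::anyhow!("missing `response` field in model output"))?;
    Ok(text.to_string())
}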

@@ -18,7 +18,8 @@ pub struct Question<'a> {
impl<'a> Question<'a> {
pub fn process(qry: &str, md: &str) -> String {
let qry = BASE.replace("{{question}}", qry);
let qry = format!("{} on the text above", qry);
let qry = BASE.replace("{{question}}", &qry);
let qry = qry.replace("{{input}}", md);
qry
}
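
For reference, a rough illustration of how the two substitutions compose after this change; the sample strings are made up, and the assertions only spell out the intent of the new format! line:

// Illustrative only: the question gets " on the text above" appended first,
// then it is spliced into BASE at {{question}}, and finally the fetched
// markdown replaces {{input}}.
let prompt = Question::process("What is ICDS?", "# Handling Data ...");
assert!(prompt.contains("What is ICDS? on the text above"));
assert!(prompt.contains("# Handling Data ..."));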
@@ -67,11 +68,11 @@ impl Wizard {
config = config.with_auth_resolver(AuthResolver::from_key_value(key));
}
let adapter_kind = AdapterKind::from_model(model.as_str()).unwrap_or(AdapterKind::Ollama);
let adapter_kind = AdapterKind::from_model(model.as_str()).unwrap_or(AdapterKind::OpenAI);
let chat_options = ChatOptions::default()
.with_json_mode(true)
.with_temperature(0.0);
.with_temperature(1.0);
Self {
client: Client::builder()

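The fallback adapter here changes from Ollama to OpenAI, which only matters when genai cannot infer the adapter from the model string; "gpt-4o-mini" itself should already resolve to OpenAI. A small sanity check along those lines, assuming AdapterKind behaves as it is used in this hunk:

// Illustrative sanity check: the new default model resolves to the OpenAI
// adapter on its own, so unwrap_or(AdapterKind::OpenAI) is only a safety net
// for model names genai does not recognize.
let kind = AdapterKind::from_model("gpt-4o-mini").unwrap_or(AdapterKind::OpenAI);
assert!(matches!(kind, AdapterKind::OpenAI));
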
@@ -29,10 +29,7 @@ pub async fn run() -> anyhow::Result<()> {
let query = Question::process(&md, &query);
let question = Question::new(&md, &query);
let wizard = Wizard::new(
"gemini-1.5-flash-latest".to_string(),
rt.env.get_env("API_KEY"),
);
let wizard = Wizard::new("gpt-4o-mini".to_string(), rt.env.get_env("API_KEY"));
tracing::info!("Warming up!");
let _ans = wizard.ask(question).await?;
tracing::info!("Warmup complete.");