使用 reqwest 请求 deepseek api,感受下使用基础请求连接 LLM 需要完成哪些工作
/// Minimal example: call the DeepSeek chat-completions API with a raw
/// reqwest POST, to see everything a bare-bones LLM request involves
/// (auth header, JSON body, response decoding).
#[tokio::main]
async fn main() -> Result<(), anyhow::Error> {
    // Load .env if present; a missing file is not an error.
    dotenvy::dotenv().ok();
    // Return an error through anyhow instead of panicking via expect():
    // main already returns Result, so let the caller see a clean message.
    let api_key = std::env::var("DEEPSEEK_API_KEY")
        .map_err(|_| anyhow::anyhow!("DEEPSEEK_API_KEY must be set"))?;
    let api_url = std::env::var("DEEPSEEK_API_URL")
        .unwrap_or_else(|_| "https://api.deepseek.com/v1/chat/completions".to_string());
    // NOTE(review): verify the model id against DeepSeek's current docs.
    let body = serde_json::json!({
        "model": "deepseek-v4-flash",
        "messages": [{"role": "user", "content": "你好,你是?"}],
        "stream": false
    });
    let response = reqwest::Client::new()
        .post(api_url)
        .header("Authorization", format!("Bearer {}", api_key))
        .header("Content-Type", "application/json")
        .body(body.to_string())
        .send()
        .await?;
    let text = response.text().await?;
    // Propagate parse failures with `?` instead of unwrap(): on errors the
    // server (or a proxy) may return a non-JSON page, and unwrap() would panic.
    let body: serde_json::Value = serde_json::from_str(&text)?;
    println!("{:#?}", body);
    Ok(())
}
使用 async-openai sdk 来完成请求 LLM
// Same request as the raw-reqwest version, but through the async-openai SDK.
use async_openai::{
    types::chat::{ChatCompletionRequestUserMessage, CreateChatCompletionRequestArgs},
    Client,
};

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    dotenvy::dotenv().ok();
    // presumably picks up credentials from env vars — see async-openai docs
    let sdk_client = Client::new();
    // Build the single-turn conversation, then the request via the builder.
    let user_message = ChatCompletionRequestUserMessage::from("你好,你是?");
    let chat_request = CreateChatCompletionRequestArgs::default()
        .model("gpt-5.4-mini")
        .messages(vec![user_message.into()])
        .build()?;
    let reply = sdk_client.chat().create(chat_request).await?;
    println!("{:#?}", reply);
    Ok(())
}
使用 responses 的话,代码如下
[Chrome] 我的 chrome 还停留在 114 版本,今天发现一个网站无法下载文件,最终发现是 chrome 版本太老了
再开吐槽帖:z.ai 平台的 agent 条件真的不适合用来 coding。
// The Responses-API variant of the same request: a plain string input
// instead of an explicit message list.
use async_openai::{types::responses::CreateResponseArgs, Client};

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    dotenvy::dotenv().ok();
    // presumably picks up credentials from env vars — see async-openai docs
    let sdk_client = Client::new();
    let response_request = CreateResponseArgs::default()
        .model("gpt-5.4-mini")
        .input("你好,你是?")
        .build()?;
    let reply = sdk_client.responses().create(response_request).await?;
    println!("{:#?}", reply);
    Ok(())
}
1 个帖子 - 1 位参与者