#![deny(clippy::cast_possible_truncation, clippy::cast_precision_loss)]
use candle_core::Device;
use engine::Engine;
pub use engine::{
    BertEmbeddingModel, EngineInstruction, ENGINE_INSTRUCTIONS, TERMINATE_ALL_NEXT_STEP,
};
use hf_hub::Cache;
pub use lora::Ordering;
pub use pipeline::ModelCategory;
pub use pipeline::Pipeline;
#[cfg(feature = "pyo3_macros")]
use pyo3::exceptions::PyValueError;
use std::io::BufRead;
use std::io::BufReader;
use std::sync::OnceLock;
use std::time::Instant;
use std::{
    cell::RefCell,
    error::Error,
    fs::OpenOptions,
    io::Write,
    sync::{
        atomic::{self, AtomicBool, AtomicUsize},
        Arc, Mutex, RwLock,
    },
    thread::{self, JoinHandle},
    time::{SystemTime, UNIX_EPOCH},
};
use tokio::sync::mpsc::{channel, Sender};
use tracing::info;
use tracing::warn;

mod cuda;
mod device_map;
mod engine;
mod lora;
mod model_loader;
mod ops;
pub use model_loader::{
    get_auto_device_map_params, get_model_dtype, get_tgt_non_granular_index, LoaderBuilder,
};
mod search;

mod model_selected;
pub use model_selected::ModelSelected;
pub use toml_selector::{get_toml_selected_model_device_map_params, get_toml_selected_model_dtype};

mod amoe;
#[cfg(not(any(all(feature = "cuda", target_family = "unix"), feature = "metal")))]
mod dummy_paged_attention;
mod embedding;
mod gguf;
pub mod layers;
mod layers_masker;
mod layers_utils;
mod models;
#[cfg(any(all(feature = "cuda", target_family = "unix"), feature = "metal"))]
mod paged_attention;
#[cfg(not(any(all(feature = "cuda", target_family = "unix"), feature = "metal")))]
use dummy_paged_attention as paged_attention;
mod attention;
mod diffusion_models;
pub mod distributed;
mod pipeline;
mod prefix_cacher;
mod request;
mod response;
mod sampler;
mod scheduler;
mod sequence;
mod toml_selector;
mod tools;
mod topology;
mod utils;
mod vision_models;
mod xlora_models;

pub use amoe::{AnyMoeConfig, AnyMoeExpertType};
pub use device_map::{
    DeviceLayerMapMetadata, DeviceMapMetadata, DeviceMapSetting, LayerDeviceMapper,
};
pub use gguf::{GGUFArchitecture, GGUF_MULTI_FILE_DELIMITER};
pub use mistralrs_quant::{IsqType, MULTI_LORA_DELIMITER};
pub use paged_attention::{MemoryGpuConfig, PagedAttentionConfig};
pub use pipeline::{
    chat_template::ChatTemplate, parse_isq_value, AnyMoeLoader, AnyMoePipeline,
    AutoDeviceMapParams, DiffusionGenerationParams, DiffusionLoader, DiffusionLoaderBuilder,
    DiffusionLoaderType, DiffusionSpecificConfig, GGMLLoader, GGMLLoaderBuilder,
    GGMLSpecificConfig, GGUFLoader, GGUFLoaderBuilder, GGUFSpecificConfig, GemmaLoader,
    Idefics2Loader, IsqOrganization, LLaVALoader, LLaVANextLoader, LlamaLoader, Loader,
    LocalModelPaths, MistralLoader, MixtralLoader, ModelKind, ModelPaths, NormalLoader,
    NormalLoaderBuilder, NormalLoaderType, NormalSpecificConfig, Phi2Loader, Phi3Loader,
    Phi3VLoader, Qwen2Loader, SpeculativeConfig, SpeculativeLoader, SpeculativePipeline,
    Starcoder2Loader, TokenSource, VisionLoader, VisionLoaderBuilder, VisionLoaderType,
    VisionPromptPrefixer, VisionSpecificConfig,
};
pub use request::{
    ApproximateUserLocation, Constraint, DetokenizationRequest, ImageGenerationResponseFormat,
    LlguidanceGrammar, MessageContent, NormalRequest, Request, RequestMessage, TokenizationRequest,
    WebSearchOptions, WebSearchUserLocation,
};
pub use response::*;
pub use sampler::{
    CustomLogitsProcessor, DrySamplingParams, SamplingParams, StopTokens, TopLogprob,
};
pub use scheduler::{DefaultSchedulerMethod, SchedulerConfig};
use serde::Serialize;
use tokio::runtime::Runtime;
use toml_selector::{TomlLoaderArgs, TomlSelector};
pub use tools::{
    CalledFunction, Function, Tool, ToolCallResponse, ToolCallType, ToolChoice, ToolType,
};
pub use topology::{LayerTopology, Topology};
pub use utils::debug::initialize_logging;
pub use utils::memory_usage::MemoryUsage;
pub use utils::normal::{ModelDType, TryIntoDType};
pub use utils::{paged_attn_supported, using_flash_attn};

pub use llguidance;

pub(crate) static DEBUG: AtomicBool = AtomicBool::new(false);
pub static GLOBAL_HF_CACHE: OnceLock<Cache> = OnceLock::new();
static ENGINE_ID: AtomicUsize = AtomicUsize::new(0);

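/// Configuration details for the loaded model: its [`ModelKind`], the device it runs on,
/// and its [`ModelCategory`].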
pub struct MistralRsConfig {
    pub kind: ModelKind,
    pub device: Device,
    pub category: ModelCategory,
}

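/// Handle to a running model. It holds the background engine thread, the channel used to
/// send [`Request`]s to it, and the state needed to reboot the engine if that thread exits.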
pub struct MistralRs {
    sender: RwLock<Sender<Request>>,
    log: Option<String>,
    id: String,
    creation_time: u64,
    next_request_id: Mutex<RefCell<usize>>,
    reboot_state: RebootState,
    engine_handler: RwLock<JoinHandle<()>>,
    engine_id: usize,
    category: ModelCategory,
    config: MistralRsConfig,
}

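/// Everything required to spin up a replacement engine thread if the current one dies.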
#[derive(Clone)]
struct RebootState {
    pipeline: Arc<tokio::sync::Mutex<dyn Pipeline>>,
    method: SchedulerConfig,
    truncate_sequence: bool,
    no_kv_cache: bool,
    no_prefix_cache: bool,
    prefix_cache_n: usize,
    disable_eos_stop: bool,
    throughput_logging_enabled: bool,
    search_embedding_model: Option<BertEmbeddingModel>,
}

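/// Errors returned when the engine thread handle or the request sender lock is poisoned.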
#[derive(Debug)]
pub enum MistralRsError {
    EnginePoisoned,
    SenderPoisoned,
}

impl std::fmt::Display for MistralRsError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{:?}", &self)
    }
}

impl std::error::Error for MistralRsError {}

#[cfg(feature = "pyo3_macros")]
impl From<MistralRsError> for pyo3::PyErr {
    fn from(value: MistralRsError) -> Self {
        PyValueError::new_err(format!("{:?}", value))
    }
}

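/// Builder for a [`MistralRs`] instance.
///
/// A minimal usage sketch (assuming `pipeline` is an `Arc<tokio::sync::Mutex<dyn Pipeline>>`
/// and `scheduler_config` is a [`SchedulerConfig`] you have already constructed):
///
/// ```ignore
/// let mistralrs = MistralRsBuilder::new(pipeline, scheduler_config, false, None)
///     .with_no_kv_cache(false)
///     .with_prefix_cache_n(16)
///     .build();
/// let sender = mistralrs.get_sender()?;
/// ```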
pub struct MistralRsBuilder {
    pipeline: Arc<tokio::sync::Mutex<dyn Pipeline>>,
    method: SchedulerConfig,
    log: Option<String>,
    truncate_sequence: Option<bool>,
    no_kv_cache: Option<bool>,
    no_prefix_cache: Option<bool>,
    prefix_cache_n: Option<usize>,
    disable_eos_stop: Option<bool>,
    throughput_logging_enabled: bool,
    search_embedding_model: Option<BertEmbeddingModel>,
}

impl MistralRsBuilder {
    pub fn new(
        pipeline: Arc<tokio::sync::Mutex<dyn Pipeline>>,
        method: SchedulerConfig,
        throughput_logging: bool,
        search_embedding_model: Option<BertEmbeddingModel>,
    ) -> Self {
        Self {
            pipeline,
            method,
            log: None,
            truncate_sequence: None,
            no_kv_cache: None,
            no_prefix_cache: None,
            prefix_cache_n: None,
            disable_eos_stop: None,
            throughput_logging_enabled: throughput_logging,
            search_embedding_model,
        }
    }
    pub fn with_log(mut self, log: String) -> Self {
        self.log = Some(log);
        self
    }
    pub fn with_opt_log(mut self, log: Option<String>) -> Self {
        self.log = log;
        self
    }
    pub fn with_truncate_sequence(mut self, truncate_sequence: bool) -> Self {
        self.truncate_sequence = Some(truncate_sequence);
        self
    }
    pub fn with_no_kv_cache(mut self, no_kv_cache: bool) -> Self {
        self.no_kv_cache = Some(no_kv_cache);
        self
    }
    pub fn with_no_prefix_cache(mut self, no_prefix_cache: bool) -> Self {
        self.no_prefix_cache = Some(no_prefix_cache);
        self
    }
    pub fn with_prefix_cache_n(mut self, prefix_cache_n: usize) -> Self {
        self.prefix_cache_n = Some(prefix_cache_n);
        self
    }
    pub fn with_disable_eos_stop(mut self, disable_eos_stop: bool) -> Self {
        self.disable_eos_stop = Some(disable_eos_stop);
        self
    }

    pub fn build(self) -> Arc<MistralRs> {
        MistralRs::new(self)
    }
}

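// Dropping the handle asks the engine to terminate via the global `ENGINE_INSTRUCTIONS` table.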
impl Drop for MistralRs {
    fn drop(&mut self) {
        ENGINE_INSTRUCTIONS
            .lock()
            .expect("`ENGINE_INSTRUCTIONS` was poisoned")
            .insert(self.engine_id, Some(EngineInstruction::Terminate));
    }
}

impl MistralRs {
    fn new(config: MistralRsBuilder) -> Arc<Self> {
        let MistralRsBuilder {
            pipeline,
            method,
            log,
            truncate_sequence,
            no_kv_cache,
            no_prefix_cache,
            prefix_cache_n,
            disable_eos_stop,
            throughput_logging_enabled,
            search_embedding_model,
        } = config;

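        // Probe the pipeline for its model category and give mistralrs_quant a chance to
        // initialize its cuBLASLt wrapper for the pipeline's device, if applicable.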
        let category = pipeline.try_lock().unwrap().category();
        mistralrs_quant::cublaslt::maybe_init_cublas_lt_wrapper(
            get_mut_arcmutex!(pipeline).device(),
        );

        let truncate_sequence = truncate_sequence.unwrap_or(false);
        let no_kv_cache = no_kv_cache.unwrap_or(false);
        let no_prefix_cache = no_prefix_cache.unwrap_or(false);
        let prefix_cache_n = prefix_cache_n.unwrap_or(16);
        let disable_eos_stop = disable_eos_stop.unwrap_or(false);

        let reboot_state = RebootState {
            pipeline: pipeline.clone(),
            method: method.clone(),
            truncate_sequence,
            no_kv_cache,
            no_prefix_cache,
            prefix_cache_n,
            disable_eos_stop,
            throughput_logging_enabled,
            search_embedding_model: search_embedding_model.clone(),
        };

        let (tx, rx) = channel(10_000);

        let sender = RwLock::new(tx);
        let id = pipeline.try_lock().unwrap().name();

        let kind = pipeline.try_lock().unwrap().get_metadata().kind.clone();
        let device = pipeline.try_lock().unwrap().device();
        let config = MistralRsConfig {
            kind,
            device,
            category: category.clone(),
        };

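        // Run the engine on its own OS thread with a dedicated Tokio runtime; it services
        // requests arriving on `rx`.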
        let engine_handler = thread::spawn(move || {
            let rt = Runtime::new().unwrap();
            rt.block_on(async move {
                let engine = Engine::new(
                    rx,
                    pipeline,
                    method,
                    truncate_sequence,
                    no_kv_cache,
                    no_prefix_cache,
                    prefix_cache_n,
                    disable_eos_stop,
                    throughput_logging_enabled,
                    search_embedding_model,
                )
                .expect("Engine creation failed.");
                Arc::new(engine).run().await;
            });
        });

        let engine_id = ENGINE_ID.fetch_add(1, atomic::Ordering::SeqCst);

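        // In daemon mode (see the `distributed` module), this process does not serve requests
        // itself: it reads serialized `Request`s from a local IPC socket, forwards them to the
        // engine, and then never returns from this constructor.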
        if distributed::is_daemon() {
            let request_sender = sender.write().unwrap().clone();
            thread::spawn(move || {
                let rt = Runtime::new().unwrap();
                rt.block_on(async move {
                    use interprocess::local_socket::traits::Stream;
                    use interprocess::local_socket::Stream as LocalStream;

                    loop {
                        let name = distributed::ipc_name().unwrap();
                        if let Ok(stream) = LocalStream::connect(name) {
                            let mut reader = BufReader::new(stream);
                            let mut buf = String::new();
                            reader.read_line(&mut buf).unwrap();
                            let mut req: Request = serde_json::from_str(&buf).unwrap();

                            req = match req {
                                Request::ReIsq(x) => Request::ReIsq(x),
                                Request::Terminate => Request::Terminate,
                                Request::Detokenize(mut x) => {
                                    let (sender, mut receiver) = tokio::sync::mpsc::channel(1);
                                    x.response = sender;
                                    let req = Request::Detokenize(x);

                                    request_sender.send(req).await.unwrap();
                                    let resp = receiver.recv().await.unwrap();
                                    resp.unwrap();
                                    continue;
                                }
                                Request::Tokenize(mut x) => {
                                    let (sender, mut receiver) = tokio::sync::mpsc::channel(1);
                                    x.response = sender;
                                    let req = Request::Tokenize(x);

                                    request_sender.send(req).await.unwrap();
                                    let resp = receiver.recv().await.unwrap();
                                    resp.unwrap();
                                    continue;
                                }
                                Request::Normal(mut x) => {
                                    let (sender, mut receiver) = tokio::sync::mpsc::channel(1);
                                    x.is_streaming = false;
                                    x.response = sender;
                                    let req = Request::Normal(x);

                                    request_sender.send(req).await.unwrap();
                                    let resp = receiver.recv().await.unwrap();
                                    resp.as_result().unwrap();
                                    continue;
                                }
                                Request::TerminateAllSeqsNextStep => {
                                    Request::TerminateAllSeqsNextStep
                                }
                            };

                            request_sender.send(req).await.unwrap();
                        }
                    }
                });
            });

            #[allow(clippy::empty_loop)]
            loop {}
        }

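        // `block_in_place` below requires a multi-threaded Tokio runtime, so detect the
        // runtime flavor first.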
        let is_multi_threaded = tokio::runtime::Handle::try_current()
            .is_ok_and(|h| h.runtime_flavor() != tokio::runtime::RuntimeFlavor::CurrentThread);

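        // Run a one-token dummy completion before returning, exercising the whole pipeline
        // once up front rather than on the first real request.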
        if !distributed::is_daemon()
            && is_multi_threaded
            && matches!(category, ModelCategory::Text | ModelCategory::Vision { .. })
        {
            let clone_sender = sender.read().unwrap().clone();
            tokio::task::block_in_place(|| {
                let (tx, mut rx) = channel(1);
                let req = Request::Normal(NormalRequest {
                    id: 0,
                    messages: RequestMessage::Completion {
                        text: "hello".to_string(),
                        echo_prompt: false,
                        best_of: None,
                    },
                    sampling_params: SamplingParams {
                        max_len: Some(1),
                        ..SamplingParams::deterministic()
                    },
                    response: tx,
                    return_logprobs: false,
                    is_streaming: false,
                    constraint: Constraint::None,
                    suffix: None,
                    tool_choice: None,
                    tools: None,
                    logits_processors: None,
                    return_raw_logits: false,
                    web_search_options: None,
                });
                info!("Beginning dummy run.");
                let start = Instant::now();
                clone_sender.blocking_send(req).unwrap();

                if let Some(_resp) = rx.blocking_recv() {
                    let end = Instant::now();
                    info!(
                        "Dummy run completed in {}s.",
                        end.duration_since(start).as_secs_f64()
                    );
                } else {
                    warn!("Dummy run failed!");
                }
            });
        }

        Arc::new(Self {
            engine_id,
            sender,
            log,
            id,
            creation_time: SystemTime::now()
                .duration_since(UNIX_EPOCH)
                .expect("Time travel has occurred!")
                .as_secs(),
            next_request_id: Mutex::new(RefCell::new(1)),
            reboot_state,
            engine_handler: RwLock::new(engine_handler),
            category,
            config,
        })
    }

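    /// Attempt to restart the engine from the saved [`RebootState`]: spawn a new engine
    /// thread and swap in a fresh request sender. If the existing engine thread is still
    /// running, this is a no-op and returns `Ok(())`.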
    fn reboot_engine(&self) -> Result<(), MistralRsError> {
        let (new_sender, rx) = channel(10_000);
        let reboot_state = self.reboot_state.clone();
        let mut sender_lock = self.sender.write().map_err(|_| {
            tracing::warn!("Couldn't get write lock on the sender during reboot attempt");
            MistralRsError::SenderPoisoned
        })?;
        let mut engine_lock = self.engine_handler.write().map_err(|_| {
            tracing::warn!("Couldn't get write lock on the engine during reboot attempt");
            MistralRsError::EnginePoisoned
        })?;

        if !engine_lock.is_finished() {
            tracing::info!("Engine already running, returning ok");
            Ok(())
        } else {
            let new_engine_handler = thread::spawn(move || {
                let rt = Runtime::new().unwrap();
                rt.block_on(async move {
                    let engine = Engine::new(
                        rx,
                        reboot_state.pipeline.clone(),
                        reboot_state.method,
                        reboot_state.truncate_sequence,
                        reboot_state.no_kv_cache,
                        reboot_state.no_prefix_cache,
                        reboot_state.prefix_cache_n,
                        reboot_state.disable_eos_stop,
                        reboot_state.throughput_logging_enabled,
                        reboot_state.search_embedding_model,
                    )
                    .expect("Engine creation failed");
                    Arc::new(engine).run().await;
                });
            });
            *sender_lock = new_sender;
            *engine_lock = new_engine_handler;
            tracing::info!("Successfully rebooted engine and updated sender + engine handler");
            Ok(())
        }
    }

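    /// Returns whether the engine thread has exited.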
    fn engine_dead(&self) -> Result<bool, MistralRsError> {
        match self.engine_handler.read() {
            Ok(handler) => Ok(handler.is_finished()),
            Err(_) => {
                tracing::warn!("Couldn't get read lock on engine!");
                Err(MistralRsError::EnginePoisoned)
            }
        }
    }

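    /// Obtain a sender for submitting [`Request`]s, transparently rebooting the engine
    /// if its thread has died.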
    pub fn get_sender(&self) -> Result<Sender<Request>, MistralRsError> {
        if self.engine_dead()? {
            tracing::warn!("Engine is dead, rebooting");
            self.reboot_engine()?
        }
        match self.sender.read() {
            Ok(sender) => Ok(sender.clone()),
            Err(_) => Err(MistralRsError::SenderPoisoned),
        }
    }

    pub fn get_id(&self) -> String {
        self.id.clone()
    }

    pub fn get_creation_time(&self) -> u64 {
        self.creation_time
    }

    pub fn get_model_category(&self) -> ModelCategory {
        self.category.clone()
    }

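    /// Return the next request id and increment the internal counter.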
    pub fn next_request_id(&self) -> usize {
        let l = self.next_request_id.lock().unwrap();
        let last = &mut *l.borrow_mut();
        let last_v = *last;
        *last += 1;
        last_v
    }

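    /// Append the request `repr` to the log file, if logging was enabled via
    /// [`MistralRsBuilder::with_log`]. The response and error variants below do the same
    /// for responses and errors.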
    pub fn maybe_log_request(this: Arc<Self>, repr: String) {
        if let Some(file) = &this.log {
            let mut f = OpenOptions::new()
                .append(true)
                .create(true)
                .open(file)
                .expect("Unable to open file");
            let time = chrono::offset::Local::now();
            f.write_all(format!("Request at {time}: {repr}\n\n").as_bytes())
                .expect("Unable to write data");
        }
    }

    pub fn maybe_log_response<T: Serialize>(this: Arc<Self>, resp: &T) {
        if let Some(file) = &this.log {
            let mut f = OpenOptions::new()
                .append(true)
                .create(true)
                .open(file)
                .expect("Unable to open file");
            let time = chrono::offset::Local::now();
            let repr = serde_json::to_string(resp).expect("Serialization of response failed.");
            f.write_all(format!("Response at {time}: {repr}\n\n").as_bytes())
                .expect("Unable to write data");
        }
    }

    pub fn maybe_log_error(this: Arc<Self>, err: &dyn Error) {
        if let Some(file) = &this.log {
            let mut f = OpenOptions::new()
                .append(true)
                .create(true)
                .open(file)
                .expect("Unable to open file");
            let time = chrono::offset::Local::now();
            f.write_all(format!("Error response at {time}: {err}\n\n").as_bytes())
                .expect("Unable to write data");
        }
    }

    pub fn config(&self) -> &MistralRsConfig {
        &self.config
    }
}