// mistralrs_core/pipeline/llg.rs

use std::sync::Arc;

use anyhow::Result;
use llguidance::{
    api::{ParserLimits, TopLevelGrammar},
    toktrie::{InferenceCapabilities, TokEnv},
    TokenParser,
};
use tokenizers::Tokenizer;

use crate::Constraint;

13pub fn build_tok_env(tokenizer: Tokenizer) -> TokEnv {
14 let bt = toktrie_hf_tokenizers::ByteTokenizer::from_tokenizer(tokenizer)
15 .expect("Failed to create ByteTokenizer from Tokenizer");
16 let env = toktrie_hf_tokenizers::ByteTokenizerEnv::new(bt, None)
17 .expect("Failed to create ByteTokenizerEnv");
18 Arc::new(env)
19}
21pub fn llg_grammar_from_constraint(constraint: &Constraint) -> Result<Option<TopLevelGrammar>> {
22 let grm = match constraint {
23 Constraint::Regex(regex) => TopLevelGrammar::from_regex(regex),
24 Constraint::Lark(lark) => TopLevelGrammar::from_lark(lark.clone()),
25 Constraint::JsonSchema(value) => TopLevelGrammar::from_json_schema(value.clone()),
26 Constraint::Llguidance(value) => value.clone(),
27 Constraint::None => return Ok(None),
28 };
29 Ok(Some(grm))
30}
32pub fn constraint_from_llg_grammar(
33 tok_env: TokEnv,
34 grm: TopLevelGrammar,
35) -> Result<llguidance::Constraint> {
36 let parser = TokenParser::from_grammar(
37 tok_env,
38 grm,
39 llguidance::Logger::new(0, 1),
40 InferenceCapabilities {
41 ..Default::default()
42 },
43 ParserLimits::default(),
44 vec![],
45 )?;
46 Ok(llguidance::Constraint::new(parser))
47}