language_model_token_expander/language_model_token_expander.rs

crate::ix!();

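/// Batch workflow that uses a language model to expand tokens into values of
/// type `E`. The derived `LanguageModelBatchWorkflow` supplies the batch
/// plumbing (client, workspace, expected content type, model type), while the
/// stored request creator and agent coordinate shape each per-token query.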
#[derive(Debug, Getters, LanguageModelBatchWorkflow)]
#[getset(get = "pub")]
#[batch_error_type(TokenExpanderError)]
#[batch_json_output_format(E)]
pub struct LanguageModelTokenExpander<E>
where E: ExpandedToken
    + DeserializeOwned
    + Named
    + AiJsonTemplate
    + GetTargetPathForAIExpansion
    + LoadFromFile<Error = SaveLoadError> + 'static,
{
    language_model_request_creator: Arc<<E as ExpandedToken>::Expander>,
    agent_coordinate: AgentCoordinate,

    #[batch_client] client: Arc<dyn LanguageModelClientInterface<TokenExpanderError>>,
    #[batch_workspace] batch_workspace: Arc<BatchWorkspace>,
    #[expected_content_type] expected_content_type: ExpectedContentType,
    #[model_type] language_model_type: LanguageModelType,
}

impl<E> LanguageModelTokenExpander<E>
where E: ExpandedToken
    + DeserializeOwned
    + Named
    + AiJsonTemplate
    + GetTargetPathForAIExpansion
    + LoadFromFile<Error = SaveLoadError> + 'static,
{
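    /// Builds an expander rooted at `product_root`: constructs an OpenAI-backed
    /// language model client, initializes a `BatchWorkspace` under the given
    /// root, and stores the request creator, agent coordinate, model type, and
    /// expected content type used by the batch workflow.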
    pub async fn new(
        product_root: impl AsRef<Path>,
        language_model_request_creator: Arc<<E as ExpandedToken>::Expander>,
        agent_coordinate: AgentCoordinate,
        language_model_type: LanguageModelType,
        expected_content_type: ExpectedContentType,
    ) -> Result<Self, TokenExpanderError> {

        info!("creating LanguageModelTokenExpander");

        let client: Arc<dyn LanguageModelClientInterface<TokenExpanderError>> = OpenAIClientHandle::new();

        Ok(Self {
            language_model_request_creator,
            agent_coordinate,
            client,
            batch_workspace: BatchWorkspace::new_in(product_root).await?,
            expected_content_type,
            language_model_type,
        })
    }
}

impl<E> ComputeSystemMessage for LanguageModelTokenExpander<E>
where E: ExpandedToken
    + DeserializeOwned
    + Named
    + AiJsonTemplate
    + GetTargetPathForAIExpansion
    + LoadFromFile<Error = SaveLoadError> + 'static,
{
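    /// System-level instruction attached to this workflow's language model requests.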
    fn system_message() -> String {
        formatdoc!{
            "We are performing a token expansion."
        }
    }
}

impl<E> ComputeLanguageModelCoreQuery for LanguageModelTokenExpander<E>
where E: ExpandedToken
    + DeserializeOwned
    + Named
    + AiJsonTemplate
    + GetTargetPathForAIExpansion
    + LoadFromFile<Error = SaveLoadError> + 'static,
{
    type Seed = TokenPackagedForExpansion;

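    /// Builds the core query text for one packaged token by delegating to the
    /// configured request creator at this expander's agent coordinate and model type.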
    fn compute_language_model_core_query(
        &self,
        input: &Self::Seed
    ) -> String {

        trace!("Computing query core from seed...");

        let coord = self.agent_coordinate();
        let model = self.language_model_type();
        let creator = self.language_model_request_creator();

        creator.create_language_model_query_at_agent_coordinate(
            &model,
            &coord,
            input
        )
    }
}
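
// ---------------------------------------------------------------------------
// Usage sketch (illustrative only; not compiled here). The concrete token type
// `MyExpandedToken` and the `my_*` values below are assumptions for the
// example, not guarantees about this crate's API.
//
//     let expander = LanguageModelTokenExpander::<MyExpandedToken>::new(
//         "/path/to/product-root",       // workspace root for batch artifacts
//         my_request_creator,            // Arc<<MyExpandedToken as ExpandedToken>::Expander>
//         my_agent_coordinate,           // AgentCoordinate for this agent
//         my_language_model_type,        // LanguageModelType to batch against
//         my_expected_content_type,      // ExpectedContentType for the responses
//     ).await?;
//
//     let query = expander.compute_language_model_core_query(&seed_token);
// ---------------------------------------------------------------------------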