// batch_mode_token_expansion_step/token_expansion_step.rs
crate::ix!();

/// A single stage in the batch-mode token-expansion pipeline.
///
/// The canonical ordering is produced by
/// [`TokenExpansionStep::default_steps_from_axes`]: extract and clean the
/// data, map it onto the configured axes, enrich and rephrase the content,
/// apply final adjustments, and emit the result as a JSON structure.
pub enum TokenExpansionStep {
    /// Read and parse the token, cleaning the raw extracted data.
    ExtractAndCleanData,
    /// Assign each extracted item to one of the supplied expansion axes.
    MapTokenToAxes {
        // The axes the extracted data should be reorganized under.
        axes: Vec<Arc<dyn TokenExpansionAxis>>,
    },
    /// Rephrase entries for clarity/consistency and deepen each axis.
    EnrichAndRephraseContent,
    /// Final language and tone adjustments before output.
    ApplySpecificAdjustments,
    /// Render the finished result as a JSON object.
    OutputTheJsonStructure,
}
13
14impl TokenExpansionStep {
15
16 pub fn vec_from_axes(axes: &[Arc<dyn TokenExpansionAxis>]) -> Vec<TokenExpansionStep> {
17
18 TokenExpansionStep::default_steps_from_axes(axes)
20 }
21
22 pub fn default_steps_from_axes(axes: &[Arc<dyn TokenExpansionAxis>]) -> Vec<Self> {
23 vec![
24 TokenExpansionStep::ExtractAndCleanData,
25 TokenExpansionStep::MapTokenToAxes { axes: axes.to_vec() },
26 TokenExpansionStep::EnrichAndRephraseContent,
27 TokenExpansionStep::ApplySpecificAdjustments,
28 TokenExpansionStep::OutputTheJsonStructure,
29 ]
30 }
31
32 pub fn name(&self) -> &'static str {
33 match self {
34 TokenExpansionStep::ExtractAndCleanData => "Extract and Clean Data",
35 TokenExpansionStep::MapTokenToAxes { .. } => "Map Token to Axes",
36 TokenExpansionStep::EnrichAndRephraseContent => "Enrich and Rephrase Content",
37 TokenExpansionStep::ApplySpecificAdjustments => "Apply Specific Adjustments",
38 TokenExpansionStep::OutputTheJsonStructure => "Output the JSON Structure",
39 }
40 }
41
42 pub fn ai_instructions(&self) -> String {
43 match self {
44 TokenExpansionStep::ExtractAndCleanData => "Carefully read and parse the token: ".to_string(),
45 TokenExpansionStep::MapTokenToAxes { axes } => {
46
47 let axes_descriptions = axes
48 .iter()
49 .map(|axis| {
50 format!(
51 "{} [{}]",
52 axis.axis_name(),
53 axis.axis_description()
54 )
55 }).collect::<Vec<_>>().join("\n");
56
57 formatdoc!{
58 "
59 Reorganize the Extracted Data:
60
61 - Assign each piece of data to the appropriate axis based on its content.
62 - Ensure that each entry fits logically within its designated category.
63 - Each axis should be deep, detailed, and specific. Use optimally descriptive and useful language. Do not be too verbose.
64 - There should be at least twelve items per category. There may be more than twelve if you think it will be useful to provide more.
65
66 Axes to Use:
67
68 {}",
69 axes_descriptions
70 }
71 },
72 TokenExpansionStep::EnrichAndRephraseContent => formatdoc!{
73 "
74 Rephrase Entries:
75
76 - Ensure entries are concise and focused.
77 - Remove vague introductory phrases (e.g., avoid starting with \"Illustrates the...\"; instead, use direct details like \"The hanging green vines on the garden wall\").
78
79 Ensure Clarity and Consistency:
80
81 - Use clear, grammatically correct sentences.
82 - Maintain a consistent tone and style throughout.
83
84 Enrich Content:
85
86 - Add additional entries to each axis where appropriate to enhance depth and value.
87 - Ensure that the content is comprehensive and covers multiple facets of the token."
88 },
89 TokenExpansionStep::ApplySpecificAdjustments => formatdoc!{
90 "
91 Focus Language:
92
93 - Use deliberate and precise language.
94 - Avoid vague verbs and keep the maximally intelligent and detail oriented audience in mind.
95 - Do not use modern cultural references or generically reference ideas which do not fit the overall aura of our setting.
96 "
97 },
98 TokenExpansionStep::OutputTheJsonStructure => formatdoc!{
99 "
100 Present the Final JSON:
101
102 - Format the output as a JSON object.
103 - Include the token name and all the axes with their corresponding entries.
104 - The token name should be properly upper camel cased and placed under the `token_name` json key.
105
106 Ensure Proper Formatting:
107
108 - Use proper JSON syntax with keys and arrays.
109 - Ensure that all entries are correctly placed under their respective axes.
110
111 Your output should only consist of the JSON object. do *not* include a preamble or postamble to your response. We would like to be able to parse your response directly as JSON.
112 "
113
114 },
115 }
116 }
117}