ai_write/
lib.rs

use proc_macro::TokenStream;
use syn::{parse_macro_input, Expr, Lit};
use openai_api_rust::*;
use openai_api_rust::chat::*;

/// Controls the sampling temperature sent to the API (0.0, 1.0 or 2.0).
enum Randomness {
    NotRandom = 0,
    Random = 1,
    VeryRandom = 2,
}

const START_PROMPT: &str = "You are an assistant that only responds with Rust code. Do not write normal text.";

const START_PROMPT_EXPR: &str = "You are an assistant that only responds with Rust code. Do not write normal text and do not write functions; assume you are writing code that will be placed inside a function.";

fn get_from_ai(prompt: String, start_prompt: &'static str, randomness: Randomness) -> String {
    // Reads the API key from the environment; this runs at macro-expansion time,
    // so it must be available in the build environment.
    let auth = Auth::from_env().unwrap();
    let openai = OpenAI::new(auth, "https://api.openai.com/v1/");
    let content = format!("{}.\n {}", start_prompt, prompt);
    let body = ChatBody {
        model: "gpt-3.5-turbo".to_string(),
        max_tokens: Some(15),
        // The enum discriminant doubles as the sampling temperature.
        temperature: Some(randomness as i32 as f32),
        top_p: Some(0_f32),
        // Two completions are requested, but only the first is used below.
        n: Some(2),
        stream: Some(false),
        stop: None,
        presence_penalty: None,
        frequency_penalty: None,
        logit_bias: None,
        user: None,
        messages: vec![Message { role: Role::User, content }],
    };
    let rs = openai.chat_completion_create(&body);
    // Take the first returned choice and use its message content verbatim.
    let choices = rs.unwrap().choices;
    let message = choices[0].message.as_ref().unwrap();
    message.content.clone()
}

/// Extracts the string value from the string-literal expression passed to the macros.
fn from_expr_to_string(expr: Expr) -> Result<String, String> {
    match expr {
        Expr::Lit(l) => match l.lit {
            Lit::Str(lit_str) => Ok(lit_str.value()),
            _ => Err("expected string in ai_write macro".to_string()),
        },
        _ => Err("expected string in ai_write macro".to_string()),
    }
}

#[proc_macro]
pub fn ai_write(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as Expr);
    let arg = from_expr_to_string(input);
    let content = get_from_ai(arg.unwrap(), START_PROMPT, Randomness::NotRandom);
    // Re-parse the model's reply as Rust tokens; on a lexing failure, emit nothing.
    match content.parse::<TokenStream>() {
        Ok(v) => v,
        Err(e) => {
            println!("Lexing error: {}", e);
            TokenStream::new()
        }
    }
}
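
// A minimal usage sketch for `ai_write!` (hypothetical downstream crate; only the
// crate and macro names come from this file, the prompt and the shown expansion are
// assumptions). The request is sent at compile time, so the API key must be set in
// the build environment:
//
//     use ai_write::ai_write;
//
//     // Might expand to items such as: fn add(a: i32, b: i32) -> i32 { a + b }
//     ai_write!("write a function named add that adds two i32 values");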

#[proc_macro]
pub fn ai_write_expr(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as Expr);
    let arg = from_expr_to_string(input);
    let content = get_from_ai(arg.unwrap(), START_PROMPT_EXPR, Randomness::NotRandom);
    // Same as `ai_write!`, but the prompt asks the model for an expression rather than items.
    match content.parse::<TokenStream>() {
        Ok(v) => v,
        Err(e) => {
            println!("Lexing error: {}", e);
            TokenStream::new()
        }
    }
}
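
// Likewise, a sketch for `ai_write_expr!`, which is meant to expand to an expression
// inside a function body (again hypothetical; the actual expansion depends entirely
// on the model's reply):
//
//     use ai_write::ai_write_expr;
//
//     fn main() {
//         // Might expand to an expression such as `2 + 2`.
//         let four: i32 = ai_write_expr!("add 2 and 2");
//         println!("{}", four);
//     }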