1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
use proc_macro::TokenStream;
use syn::{parse_macro_input, Expr, Lit};
use openai_api_rust::*;
use openai_api_rust::chat::*;

/// Temperature preset for a chat-completion request.
///
/// The discriminant is cast straight to `f32` and used as the OpenAI
/// `temperature` parameter (see `get_from_ai`), so the variants correspond
/// to temperatures 0.0, 1.0 and 2.0.
enum Randomness {
    NotRandom = 0,
    Random = 1,
    VeryRandom = 2
}

/// Preamble prepended to every `ai_write!` prompt; steers the model toward
/// answering with Rust statements only.
// `'static` is implied on const references (clippy: redundant_static_lifetimes).
const START_PROMPT: &str = "You are an assistant that only respond with rust code. Do not write normal text";

/// Preamble prepended to every `ai_write_expr!` prompt; additionally asks the
/// model for function-body code rather than full function definitions.
// `'static` is implied on const references (clippy: redundant_static_lifetimes).
const START_PROMPT_EXPR: &str = "You are an assistant that only respond with rust code. Do not write normal text and don't write functions, assume you are writing code that will be in a function.";

/// Sends `prompt` to the OpenAI chat-completion API and returns the reply text.
///
/// `start_prompt` is prepended to the user prompt to steer the model, and
/// `randomness`'s discriminant is cast to the request `temperature`
/// (0.0 / 1.0 / 2.0).
///
/// # Panics
/// Panics if the API credentials cannot be read from the environment, if the
/// HTTP request fails, or if the first choice carries no message.
fn get_from_ai(prompt: String, start_prompt: &'static str, randomness: Randomness) -> String {
    let auth = Auth::from_env().expect("OpenAI credentials must be set in the environment for ai_write macros");
    let openai = OpenAI::new(auth, "https://api.openai.com/v1/");
    let content = format!("{}.\n {}", start_prompt, prompt);
    let body = ChatBody {
        model: "gpt-3.5-turbo".to_string(),
        max_tokens: Some(15),
        // The enum discriminant doubles as the OpenAI temperature value.
        temperature: Some(randomness as i32 as f32),
        top_p: Some(0_f32),
        n: Some(2),
        stream: Some(false),
        stop: None,
        presence_penalty: None,
        frequency_penalty: None,
        logit_bias: None,
        user: None,
        // `content` is not used after this point, so move it instead of cloning.
        messages: vec![Message { role: Role::User, content }],
    };
    let rs = openai.chat_completion_create(&body);
    let choices = rs.expect("chat completion request failed").choices;
    let message = choices[0]
        .message
        .as_ref()
        .expect("chat completion response contained no message");
    message.content.clone()
}

/// Extracts the value of a plain string-literal expression.
///
/// Any other expression kind (non-literal, or a non-string literal) yields an
/// error so the macros can report a usable message to the caller.
fn from_expr_to_string(expr : Expr) -> Result<String, String> {
    if let Expr::Lit(expr_lit) = expr {
        if let Lit::Str(lit_str) = expr_lit.lit {
            return Ok(lit_str.value());
        }
    }
    Err("expected string in ai_write macro".to_string())
}

/// Function-like proc macro: sends the given string literal to OpenAI and
/// splices the returned text into the call site as Rust tokens.
///
/// Usage: `ai_write!("a prompt describing the code to generate");`
#[proc_macro]
pub fn ai_write(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as Expr);
    // Surface the intended diagnostic as the compile error instead of
    // unwrapping, which would print the Err value's Debug form.
    let prompt = match from_expr_to_string(input) {
        Ok(s) => s,
        Err(e) => panic!("{}", e),
    };
    let content = get_from_ai(prompt, START_PROMPT, Randomness::NotRandom);
    match content.as_str().parse() {
        Ok(tokens) => tokens,
        Err(e) => {
            // The model's reply does not lex as Rust; report it on stderr
            // and expand to nothing rather than aborting the build.
            eprintln!("Lexing Error : {}", e);
            TokenStream::new()
        }
    }
}

/// Function-like proc macro: like `ai_write!`, but prompts the model for
/// expression/statement-level code suitable for use inside a function body.
///
/// Usage: `let x = ai_write_expr!("an expression computing ...");`
#[proc_macro]
pub fn ai_write_expr(input: TokenStream) -> TokenStream {
    let input = parse_macro_input!(input as Expr);
    // Surface the intended diagnostic as the compile error instead of
    // unwrapping, which would print the Err value's Debug form.
    let prompt = match from_expr_to_string(input) {
        Ok(s) => s,
        Err(e) => panic!("{}", e),
    };
    let content = get_from_ai(prompt, START_PROMPT_EXPR, Randomness::NotRandom);
    match content.as_str().parse() {
        Ok(tokens) => tokens,
        Err(e) => {
            // The model's reply does not lex as Rust; report it on stderr
            // and expand to nothing rather than aborting the build.
            eprintln!("Lexing Error : {}", e);
            TokenStream::new()
        }
    }
}