openqasm_parser/openqasm/mod.rs

mod ast;
mod lexer_rules;
pub mod semantic_analysis;
mod token;

use self::semantic_analysis::{Cbit, Condition, Gate, GateOperation, Operation, Qubit};
use crate::openqasm::token::Token;
use crate::parser::ast::parse;
use crate::parser::lexer::Lexer;
use ast::MainProgram;
use semantic_analysis::{OpenQASMProgram, SemanticError};
use std::path::Path;
use std::{collections::HashMap, fs::read_to_string};
use token::TokenMatch;
#[derive(Debug)]
pub enum OpenQASMError {
    /// The file (or an included file) could not be read.
    FileError,

    /// The lexer could not tokenize the input.
    TokenError,

    /// The token stream could not be parsed into an AST.
    SyntaxError,

    /// The program failed semantic analysis.
    SemanticError(SemanticError),
}
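/// Parses the OpenQASM program in the file at `file_path`.
///
/// The file is lexed with the OpenQASM lexer rules (expanding `include`
/// statements recursively), the token stream is parsed into an AST, and
/// semantic analysis turns the AST into an [`OpenQASMProgram`].
///
/// A minimal usage sketch (the file name is only a placeholder):
///
/// ```ignore
/// use std::path::Path;
///
/// let program = parse_openqasm(Path::new("circuit.qasm"))?;
/// for (condition, op) in program.get_basic_operations() {
///     // Apply `op`, gated on `condition` when it is present.
/// }
/// ```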
pub fn parse_openqasm(file_path: &Path) -> Result<OpenQASMProgram, OpenQASMError> {
    // Build a lexer with the OpenQASM token rules.
    let mut lexer = Lexer::new();
    lexer_rules::add_open_qasm_rules(&mut lexer);

    // Tokenize the file, recursively expanding any included files.
    let tokens = read_file_tokens(&lexer, file_path)?;

    let t_vec: Vec<TokenMatch> = tokens
        .iter()
        .map(|(token, chars)| (*token, chars.iter().collect::<String>()))
        .collect();

    // Parse the token stream into an AST, then run semantic analysis on it.
    let ast = parse::<MainProgram, Token>(&t_vec).map_err(|_| OpenQASMError::SyntaxError)?;

    OpenQASMProgram::from_ast(&ast).map_err(OpenQASMError::SemanticError)
}
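/// Reads and lexes the file at `file_path` into `(Token, Vec<char>)` pairs.
/// Any `include "...";` statement is replaced by the tokens of the included
/// file, which is resolved relative to the including file's directory.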
fn read_file_tokens(
    lexer: &Lexer<Token>,
    file_path: &Path,
) -> Result<Vec<(Token, Vec<char>)>, OpenQASMError> {
    let file_str = read_to_string(file_path).map_err(|_| OpenQASMError::FileError)?;
    let file_chars = file_str.chars().collect();

    let mut tokens = lexer
        .parse(file_chars)
        .map_err(|_| OpenQASMError::TokenError)?;

    // Splice the tokens of any included file into the stream. A while loop is
    // used because the splice changes the length of `tokens`.
    let mut i = 0;
    while i + 2 < tokens.len() {
        if tokens[i].0 == Token::Include
            && tokens[i + 1].0 == Token::Str
            && tokens[i + 2].0 == Token::Semicolon
        {
            let file_dir = file_path.parent().ok_or(OpenQASMError::FileError)?;

            // The Str token still carries its surrounding quotes; strip them
            // before resolving the path relative to the including file.
            let other_file_path_str = tokens[i + 1].1.iter().collect::<String>();
            let other_file_path_str = &other_file_path_str[1..other_file_path_str.len() - 1];
            let other_file_path = file_dir.join(Path::new(other_file_path_str));

            let other_tokens = read_file_tokens(lexer, &other_file_path)?;
            let spliced_len = other_tokens.len();

            tokens.splice(i..i + 3, other_tokens);

            // Skip past the spliced-in tokens; their own includes were already
            // expanded by the recursive call.
            i += spliced_len;
        } else {
            i += 1;
        }
    }

    Ok(tokens)
}
pub enum BasicOp {
    /// General single-qubit gate with three rotation parameters.
    U(f32, f32, f32, Qubit),

    /// Controlled-NOT acting on two qubits.
    CX(Qubit, Qubit),

    /// Measurement of a qubit into a classical bit.
    Measure(Qubit, Cbit),

    /// Reset of a qubit.
    ResetQ(Qubit),

    /// Reset of a classical bit.
    ResetC(Cbit),
}
impl OpenQASMProgram {
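    /// Flattens every operation in the program into the primitive
    /// [`BasicOp`]s it expands to, keeping each operation's classical
    /// condition (if any) attached to the expanded operations.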
    pub fn get_basic_operations(&self) -> Vec<(Option<Condition>, BasicOp)> {
        let mut res = Vec::new();

        for (condition, op) in self.operations.iter() {
            for op in op.get_basic_operations(&self.gates) {
                res.push((condition.clone(), op));
            }
        }

        res
    }
}
impl Operation {
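    /// Expands this operation into primitive [`BasicOp`]s. `U`, `CX`,
    /// `Measure` and the resets each map to a single basic operation, while a
    /// custom gate is looked up in `gates` and expanded recursively; an
    /// unknown gate name expands to an empty list.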
    pub fn get_basic_operations(&self, gates: &HashMap<String, Gate>) -> Vec<BasicOp> {
        match self {
            Operation::U(p1, p2, p3, a) => vec![BasicOp::U(*p1, *p2, *p3, a.clone())],
            Operation::CX(a1, a2) => vec![BasicOp::CX(a1.clone(), a2.clone())],
            Operation::Custom(name, params, args) => gates
                .get(name)
                .map(|gate| {
                    gate.operations
                        .iter()
                        .flat_map(|gate_op| {
                            gate_op
                                .get_operation(params, args)
                                .get_basic_operations(gates)
                        })
                        .collect()
                })
                .unwrap_or(vec![]),
            Operation::Measure(a1, a2) => vec![BasicOp::Measure(a1.clone(), a2.clone())],
            Operation::ResetQ(a) => vec![BasicOp::ResetQ(a.clone())],
            Operation::ResetC(a) => vec![BasicOp::ResetC(a.clone())],
        }
    }
}
impl GateOperation {
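    /// Instantiates this gate-body operation with the concrete parameters and
    /// qubit arguments the gate was invoked with: parameter expressions are
    /// evaluated against `params`, and argument indices are resolved against
    /// `args`.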
    pub fn get_operation(&self, params: &Vec<f32>, args: &Vec<Qubit>) -> Operation {
        match self {
            GateOperation::U(p1, p2, p3, a) => {
                Operation::U(p1(params), p2(params), p3(params), args[*a].clone())
            }
            GateOperation::CX(a1, a2) => Operation::CX(args[*a1].clone(), args[*a2].clone()),
            GateOperation::Custom(name, gate_params, gate_args) => Operation::Custom(
                name.clone(),
                gate_params.iter().map(|gp| gp(params)).collect(),
                gate_args.iter().map(|ga| args[*ga].clone()).collect(),
            ),
        }
    }
}