//! Repair malformed JSON by quoting bare numeric object keys
//! (e.g. `{1: "foo"}` → `{"1":"foo"}`).
use json_tools::{Buffer, BufferType, Lexer, TokenType};

/// Re-serializes `json`, wrapping bare numeric object keys in double quotes
/// so the result is valid JSON (e.g. `{1: "foo"}` becomes `{"1":"foo"}`).
///
/// Whitespace between tokens is dropped, because the lexer only yields
/// tokens; everything else is emitted verbatim.
pub fn fix(json: &str) -> String {
    let mut fixed = Vec::with_capacity(json.len());
    let mut last_token_type = TokenType::Invalid;
    // Stack of the containers we are currently inside:
    // `true` = object, `false` = array. Needed because a number after a
    // comma is a key only inside an object — inside an array it is a
    // plain element and must NOT be quoted (the old code corrupted
    // `[1, 2]` into `[1, "2"]`).
    let mut in_object: Vec<bool> = Vec::new();
    for token in Lexer::new(json.bytes(), BufferType::Bytes(32)) {
        match token.kind {
            TokenType::Number => {
                if let Buffer::MultiByte(bs) = token.buf {
                    // A number sits in key position right after `{`, or
                    // right after `,` when the innermost container is an
                    // object.
                    let is_key = last_token_type == TokenType::CurlyOpen
                        || (last_token_type == TokenType::Comma
                            && in_object.last().copied().unwrap_or(false));
                    if is_key {
                        fixed.push(b'"');
                        fixed.extend(bs);
                        fixed.push(b'"');
                    } else {
                        fixed.extend(bs);
                    }
                }
            }
            TokenType::String => {
                if let Buffer::MultiByte(bs) = token.buf {
                    fixed.extend(bs);
                }
            }
            _ => {
                // Keep the container stack in sync with structural tokens.
                match token.kind {
                    TokenType::CurlyOpen => in_object.push(true),
                    TokenType::BracketOpen => in_object.push(false),
                    TokenType::CurlyClose | TokenType::BracketClose => {
                        in_object.pop();
                    }
                    _ => {}
                }
                fixed.extend_from_slice(token.kind.as_ref().as_bytes());
            }
        }
        last_token_type = token.kind;
    }
    // Every byte comes either from the valid UTF-8 input `&str` or is an
    // ASCII quote, so this conversion cannot fail; use the checked form
    // instead of `from_utf8_unchecked` to avoid an unnecessary `unsafe`.
    String::from_utf8(fixed).expect("output built from valid UTF-8 tokens")
}

#[cfg(test)]
mod tests {
    use super::fix;

    /// Bare numeric keys get quoted and inter-token whitespace is dropped.
    #[test]
    fn test_fix_json() {
        let broken = r#"{1: "foo", 2 : "bar"}"#;
        let repaired = fix(broken);
        let expected = r#"{"1":"foo","2":"bar"}"#;
        assert_eq!(repaired, expected);
    }
}