var searchIndex = {}; searchIndex["csv"] = {"doc":"This crate provides a streaming CSV (comma separated values) writer and\nreader that works with the `serialize` crate to do type based encoding\nand decoding. There are two primary goals of this project:","items":[[3,"Encoded","csv","A record to be encoded.",null,null],[3,"Decoded","","A record to be decoded.",null,null],[3,"Reader","","A CSV reader.",null,null],[3,"DecodedRecords","","An iterator of decoded records.",null,null],[3,"StringRecords","","An iterator of `String` records.",null,null],[3,"ByteRecords","","An iterator of `ByteString` records.",null,null],[3,"Writer","","A CSV writer.",null,null],[3,"LocatableError","","An error tagged with a location at which it occurred.",null,null],[12,"record","","The record number (starting at 1).",0,null],[12,"field","","The field number (starting at 1).",0,null],[12,"err","","The error.",0,null],[4,"NextField","","NextField is the result of parsing a single CSV field.",null,null],[13,"Data","","A single CSV field as a borrowed slice of the parser's internal buffer.",1,null],[13,"Error","","A CSV error found during parsing. When an error is found, it is\nfirst returned. All subsequent calls of `next_bytes` will return\n`EndOfCsv`. (EOF is exempt from this. Depending on the state of the\nparser, an EOF could trigger `Data`, `EndOfRecord` and `EndOfCsv`,\nall in succession.)",1,null],[13,"EndOfRecord","","Indicates the end of a record.",1,null],[13,"EndOfCsv","","Indicates the end of the CSV data. Once this state is entered, the\nparser can never leave it.",1,null],[4,"RecordTerminator","","A record terminator.",null,null],[13,"CRLF","","Parses `\\r`, `\\n` or `\\r\\n` as a single record terminator.",2,null],[13,"Any","","Parses the byte given as a record terminator.",2,null],[4,"QuoteStyle","","The quoting style to use when writing CSV data.",null,null],[13,"Always","","This puts quotes around every field. 
Always.",3,null],[13,"Necessary","","This puts quotes around fields only when necessary.",3,null],[13,"Never","","This *never* writes quotes.",3,null],[4,"Error","","An error produced by an operation on CSV data.",null,null],[13,"Encode","","An error reported by the type-based encoder.",4,null],[13,"Decode","","An error reported by the type-based decoder.",4,null],[13,"Parse","","An error reported by the CSV parser.",4,null],[13,"Io","","An error originating from reading or writing to the underlying buffer.",4,null],[13,"Index","","An error originating from using a CSV index.",4,null],[4,"ParseError","","A description of a CSV parse error.",null,null],[13,"UnequalLengths","","A record was found that has a different size than other records.",5,null],[12,"expected","csv::ParseError","Expected a record with this many fields.",5,null],[12,"got","","Got a record with this many fields.",5,null],[13,"InvalidUtf8","csv","An error occurred when trying to convert a field to a Unicode string.",5,null],[0,"index","","This sub-module provides experimental CSV record indexing.",null,null],[3,"Indexed","csv::index","A type for representing CSV data with a basic record index.",null,null],[5,"create_index","","Creates a new index for the given CSV reader.",null,{"inputs":[{"name":"reader"},{"name":"w"}],"output":{"name":"result"}}],[11,"deref","","",6,null],[11,"deref_mut","","",6,null],[11,"open","","Opens a new index corresponding to the CSV reader given.",6,{"inputs":[{"name":"reader"},{"name":"i"}],"output":{"name":"result"}}],[11,"seek","","Seeks to `i`th record.",6,null],[11,"count","","Returns the number of CSV records in the index in `O(1)` time.",6,null],[11,"new","csv","Creates a new encodable record. The value returned can be passed to\n`Encodable::encode`.",7,{"inputs":[],"output":{"name":"encoded"}}],[11,"unwrap","","Once a record has been encoded into this value, `unwrap` can be used\nto access the raw CSV record.",7,null],[11,"emit_nil","","",7,null],[11,"emit_usize","","",7,null],[11,"emit_u64","","",7,null],[11,"emit_u32","","",7,null],[11,"emit_u16","","",7,null],[11,"emit_u8","","",7,null],[11,"emit_isize","","",7,null],[11,"emit_i64","","",7,null],[11,"emit_i32","","",7,null],[11,"emit_i16","","",7,null],[11,"emit_i8","","",7,null],[11,"emit_bool","","",7,null],[11,"emit_f64","","",7,null],[11,"emit_f32","","",7,null],[11,"emit_char","","",7,null],[11,"emit_str","","",7,null],[11,"emit_enum","","",7,null],[11,"emit_enum_variant","","",7,null],[11,"emit_enum_variant_arg","","",7,null],[11,"emit_enum_struct_variant","","",7,null],[11,"emit_enum_struct_variant_field","","",7,null],[11,"emit_struct","","",7,null],[11,"emit_struct_field","","",7,null],[11,"emit_tuple","","",7,null],[11,"emit_tuple_arg","","",7,null],[11,"emit_tuple_struct","","",7,null],[11,"emit_tuple_struct_arg","","",7,null],[11,"emit_option","","",7,null],[11,"emit_option_none","","",7,null],[11,"emit_option_some","","",7,null],[11,"emit_seq","","",7,null],[11,"emit_seq_elt","","",7,null],[11,"emit_map","","",7,null],[11,"emit_map_elt_key","","",7,null],[11,"emit_map_elt_val","","",7,null],[11,"new","","Creates a new decodable record from a record of byte 
strings.",8,{"inputs":[{"name":"vec"}],"output":{"name":"decoded"}}],[11,"error","","",8,null],[11,"read_nil","","",8,null],[11,"read_usize","","",8,null],[11,"read_u64","","",8,null],[11,"read_u32","","",8,null],[11,"read_u16","","",8,null],[11,"read_u8","","",8,null],[11,"read_isize","","",8,null],[11,"read_i64","","",8,null],[11,"read_i32","","",8,null],[11,"read_i16","","",8,null],[11,"read_i8","","",8,null],[11,"read_bool","","",8,null],[11,"read_f64","","",8,null],[11,"read_f32","","",8,null],[11,"read_char","","",8,null],[11,"read_str","","",8,null],[11,"read_enum","","",8,null],[11,"read_enum_variant","","",8,null],[11,"read_enum_variant_arg","","",8,null],[11,"read_enum_struct_variant","","",8,null],[11,"read_enum_struct_variant_field","","",8,null],[11,"read_struct","","",8,null],[11,"read_struct_field","","",8,null],[11,"read_tuple","","",8,null],[11,"read_tuple_arg","","",8,null],[11,"read_tuple_struct","","",8,null],[11,"read_tuple_struct_arg","","",8,null],[11,"read_option","","",8,null],[11,"read_seq","","",8,null],[11,"read_seq_elt","","",8,null],[11,"read_map","","",8,null],[11,"read_map_elt_key","","",8,null],[11,"read_map_elt_val","","",8,null],[11,"clone","","",2,null],[11,"eq","","",2,null],[11,"from_reader","","Creates a new CSV reader from an arbitrary `io::Read`.",9,{"inputs":[{"name":"r"}],"output":{"name":"reader"}}],[11,"from_file","","Creates a new CSV reader for the data at the file path given.",9,{"inputs":[{"name":"p"}],"output":{"name":"result"}}],[11,"from_string","","Creates a CSV reader for an in memory string buffer.",9,{"inputs":[{"name":"s"}],"output":{"name":"reader"}}],[11,"from_bytes","","Creates a CSV reader for an in memory buffer of bytes.",9,{"inputs":[{"name":"v"}],"output":{"name":"reader"}}],[11,"decode","","Uses type-based decoding to read a single record from CSV data.",9,null],[11,"records","","Returns an iterator of records in the CSV data where each field is\na `String`.",9,null],[11,"headers","","Returns a *copy* of the first record in the CSV data as strings.",9,null],[11,"delimiter","","The delimiter to use when reading CSV data.",9,null],[11,"has_headers","","Whether to treat the first row as a special header row.",9,null],[11,"flexible","","Whether to allow flexible length records when reading CSV data.",9,null],[11,"record_terminator","","Set the record terminator to use when reading CSV data.",9,null],[11,"quote","","Set the quote character to use when reading CSV data.",9,null],[11,"escape","","Set the escape character to use when reading CSV data.",9,null],[11,"double_quote","","Enable double quote escapes.",9,null],[11,"ascii","","A convenience method for reading ASCII delimited text.",9,null],[11,"fmt","","",1,null],[11,"into_iter_result","","Transform NextField into an iterator result.",1,null],[11,"is_end","","Returns true if and only if the end of CSV data has been reached.",1,null],[11,"unwrap","","Returns the underlying field data.",1,null],[11,"byte_headers","","This is just like `headers`, except fields are `ByteString`s instead\nof `String`s.",9,null],[11,"byte_records","","This is just like `records`, except fields are `ByteString`s instead\nof `String`s.",9,null],[11,"done","","Returns `true` if the CSV parser has reached its final state. 
When\nthis method returns `true`, all iterators will always return `None`.",9,null],[11,"next_bytes","","An iterator over fields in the current record.",9,null],[11,"next_str","","This is just like `next_bytes` except it converts each field to\na Unicode string in place.",9,null],[11,"byte_offset","","Returns the byte offset at which the current record started.",9,null],[11,"seek","","Seeks the underlying reader to the file cursor specified.",9,null],[11,"next","","",10,null],[11,"next","","",11,null],[11,"next","","",12,null],[11,"clone","","",3,null],[11,"from_file","","Creates a new `Writer` that writes CSV data to the file path given.",13,{"inputs":[{"name":"p"}],"output":{"name":"result"}}],[11,"from_writer","","Creates a new CSV writer that writes to the `io::Write` given.",13,{"inputs":[{"name":"w"}],"output":{"name":"writer"}}],[11,"from_buffer","","Creates a new CSV writer that writes to the buffer given.",13,{"inputs":[{"name":"bufwriter"}],"output":{"name":"writer"}}],[11,"from_memory","","Creates a new CSV writer that writes to an in memory buffer. At any\ntime, `as_string` or `as_bytes` can be called to retrieve the\ncumulative CSV data.",13,{"inputs":[],"output":{"name":"writer"}}],[11,"as_string","","Returns the written CSV data as a string.",13,null],[11,"as_bytes","","Returns the encoded CSV data as raw bytes.",13,null],[11,"into_string","","Convert the Writer into a string of written CSV data",13,null],[11,"into_bytes","","Convert the Writer into a vector of encoded CSV bytes.",13,null],[11,"encode","","Writes a record by encoding any `Encodable` value.",13,null],[11,"write","","Writes a record of strings (Unicode or raw bytes).",13,null],[11,"flush","","Flushes the underlying buffer.",13,null],[11,"delimiter","","The delimiter to use when writing CSV data.",13,null],[11,"flexible","","Whether to allow flexible length records when writing CSV data.",13,null],[11,"record_terminator","","Sets the record terminator to use when writing CSV data.",13,null],[11,"quote_style","","Set the quoting style to use when writing CSV data.",13,null],[11,"quote","","Set the quote character to use when writing CSV data.",13,null],[11,"escape","","Set the escape character to use when writing CSV data.",13,null],[11,"double_quote","","Set the quoting escape mechanism.",13,null],[6,"Result","","A convenience type for representing the result of most CSV reader/writer\noperations.",null,null],[6,"ByteString","","A convenience type for referring to a plain byte string.",null,null],[8,"BorrowBytes","","A trait that permits borrowing byte vectors.",null,null],[10,"borrow_bytes","","Borrow a byte vector.",14,null],[11,"fmt","","",4,null],[11,"fmt","","",0,null],[11,"clone","","",0,null],[11,"fmt","","",5,null],[11,"clone","","",5,null],[11,"fmt","","",4,null],[11,"fmt","","",0,null],[11,"fmt","","",5,null],[11,"description","","",4,null],[11,"cause","","",4,null],[11,"from","","",4,{"inputs":[{"name":"error"}],"output":{"name":"error"}}],[11,"borrow_bytes","","",15,null]],"paths":[[3,"LocatableError"],[4,"NextField"],[4,"RecordTerminator"],[4,"QuoteStyle"],[4,"Error"],[4,"ParseError"],[3,"Indexed"],[3,"Encoded"],[3,"Decoded"],[3,"Reader"],[3,"DecodedRecords"],[3,"StringRecords"],[3,"ByteRecords"],[3,"Writer"],[8,"BorrowBytes"],[6,"ByteString"]]}; initSearch(searchIndex);