use super::ensure;
use super::posix::posix_tz_string;
use crate::data::posix::PosixTzString;
use crate::data::time::Seconds;
use crate::data::tzif::{
DataBlock, LeapSecondRecord, LocalTimeTypeRecord, StandardWallIndicator, TzifData, TzifHeader,
UtLocalIndicator,
};
use combine::parser::byte::byte;
use combine::parser::byte::num::{be_i32, be_i64, be_u32};
use combine::{
any, between, choice, count_min_max, one_of, skip_count, value, ParseError, Parser, Stream,
};
/// Parses the four-byte TZif magic sequence `"TZif"` (case-sensitive),
/// yielding the final byte on success.
fn magic_sequence<Input>() -> impl Parser<Input, Output = u8>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    (byte(b'T'), byte(b'Z'), byte(b'i'), byte(b'f')).map(|(_, _, _, last)| last)
}
/// Parses the TZif version octet.
///
/// A NUL byte denotes version 1; the ASCII digits `'2'` and `'3'` denote
/// versions 2 and 3. Any other byte fails the parse.
fn version<Input>() -> impl Parser<Input, Output = usize>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    one_of([0, b'2', b'3']).map(|raw: u8| match raw {
        // Version 1 is encoded as a NUL byte rather than the digit '1'.
        0 => 1,
        digit => usize::from(digit - b'0'),
    })
}
/// Parses the `isutcnt` header field: the number of UT/local indicators,
/// stored as a big-endian 32-bit unsigned integer.
fn isutcnt<Input>() -> impl Parser<Input, Output = usize>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    be_u32().map(|count| count as usize)
}
/// Parses the `isstdcnt` header field: the number of standard/wall
/// indicators, stored as a big-endian 32-bit unsigned integer.
fn isstdcnt<Input>() -> impl Parser<Input, Output = usize>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    be_u32().map(|count| count as usize)
}
/// Parses the `leapcnt` header field: the number of leap-second records,
/// stored as a big-endian 32-bit unsigned integer.
fn leapcnt<Input>() -> impl Parser<Input, Output = usize>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    be_u32().map(|count| count as usize)
}
/// Parses the `timecnt` header field: the number of transition times,
/// stored as a big-endian 32-bit unsigned integer.
fn timecnt<Input>() -> impl Parser<Input, Output = usize>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    be_u32().map(|count| count as usize)
}
/// Parses the `typecnt` header field and enforces its invariants:
/// it must be non-zero, and when `isutcnt` or `isstdcnt` is non-zero it
/// must equal `typecnt`.
fn typecnt<Input>(isutcnt: usize, isstdcnt: usize) -> impl Parser<Input, Output = usize>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    be_u32()
        .map(|count| count as usize)
        .then(|count| {
            ensure(
                count,
                |&count| count != 0,
                "typecnt should never be equal to zero",
            )
        })
        .then(move |count| {
            ensure(
                count,
                |&count| isutcnt == 0 || isutcnt == count,
                "if isutcnt is non-zero it should be equal to typecnt",
            )
        })
        .then(move |count| {
            ensure(
                count,
                |&count| isstdcnt == 0 || isstdcnt == count,
                "if isstdcnt is non-zero it should be equal to typecnt",
            )
        })
}
/// Parses the `charcnt` header field: the total length of the time-zone
/// designation string area. A zero value is rejected.
fn charcnt<Input>() -> impl Parser<Input, Output = usize>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    be_u32().map(|count| count as usize).then(|count| {
        ensure(
            count,
            |&count| count != 0,
            "charcnt should never be zero",
        )
    })
}
/// Parses a complete TZif header: the `"TZif"` magic sequence, the version
/// octet, 15 reserved bytes, and the six 32-bit count fields, in the order
/// they appear on the wire.
fn header<Input>() -> impl Parser<Input, Output = TzifHeader>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    magic_sequence()
        .with((
            version(),
            // The 15 bytes after the version octet are reserved; skip them
            // before reading the first count field.
            skip_count(15, any()).with(isutcnt()),
            isstdcnt(),
            leapcnt(),
            timecnt(),
        ))
        .then(|(version, isutcnt, isstdcnt, leapcnt, timecnt)| {
            // typecnt's validity checks depend on the isutcnt/isstdcnt values
            // parsed above, so the last two fields are parsed in a second
            // stage that can capture them.
            combine::struct_parser! {
                TzifHeader {
                    version: value(version),
                    isutcnt: value(isutcnt),
                    isstdcnt: value(isstdcnt),
                    leapcnt: value(leapcnt),
                    timecnt: value(timecnt),
                    typecnt: typecnt(isutcnt, isstdcnt),
                    charcnt: charcnt(),
                }
            }
        })
}
/// Parses one historic transition time.
///
/// Version-1 data blocks (`V == 1`) store 32-bit times, widened to `i64`;
/// later versions store 64-bit times. Values below -2^59 are rejected.
fn historic_transition_time<const V: usize, Input>() -> impl Parser<Input, Output = Seconds>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    let raw_time = if V == 1 {
        be_i32().map(i64::from).left()
    } else {
        be_i64().right()
    };
    raw_time
        .then(|time| {
            ensure(
                time,
                |&time| time >= (-2_i64).pow(59),
                "transition time should not be less than -2.pow(59)",
            )
        })
        .map(Seconds)
}
/// Parses exactly `timecnt` historic transition times and verifies that the
/// sequence is sorted in ascending order.
///
/// Fixes: the error message previously read "ascenting order" (typo); the
/// adjacent-pair check now uses `windows(2)` instead of a zip/skip chain.
fn historic_transition_times<const V: usize, Input>(
    timecnt: usize,
) -> impl Parser<Input, Output = Vec<Seconds>>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    count_min_max(timecnt, timecnt, historic_transition_time::<V, _>()).then(
        |times: Vec<Seconds>| {
            ensure(
                times,
                // windows(2) visits each adjacent pair exactly once; an empty
                // or single-element sequence is trivially sorted.
                |times| times.windows(2).all(|pair| pair[0] <= pair[1]),
                "historic transition times should be in ascending order",
            )
        },
    )
}
/// Parses exactly `timecnt` one-byte transition-type indices and checks that
/// every index refers to an existing local time type record
/// (i.e. is strictly less than `typecnt`).
fn transition_types<Input>(
    timecnt: usize,
    typecnt: usize,
) -> impl Parser<Input, Output = Vec<usize>>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    let type_index = any().map(usize::from);
    count_min_max(timecnt, timecnt, type_index).then(move |indices: Vec<usize>| {
        ensure(
            indices,
            |indices| indices.iter().all(|&index| index < typecnt),
            "all transition types should be in range [0, typecnt - 1]",
        )
    })
}
/// Parses the signed 32-bit UT offset of a local time type record.
///
/// The value `i32::MIN` (i.e. -2^31) is explicitly disallowed.
fn utoff<Input>() -> impl Parser<Input, Output = Seconds>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    be_i32()
        .then(|offset| {
            ensure(
                offset,
                // i32::MIN is exactly -2.pow(31), spelled via the named
                // constant for clarity.
                |&offset| offset != i32::MIN,
                "utoff should never be equal to -2.pow(31)",
            )
        })
        .map(|offset| Seconds(i64::from(offset)))
}
/// Parses a single byte as a boolean flag: 0x00 yields `false`, 0x01 yields
/// `true`, and any other byte fails the parse.
fn boolean<Input>() -> impl Parser<Input, Output = bool>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    let falsy = byte(0).map(|_| false);
    let truthy = byte(1).map(|_| true);
    choice((falsy, truthy))
}
/// Parses the one-byte daylight-saving-time flag of a local time type record
/// (0x00 = standard time, 0x01 = daylight saving time).
fn is_dst<Input>() -> impl Parser<Input, Output = bool>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    boolean()
}
/// Parses the one-byte designation index of a local time type record and
/// checks that it points inside the designation area of `charcnt` bytes.
fn idx<Input>(charcnt: usize) -> impl Parser<Input, Output = usize>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    any().map(usize::from).then(move |index| {
        ensure(index, |&index| index < charcnt, "idx should be less than charcnt")
    })
}
fn local_time_type_record<Input>(charcnt: usize) -> impl Parser<Input, Output = LocalTimeTypeRecord>
where
Input: Stream<Token = u8>,
Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
combine::struct_parser! {
LocalTimeTypeRecord {
utoff: utoff(),
is_dst: is_dst(),
idx: idx(charcnt),
}
}
}
/// Parses exactly `typecnt` local time type records, each validated against
/// the designation area size `charcnt`.
fn local_time_type_records<Input>(
    typecnt: usize,
    charcnt: usize,
) -> impl Parser<Input, Output = Vec<LocalTimeTypeRecord>>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    let record = local_time_type_record(charcnt);
    count_min_max(typecnt, typecnt, record)
}
/// Parses the `charcnt`-byte designation area and splits it into a list of
/// NUL-terminated time-zone designation strings.
///
/// Fixes: the previous version unconditionally chopped the last byte of each
/// chunk, so a (malformed) final designation lacking its NUL terminator
/// silently lost its last character. Only an actual trailing NUL is now
/// stripped.
fn time_zone_designations<Input>(charcnt: usize) -> impl Parser<Input, Output = Vec<String>>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    count_min_max(charcnt, charcnt, any()).map(|bytes: Vec<u8>| {
        bytes
            // split_inclusive keeps the NUL terminator on each chunk and
            // never yields an empty chunk.
            .split_inclusive(|&b| b == b'\0')
            .map(|chunk| {
                // Strip the terminator when present; keep the chunk whole
                // when the area does not end with a NUL.
                let designation = chunk.strip_suffix(&[b'\0']).unwrap_or(chunk);
                String::from_utf8_lossy(designation).into_owned()
            })
            .collect()
    })
}
/// Parses the occurrence field of a leap-second record: a 32-bit time in
/// version-1 data blocks (`V == 1`), a 64-bit time otherwise.
fn leap_second_occurrence<const V: usize, Input>() -> impl Parser<Input, Output = Seconds>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    let raw_occurrence = if V == 1 {
        be_i32().map(i64::from).left()
    } else {
        be_i64().right()
    };
    raw_occurrence.map(Seconds)
}
/// Parses the correction field of a leap-second record as a big-endian
/// signed 32-bit integer.
fn leap_second_correction<Input>() -> impl Parser<Input, Output = i32>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    be_i32()
}
fn leap_second_record<const V: usize, Input>() -> impl Parser<Input, Output = LeapSecondRecord>
where
Input: Stream<Token = u8>,
Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
combine::struct_parser! {
LeapSecondRecord {
occurrence: leap_second_occurrence::<V, _>(),
correction: leap_second_correction(),
}
}
}
/// Parses exactly `leapcnt` leap-second records and validates the
/// sequence-level invariants, one `ensure` stage per rule.
fn leap_second_records<const V: usize, Input>(
    leapcnt: usize,
) -> impl Parser<Input, Output = Vec<LeapSecondRecord>>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    count_min_max(leapcnt, leapcnt, leap_second_record::<V, _>())
        // Rule 1: the first occurrence (when any records exist) must be
        // non-negative.
        .then(|records: Vec<LeapSecondRecord>| {
            ensure(
                records,
                |records| {
                    records
                        .first()
                        .is_none_or(|first| first.occurrence >= Seconds(0))
                },
                "The first leap-second occurrence, if present, must be non-negative",
            )
        })
        // Rule 2: the correction series must start at exactly +1 or -1.
        .then(|records: Vec<LeapSecondRecord>| {
            ensure(
                records,
                |records| {
                    records
                        .first()
                        .is_none_or(|first| first.correction == 1 || first.correction == -1)
                },
                "The first leap-second correction, if present, must be 1 or -1",
            )
        })
        // Rule 3: consecutive occurrences must be at least 2419199 seconds
        // (28 days minus 1 second) apart.
        .then(|records: Vec<LeapSecondRecord>| {
            ensure(
                records,
                |records| {
                    records
                        .iter()
                        .zip(records.iter().skip(1))
                        .all(|(prev, next)| next.occurrence - prev.occurrence >= Seconds(2_419_199))
                },
                "Each subsequent leap-second occurrence must be at least 2419199 greater than the previous value",
            )
        })
        // Rule 4: each correction steps by exactly one from its predecessor.
        .then(|records: Vec<LeapSecondRecord>| {
            ensure(
                records,
                |records| {
                    records
                        .iter()
                        .zip(records.iter().skip(1))
                        .all(|(prev, next)| (next.correction - prev.correction).abs() == 1)
                },
                "Adjacent leap-second corrections must differ by exactly 1",
            )
        })
}
/// Parses a one-byte standard/wall indicator: 0x01 means the corresponding
/// transition time is standard time, 0x00 means wall-clock time.
fn standard_wall_indicator<Input>() -> impl Parser<Input, Output = StandardWallIndicator>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    boolean().map(|is_standard| match is_standard {
        true => StandardWallIndicator::Standard,
        false => StandardWallIndicator::Wall,
    })
}
/// Parses exactly `isstdcnt` standard/wall indicators.
fn standard_wall_indicators<Input>(
    isstdcnt: usize,
) -> impl Parser<Input, Output = Vec<StandardWallIndicator>>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    let indicator = standard_wall_indicator();
    count_min_max(isstdcnt, isstdcnt, indicator)
}
/// Parses a one-byte UT/local indicator: 0x01 means the corresponding
/// transition time is UT, 0x00 means local time.
fn ut_local_indicator<Input>() -> impl Parser<Input, Output = UtLocalIndicator>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    boolean().map(|is_ut| match is_ut {
        true => UtLocalIndicator::Ut,
        false => UtLocalIndicator::Local,
    })
}
/// Parses exactly `isutcnt` UT/local indicators.
///
/// Fixes: the parameter was misleadingly named `isstdcnt`; the caller passes
/// `header.isutcnt`, so the name now matches the header field it carries
/// (positional argument — no caller change needed).
fn ut_local_indicators<Input>(isutcnt: usize) -> impl Parser<Input, Output = Vec<UtLocalIndicator>>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    count_min_max(isutcnt, isutcnt, ut_local_indicator())
}
/// Parses one TZif data block, whose record counts come from `header`.
///
/// `V` selects the time-value width: 32-bit for version-1 blocks, 64-bit for
/// version-2+ blocks. The fields are parsed in exactly the order they appear
/// in the file.
fn data_block<const V: usize, Input>(header: TzifHeader) -> impl Parser<Input, Output = DataBlock>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    combine::struct_parser! {
        DataBlock {
            transition_times: historic_transition_times::<V, _>(header.timecnt),
            transition_types: transition_types(header.timecnt, header.typecnt),
            local_time_type_records: local_time_type_records(header.typecnt, header.charcnt),
            time_zone_designations: time_zone_designations(header.charcnt),
            leap_second_records: leap_second_records::<V, _>(header.leapcnt),
            standard_wall_indicators: standard_wall_indicators(header.isstdcnt),
            ut_local_indicators: ut_local_indicators(header.isutcnt),
        }
    }
}
/// Parses the TZif footer: a POSIX TZ string enclosed between two newline
/// bytes.
fn footer<Input>() -> impl Parser<Input, Output = PosixTzString>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    let newline = || byte(b'\n');
    between(newline(), newline(), posix_tz_string())
}
#[must_use]
/// Parses a complete TZif file into a [`TzifData`].
///
/// A version-1 file contains a single header and one 32-bit data block.
/// Version-2+ files repeat the layout: a second header follows the first
/// (32-bit) data block, then a data block whose time width depends on the
/// second header's version, then a footer with a POSIX TZ string.
pub fn tzif<Input>() -> impl Parser<Input, Output = TzifData>
where
    Input: Stream<Token = u8>,
    Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,
{
    header()
        .then(|header1| {
            if header1.version() == 1 {
                // Version 1: only the legacy 32-bit block exists; there is
                // no second header.
                (
                    value(header1),
                    data_block::<1, _>(header1),
                    value(None).left(),
                )
            } else {
                // Version 2+: the first block is still parsed with 32-bit
                // times (it is the legacy block), then a second header
                // follows.
                (
                    value(header1),
                    data_block::<1, _>(header1),
                    header().map(Some).right(),
                )
            }
        })
        .then(|(header1, block1, header2)| match header2 {
            // No second header: version-1 file, no second block or footer.
            None => combine::struct_parser! {
                TzifData {
                    header1: value(header1),
                    data_block1: value(block1),
                    header2: value(header2),
                    data_block2: value(None),
                    footer: value(None),
                }
            }
            .left(),
            // Second header present: parse the 64-bit block (version
            // selects const parameter 2 or 3) and the footer.
            Some(header) => (match header.version() {
                2 => combine::struct_parser! {
                    TzifData {
                        header1: value(header1),
                        data_block1: value(block1),
                        header2: value(header2),
                        data_block2: data_block::<2, _>(header).map(Some),
                        footer: footer().map(Some),
                    }
                }
                .left(),
                _ => combine::struct_parser! {
                    TzifData {
                        header1: value(header1),
                        data_block1: value(block1),
                        header2: value(header2),
                        data_block2: data_block::<3, _>(header).map(Some),
                        footer: footer().map(Some),
                    }
                }
                .right(),
            })
            .right(),
        })
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::data::posix::{
        DstTransitionInfo, TimeZoneVariantInfo, TransitionDate, TransitionDay,
    };
    use crate::data::time::Hours;
    use crate::{assert_parse_eq, assert_parse_err, assert_parse_ok};
    use combine::EasyParser;

    // Byte values reused as expected count values throughout the tests.
    const A: usize = b'A' as usize;
    const B: usize = b'B' as usize;
    const C: usize = b'C' as usize;
    const D: usize = b'D' as usize;

    /// The magic sequence must be exactly "TZif", case-sensitively.
    #[test]
    fn parse_magic_sequence() {
        assert_parse_err!(magic_sequence(), "");
        assert_parse_err!(magic_sequence(), "asdf");
        assert_parse_err!(magic_sequence(), "tzif");
        assert_parse_err!(magic_sequence(), "TZIF");
        assert_parse_ok!(magic_sequence(), "TZif");
    }

    /// Only NUL (version 1) and the digits '2'/'3' are valid version bytes.
    #[test]
    fn parse_version() {
        assert_parse_err!(version(), "");
        assert_parse_err!(version(), "0");
        assert_parse_err!(version(), "1");
        assert_parse_err!(version(), "4");
        assert_parse_eq!(version(), "\x00", 1);
        assert_parse_eq!(version(), "2", 2);
        assert_parse_eq!(version(), "3", 3);
    }

    /// The unvalidated count fields need exactly four big-endian bytes.
    #[test]
    fn parse_trivial_count_values() {
        assert_parse_err!(isutcnt(), "");
        assert_parse_err!(isstdcnt(), "");
        assert_parse_err!(leapcnt(), "");
        assert_parse_err!(timecnt(), "");
        assert_parse_err!(isutcnt(), "\x00");
        assert_parse_err!(isutcnt(), "\x00\x00");
        assert_parse_err!(isutcnt(), "\x00\x00\x00");
        assert_parse_err!(isstdcnt(), "\x00");
        assert_parse_err!(isstdcnt(), "\x00\x00");
        assert_parse_err!(isstdcnt(), "\x00\x00\x00");
        assert_parse_err!(leapcnt(), "\x00");
        assert_parse_err!(leapcnt(), "\x00\x00");
        assert_parse_err!(leapcnt(), "\x00\x00\x00");
        assert_parse_err!(timecnt(), "\x00");
        assert_parse_err!(timecnt(), "\x00\x00");
        assert_parse_err!(timecnt(), "\x00\x00\x00");
        assert_parse_eq!(isutcnt(), "\x00\x00\x00\x41", A);
        assert_parse_eq!(isutcnt(), "\x00\x00\x00\x00", 0);
        assert_parse_eq!(isstdcnt(), "\x00\x00\x00\x42", B);
        assert_parse_eq!(isstdcnt(), "\x00\x00\x00\x00", 0);
        assert_parse_eq!(leapcnt(), "\x00\x00\x00\x43", C);
        assert_parse_eq!(leapcnt(), "\x00\x00\x00\x00", 0);
        assert_parse_eq!(timecnt(), "\x00\x00\x00\x44", D);
        assert_parse_eq!(timecnt(), "\x00\x00\x00\x00", 0);
    }

    /// typecnt must be non-zero and consistent with isutcnt/isstdcnt.
    #[test]
    fn parse_typecnt() {
        assert_parse_err!(typecnt(0, 0), "");
        assert_parse_err!(typecnt(0, 0), "\x00");
        assert_parse_err!(typecnt(0, 0), "\x00\x00");
        assert_parse_err!(typecnt(0, 0), "\x00\x00\x00");
        assert_parse_err!(typecnt(0, 0), "\x00\x00\x00\x00");
        assert_parse_err!(typecnt(B, 0), "\x00\x00\x00\x41");
        assert_parse_err!(typecnt(0, B), "\x00\x00\x00\x41");
        assert_parse_err!(typecnt(A, B), "\x00\x00\x00\x41");
        assert_parse_err!(typecnt(B, A), "\x00\x00\x00\x41");
        assert_parse_eq!(typecnt(0, 0), "\x00\x00\x00\x41", A);
        assert_parse_eq!(typecnt(B, B), "\x00\x00\x00\x42", B);
    }

    /// charcnt must be present and non-zero.
    #[test]
    fn parse_charcnt() {
        assert_parse_err!(charcnt(), "");
        assert_parse_err!(charcnt(), "\x00");
        assert_parse_err!(charcnt(), "\x00\x00");
        assert_parse_err!(charcnt(), "\x00\x00\x00");
        assert_parse_err!(charcnt(), "\x00\x00\x00\x00");
        assert_parse_eq!(charcnt(), "\x00\x00\x00\x41", A);
        assert_parse_eq!(charcnt(), "\x00\x00\x00\x42", B);
    }

    /// Full headers: the error cases cover a zero typecnt, a zero charcnt,
    /// and isutcnt/isstdcnt values that disagree with typecnt.
    #[test]
    fn parse_header() {
        assert_parse_err!(header(), "");
        assert_parse_err!(
            header(),
            "TZif1\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\0\0\0\x01\0\0\0\0\0\0\0",
        );
        assert_parse_err!(
            header(),
            "TZif2\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0",
        );
        assert_parse_err!(
            header(),
            "TZif2\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0",
        );
        assert_parse_err!(
            header(),
            "TZif2\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x02\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\0\0\0\x01\0\0\0\0\0\0\0",
        );
        assert_parse_err!(
            header(),
            "TZif2\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x02\0\0\0\0\0\0\0\0\0\0\0\x01\0\0\0\x01\0\0\0\0\0\0\0",
        );
        assert_parse_eq!(
            header(),
            "TZif2\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\0\0\0\x01\0\0\0\0\0\0\0",
            TzifHeader {
                version: 2,
                isutcnt: 0,
                isstdcnt: 0,
                leapcnt: 0,
                timecnt: 0,
                typecnt: 1,
                charcnt: 1,
            }
        );
    }

    /// Transition times: width depends on V, and -2^59 is the lower bound.
    #[test]
    fn parse_historic_transition_time() {
        const ONE: &[u8] = 1i64.to_be_bytes().as_slice();
        const AT_BOUNDARY: &[u8] = (-2_i64).pow(59).to_be_bytes().as_slice();
        const OUT_OF_BOUNDS: &[u8] = ((-2_i64).pow(59) - 1).to_be_bytes().as_slice();
        assert_parse_err!(
            historic_transition_time::<2, _>(),
            bytes OUT_OF_BOUNDS,
        );
        assert_parse_eq!(
            historic_transition_time::<2, _>(),
            bytes ONE,
            Seconds(1),
        );
        // V = 1 reads only the first four bytes of the 8-byte buffer.
        assert_parse_eq!(
            historic_transition_time::<1, _>(),
            bytes ONE,
            Seconds(0),
        );
        assert_parse_eq!(
            historic_transition_time::<1, _>(),
            bytes ONE[ONE.len() / 2..].as_ref(),
            Seconds(1),
        );
        assert_parse_eq!(
            historic_transition_time::<3, _>(),
            bytes AT_BOUNDARY,
            Seconds((-2_i64).pow(59)),
        );
    }

    /// Sequences must be ascending and exactly `timecnt` entries long.
    #[test]
    fn parse_historic_transition_times() {
        const ONE: &[u8] = 1i64.to_be_bytes().as_slice();
        const TWO: &[u8] = 2i64.to_be_bytes().as_slice();
        const SIX: &[u8] = 6i64.to_be_bytes().as_slice();
        let ascending = ONE
            .iter()
            .chain(TWO)
            .chain(SIX)
            .copied()
            .collect::<Vec<u8>>();
        let descending = SIX
            .iter()
            .chain(TWO)
            .chain(ONE)
            .copied()
            .collect::<Vec<u8>>();
        assert_parse_err!(
            historic_transition_times::<2, _>(3),
            bytes descending.as_slice(),
        );
        assert_parse_err!(
            historic_transition_times::<2, _>(4),
            bytes ascending.as_slice(),
        );
        assert_parse_eq!(
            historic_transition_times::<2, _>(3),
            bytes ascending.as_slice(),
            vec![
                Seconds(1),
                Seconds(2),
                Seconds(6),
            ]
        );
    }

    /// Transition-type indices must stay below typecnt.
    #[test]
    fn parse_transition_types() {
        assert_parse_err!(transition_types(3, 3), "");
        assert_parse_err!(transition_types(3, 3), "\x00\x01");
        assert_parse_err!(transition_types(3, 3), "\x00\x01\x03");
        assert_parse_eq!(transition_types(3, 3), "\x00\x01\x02", vec![0, 1, 2],);
        assert_parse_eq!(transition_types(3, 3), "\x02\x01\x01", vec![2, 1, 1],);
    }

    /// utoff accepts any i32 except i32::MIN.
    #[test]
    fn parse_utoff() {
        const ONE: &[u8] = 1i32.to_be_bytes().as_slice();
        const TWO: &[u8] = 2i32.to_be_bytes().as_slice();
        const SIX: &[u8] = 6i32.to_be_bytes().as_slice();
        const INVALID: &[u8] = ((-2i32).pow(31)).to_be_bytes().as_slice();
        assert_parse_err!(utoff(), bytes INVALID);
        assert_parse_eq!(utoff(), bytes ONE, Seconds(1));
        assert_parse_eq!(utoff(), bytes TWO, Seconds(2));
        assert_parse_eq!(utoff(), bytes SIX, Seconds(6));
    }

    /// The DST flag accepts only the raw bytes 0x00 and 0x01 (not ASCII).
    #[test]
    fn parse_is_dst() {
        assert_parse_err!(is_dst(), "");
        assert_parse_err!(is_dst(), "0");
        assert_parse_err!(is_dst(), "1");
        assert_parse_eq!(is_dst(), "\x00", false);
        assert_parse_eq!(is_dst(), "\x01", true);
    }

    /// Designation indices must stay below charcnt.
    #[test]
    fn parse_idx() {
        assert_parse_err!(idx(0), "");
        assert_parse_err!(idx(3), "\x03");
        assert_parse_eq!(idx(3), "\x00", 0);
        assert_parse_eq!(idx(3), "\x01", 1);
        assert_parse_eq!(idx(3), "\x02", 2);
    }

    /// A record is six bytes: 4-byte utoff, 1-byte DST flag, 1-byte index.
    #[test]
    fn parse_local_time_type_record() {
        assert_parse_eq!(
            local_time_type_record(3),
            "\x00\x00\x00\x10\x01\x02",
            LocalTimeTypeRecord {
                utoff: Seconds(16),
                is_dst: true,
                idx: 2
            }
        );
        assert_parse_eq!(
            local_time_type_record(3),
            "\x00\x00\x10\x10\x00\x01",
            LocalTimeTypeRecord {
                utoff: Seconds(16 * 16 * 16 + 16),
                is_dst: false,
                idx: 1,
            }
        );
    }

    /// Exactly `typecnt` records must be present.
    #[test]
    fn parse_local_time_type_records() {
        assert_parse_err!(
            local_time_type_records(3, 3),
            "\x00\x00\x00\x10\x01\x02\x00\x00\x10\x10\x00\x01",
        );
        assert_parse_eq!(
            local_time_type_records(2, 3),
            "\x00\x00\x00\x10\x01\x02\x00\x00\x10\x10\x00\x01",
            vec![
                LocalTimeTypeRecord {
                    utoff: Seconds(16),
                    is_dst: true,
                    idx: 2
                },
                LocalTimeTypeRecord {
                    utoff: Seconds(16 * 16 * 16 + 16),
                    is_dst: false,
                    idx: 1,
                },
            ]
        );
    }

    /// The designation area splits on NUL terminators.
    #[test]
    fn parse_time_zone_designations() {
        assert_parse_eq!(
            time_zone_designations(14),
            "LMT\0AEDT\0AEST\0",
            vec!["LMT".to_owned(), "AEDT".to_owned(), "AEST".to_owned()],
        );
    }

    /// Designation lookup by byte index, including mid-string indices.
    #[test]
    fn time_zone_designation_indexing() {
        let block: &[u8] = &[
            // Four local time type records (utoff 16, DST, idx 0/3/4/5)…
            0x00, 0x00, 0x00, 0x10, 0x01, 0x00,
            0x00, 0x00, 0x00, 0x10, 0x01, 0x03,
            0x00, 0x00, 0x00, 0x10, 0x01, 0x04,
            0x00, 0x00, 0x00, 0x10, 0x01, 0x05,
            // …followed by the 9-byte designation area "LMT\0AEDT\0".
            b'L', b'M', b'T', 0x00, b'A', b'E', b'D', b'T', 0x00,
        ];
        let header = TzifHeader {
            version: 0,
            isutcnt: 0,
            isstdcnt: 0,
            leapcnt: 0,
            timecnt: 0,
            typecnt: 4,
            charcnt: 9,
        };
        let (block, _) = data_block::<1, _>(header).parse(block).unwrap();
        assert_eq!(
            block.time_zone_designation(block.local_time_type_records[0].idx),
            Some("LMT")
        );
        assert_eq!(
            block.time_zone_designation(block.local_time_type_records[1].idx),
            Some("")
        );
        assert_eq!(
            block.time_zone_designation(block.local_time_type_records[2].idx),
            Some("AEDT")
        );
        // An index into the middle of a designation yields its suffix.
        assert_eq!(
            block.time_zone_designation(block.local_time_type_records[3].idx),
            Some("EDT")
        );
        assert_eq!(block.time_zone_designation(8), Some(""));
        assert_eq!(block.time_zone_designation(9), None);
    }

    /// Occurrences are 32-bit for V = 1 and 64-bit for V >= 2.
    ///
    /// NOTE(review): this test previously exercised `historic_transition_time`
    /// by mistake; it now targets `leap_second_occurrence`, the parser it is
    /// named after. The expected values are unchanged.
    #[test]
    fn parse_leap_second_occurrence() {
        const FIVE: &[u8] = 5i64.to_be_bytes().as_slice();
        // V = 1 reads only the first four (zero) bytes of the buffer.
        assert_parse_eq!(
            leap_second_occurrence::<1, _>(),
            bytes FIVE,
            Seconds(0),
        );
        assert_parse_eq!(
            leap_second_occurrence::<1, _>(),
            bytes FIVE[FIVE.len() / 2..].as_ref(),
            Seconds(5),
        );
        assert_parse_eq!(
            leap_second_occurrence::<2, _>(),
            bytes FIVE,
            Seconds(5),
        );
        assert_parse_eq!(
            leap_second_occurrence::<3, _>(),
            bytes FIVE,
            Seconds(5),
        );
    }

    /// Record layout: occurrence (4 or 8 bytes by version) then correction.
    #[test]
    fn parse_leap_second_record() {
        const ONE_64BIT: &[u8] = 1i64.to_be_bytes().as_slice();
        const ONE_32BIT: &[u8] = 1i32.to_be_bytes().as_slice();
        let record_v1 = ONE_32BIT
            .iter()
            .chain(ONE_32BIT)
            .copied()
            .collect::<Vec<u8>>();
        let record_v2p = ONE_64BIT
            .iter()
            .chain(ONE_32BIT)
            .copied()
            .collect::<Vec<u8>>();
        assert_parse_eq!(
            leap_second_record::<1, _>(),
            bytes record_v1.as_slice(),
            LeapSecondRecord {
                occurrence: Seconds(1),
                correction: 1,
            }
        );
        assert_parse_eq!(
            leap_second_record::<2, _>(),
            bytes record_v2p.as_slice(),
            LeapSecondRecord {
                occurrence: Seconds(1),
                correction: 1,
            }
        );
        assert_parse_eq!(
            leap_second_record::<3, _>(),
            bytes record_v2p.as_slice(),
            LeapSecondRecord {
                occurrence: Seconds(1),
                correction: 1,
            }
        );
    }

    /// Sequence-level leap-second invariants: first occurrence non-negative,
    /// first correction ±1, minimum spacing, corrections step by one.
    #[test]
    fn parse_leap_second_records() {
        let invalid_first_occurrence = (-5i64)
            .to_be_bytes()
            .iter()
            .copied()
            .chain(1i32.to_be_bytes().iter().copied())
            .collect::<Vec<u8>>();
        let invalid_first_correction = 0i64
            .to_be_bytes()
            .iter()
            .copied()
            .chain(0i32.to_be_bytes().iter().copied())
            .collect::<Vec<u8>>();
        let invalid_second_occurrence = 0i64
            .to_be_bytes()
            .iter()
            .copied()
            .chain(1i32.to_be_bytes().iter().copied())
            .chain(2419198i64.to_be_bytes().iter().copied())
            .chain(2i32.to_be_bytes().iter().copied())
            .chain((2 * 2419199i64).to_be_bytes().iter().copied())
            .chain(3i32.to_be_bytes().iter().copied())
            .collect::<Vec<u8>>();
        let invalid_second_correction = 0i64
            .to_be_bytes()
            .iter()
            .copied()
            .chain(1i32.to_be_bytes().iter().copied())
            .chain(2419199i64.to_be_bytes().iter().copied())
            .chain(3i32.to_be_bytes().iter().copied())
            .chain((2 * 2419199i64).to_be_bytes().iter().copied())
            .chain(4i32.to_be_bytes().iter().copied())
            .collect::<Vec<u8>>();
        let valid_v1 = 0i32
            .to_be_bytes()
            .iter()
            .copied()
            .chain(1i32.to_be_bytes().iter().copied())
            .chain(2419199i32.to_be_bytes().iter().copied())
            .chain(2i32.to_be_bytes().iter().copied())
            .chain((2 * 2419199i32).to_be_bytes().iter().copied())
            .chain(3i32.to_be_bytes().iter().copied())
            .collect::<Vec<u8>>();
        let valid_v2p = 0i64
            .to_be_bytes()
            .iter()
            .copied()
            .chain(1i32.to_be_bytes().iter().copied())
            .chain(2419199i64.to_be_bytes().iter().copied())
            .chain(2i32.to_be_bytes().iter().copied())
            .chain((2 * 2419199i64).to_be_bytes().iter().copied())
            .chain(3i32.to_be_bytes().iter().copied())
            .collect::<Vec<u8>>();
        assert_parse_err!(
            leap_second_records::<2, _>(4),
            bytes valid_v2p.as_slice(),
        );
        assert_parse_err!(
            leap_second_records::<2, _>(1),
            bytes invalid_first_correction.as_slice(),
        );
        assert_parse_err!(
            leap_second_records::<2, _>(1),
            bytes invalid_first_occurrence.as_slice(),
        );
        assert_parse_err!(
            leap_second_records::<2, _>(2),
            bytes invalid_second_correction.as_slice(),
        );
        assert_parse_err!(
            leap_second_records::<2, _>(2),
            bytes invalid_second_occurrence.as_slice(),
        );
        assert_parse_eq!(
            leap_second_records::<1, _>(2),
            bytes valid_v1.as_slice(),
            vec![
                LeapSecondRecord {
                    occurrence: Seconds(0),
                    correction: 1,
                },
                LeapSecondRecord {
                    occurrence: Seconds(2419199),
                    correction: 2,
                },
            ],
        );
        assert_parse_eq!(
            leap_second_records::<2, _>(2),
            bytes valid_v2p.as_slice(),
            vec![
                LeapSecondRecord {
                    occurrence: Seconds(0),
                    correction: 1,
                },
                LeapSecondRecord {
                    occurrence: Seconds(2419199),
                    correction: 2,
                },
            ],
        );
    }

    /// 0x01 maps to Standard, 0x00 to Wall; other bytes fail.
    #[test]
    fn parse_standard_wall_indicators() {
        assert_parse_err!(standard_wall_indicators(3), "");
        assert_parse_err!(standard_wall_indicators(3), "\x00\x01");
        assert_parse_err!(standard_wall_indicators(3), "\x00\x01\x02");
        assert_parse_eq!(
            standard_wall_indicators(0),
            "",
            Vec::<StandardWallIndicator>::new()
        );
        assert_parse_eq!(
            standard_wall_indicators(4),
            "\x00\x01\x01\x00",
            vec![
                StandardWallIndicator::Wall,
                StandardWallIndicator::Standard,
                StandardWallIndicator::Standard,
                StandardWallIndicator::Wall,
            ]
        );
    }

    /// 0x01 maps to Ut, 0x00 to Local; other bytes fail.
    #[test]
    fn parse_ut_local_indicators() {
        assert_parse_err!(ut_local_indicators(3), "");
        assert_parse_err!(ut_local_indicators(3), "\x00\x01");
        assert_parse_err!(ut_local_indicators(3), "\x00\x01\x02");
        assert_parse_eq!(ut_local_indicators(0), "", Vec::<UtLocalIndicator>::new());
        assert_parse_eq!(
            ut_local_indicators(4),
            "\x01\x00\x00\x01",
            vec![
                UtLocalIndicator::Ut,
                UtLocalIndicator::Local,
                UtLocalIndicator::Local,
                UtLocalIndicator::Ut,
            ]
        );
    }

    /// The footer requires newline delimiters on both sides of the TZ string.
    #[test]
    fn parse_footer() {
        assert_parse_err!(footer(), "EST+5EDT,M3.2.0/2,M11.1.0/2\n");
        assert_parse_err!(footer(), "\nEST+5EDT,M3.2.0/2,M11.1.0/2");
        assert_parse_eq!(
            footer(),
            "\nEST+5EDT,M3.2.0/2,M11.1.0/2\n",
            PosixTzString {
                std_info: TimeZoneVariantInfo {
                    name: "EST".to_owned(),
                    offset: Hours(5).as_seconds(),
                },
                dst_info: Some(DstTransitionInfo {
                    variant_info: TimeZoneVariantInfo {
                        name: "EDT".to_owned(),
                        // DST offset defaults to one hour less than std.
                        offset: Hours(4).as_seconds()
                    },
                    start_date: TransitionDate {
                        day: TransitionDay::Mwd(3, 2, 0),
                        time: Hours(2).as_seconds(),
                    },
                    end_date: TransitionDate {
                        day: TransitionDay::Mwd(11, 1, 0),
                        time: Hours(2).as_seconds(),
                    },
                })
            }
        );
    }
}