#![cfg_attr(not(any(test, feature = "std")), no_std)]
#![cfg_attr(
not(test),
deny(
clippy::indexing_slicing,
clippy::unwrap_used,
clippy::expect_used,
clippy::panic,
clippy::exhaustive_structs,
clippy::exhaustive_enums,
// TODO(#2266): enable missing_debug_implementations,
)
)]
#![warn(missing_docs)]
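//! Normalizing text into Unicode Normalization Forms.
//!
//! This crate provides decomposing (NFD/NFKD) and composing (NFC/NFKC)
//! normalizers operating on `&str`, potentially-invalid UTF-8, and
//! potentially-invalid UTF-16, as well as lazy iterator adaptors over `char`s.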
extern crate alloc;
mod error;
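/// Per-character normalization-related lookups (e.g. canonical composition,
/// canonical decomposition, and canonical combining class).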
pub mod properties;
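/// Data provider struct definitions for this crate.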
pub mod provider;
pub use crate::error::NormalizerError;
use crate::provider::CanonicalDecompositionDataV1Marker;
use crate::provider::CompatibilityDecompositionSupplementV1Marker;
use crate::provider::DecompositionDataV1;
#[cfg(feature = "experimental")]
use crate::provider::Uts46DecompositionSupplementV1Marker;
use alloc::string::String;
use alloc::vec::Vec;
use core::char::REPLACEMENT_CHARACTER;
use core::str::from_utf8_unchecked;
use icu_collections::char16trie::Char16Trie;
use icu_collections::char16trie::Char16TrieIterator;
use icu_collections::char16trie::TrieResult;
use icu_collections::codepointtrie::CodePointTrie;
use icu_properties::CanonicalCombiningClass;
use icu_provider::prelude::*;
use icu_provider::zerofrom::ZeroFrom;
use provider::CanonicalCompositionsV1Marker;
use provider::CanonicalDecompositionTablesV1Marker;
use provider::CompatibilityDecompositionTablesV1Marker;
use provider::DecompositionSupplementV1;
use provider::DecompositionTablesV1;
use smallvec::SmallVec;
use utf16_iter::Utf16CharsEx;
use utf8_iter::Utf8CharsEx;
use write16::Write16;
use zerovec::ule::AsULE;
use zerovec::ZeroSlice;
enum SupplementPayloadHolder {
Compatibility(DataPayload<CompatibilityDecompositionSupplementV1Marker>),
#[cfg(feature = "experimental")]
Uts46(DataPayload<Uts46DecompositionSupplementV1Marker>),
}
impl SupplementPayloadHolder {
fn get(&self) -> &DecompositionSupplementV1 {
match self {
SupplementPayloadHolder::Compatibility(d) => d.get(),
#[cfg(feature = "experimental")]
SupplementPayloadHolder::Uts46(d) => d.get(),
}
}
}
const UTF16_FAST_PATH_FLUSH_THRESHOLD: usize = 4096;
const BACKWARD_COMBINING_STARTER_MARKER: u32 = 1;
const SPECIAL_NON_STARTER_DECOMPOSITION_MARKER: u32 = 2;
const SPECIAL_NON_STARTER_DECOMPOSITION_MARKER_U16: u16 = 2;
const NON_ROUND_TRIP_MARKER: u16 = 1;
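// Layout of the decomposition trie values, as far as it can be inferred from
// the checks below: 0 marks a starter that decomposes to itself and cannot
// combine backwards; BACKWARD_COMBINING_STARTER_MARKER marks such a starter
// that can combine backwards; SPECIAL_NON_STARTER_DECOMPOSITION_MARKER marks
// the few non-starters whose decompositions are special-cased in
// `gather_and_sort_combining`; values of the form 0xD800 | ccc mark
// non-starters that decompose to themselves with the given canonical
// combining class; all other values pack a decomposition into a low-half
// "lead" and a high-half "trail_or_complex" (see `decomposing_next`).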
fn trie_value_has_ccc(trie_value: u32) -> bool {
(trie_value & 0xFFFFFF00) == 0xD800
}
fn trie_value_indicates_special_non_starter_decomposition(trie_value: u32) -> bool {
trie_value == SPECIAL_NON_STARTER_DECOMPOSITION_MARKER
}
fn decomposition_starts_with_non_starter(trie_value: u32) -> bool {
trie_value_has_ccc(trie_value)
|| trie_value_indicates_special_non_starter_decomposition(trie_value)
}
fn ccc_from_trie_value(trie_value: u32) -> CanonicalCombiningClass {
if trie_value_has_ccc(trie_value) {
CanonicalCombiningClass(trie_value as u8)
} else {
debug_assert_ne!(trie_value, SPECIAL_NON_STARTER_DECOMPOSITION_MARKER);
CanonicalCombiningClass::NotReordered
}
}
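// The tail (everything after the initial U+0635) of the NFKD of U+FDFA; the
// leading U+0635 is produced as the starter directly in `decomposing_next`.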
static FDFA_NFKD: [u16; 17] = [
0x644, 0x649, 0x20, 0x627, 0x644, 0x644, 0x647, 0x20, 0x639, 0x644, 0x64A, 0x647, 0x20, 0x648,
0x633, 0x644, 0x645,
];
const FDFA_MARKER: u16 = 3;
const HANGUL_S_BASE: u32 = 0xAC00;
const HANGUL_L_BASE: u32 = 0x1100;
const HANGUL_V_BASE: u32 = 0x1161;
const HANGUL_T_BASE: u32 = 0x11A7;
const HANGUL_L_COUNT: u32 = 19;
const HANGUL_V_COUNT: u32 = 21;
const HANGUL_T_COUNT: u32 = 28;
const HANGUL_N_COUNT: u32 = 588;
const HANGUL_S_COUNT: u32 = 11172;
const HANGUL_JAMO_LIMIT: u32 = 0x1200;
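// Hangul syllables are composed and decomposed arithmetically rather than via
// data: S = HANGUL_S_BASE + L_index * HANGUL_N_COUNT + V_index * HANGUL_T_COUNT + T_index.
// For example, U+AC01 has offset 1 from HANGUL_S_BASE, so L_index = 0,
// V_index = 0, and T_index = 1, giving the decomposition U+1100 U+1161 U+11A8.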
#[inline(always)]
fn unwrap_or_gigo<T>(opt: Option<T>, default: T) -> T {
if let Some(val) = opt {
val
} else {
debug_assert!(false);
default
}
}
#[inline(always)]
fn char_from_u32(u: u32) -> char {
unwrap_or_gigo(core::char::from_u32(u), REPLACEMENT_CHARACTER)
}
#[inline(always)]
fn char_from_u16(u: u16) -> char {
char_from_u32(u32::from(u))
}
const EMPTY_U16: &ZeroSlice<u16> =
ZeroSlice::<u16>::from_ule_slice(&<u16 as AsULE>::ULE::from_array([]));
const EMPTY_CHAR: &ZeroSlice<char> = ZeroSlice::new_empty();
#[inline(always)]
fn in_inclusive_range(c: char, start: char, end: char) -> bool {
u32::from(c).wrapping_sub(u32::from(start)) <= (u32::from(end) - u32::from(start))
}
#[inline(always)]
fn in_inclusive_range32(u: u32, start: u32, end: u32) -> bool {
u.wrapping_sub(start) <= (end - start)
}
#[inline(always)]
fn in_inclusive_range16(u: u16, start: u16, end: u16) -> bool {
u.wrapping_sub(start) <= (end - start)
}
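// Canonical composition of `starter` and `second`: Hangul LV and LVT
// compositions are computed arithmetically, and everything else is looked up
// in the canonical compositions trie.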
#[inline]
fn compose(iter: Char16TrieIterator, starter: char, second: char) -> Option<char> {
let v = u32::from(second).wrapping_sub(HANGUL_V_BASE);
if v >= HANGUL_JAMO_LIMIT - HANGUL_V_BASE {
return compose_non_hangul(iter, starter, second);
}
if v < HANGUL_V_COUNT {
let l = u32::from(starter).wrapping_sub(HANGUL_L_BASE);
if l < HANGUL_L_COUNT {
let lv = l * HANGUL_N_COUNT + v * HANGUL_T_COUNT;
return Some(unsafe { char::from_u32_unchecked(HANGUL_S_BASE + lv) });
}
return None;
}
if in_inclusive_range(second, '\u{11A8}', '\u{11C2}') {
let lv = u32::from(starter).wrapping_sub(HANGUL_S_BASE);
if lv < HANGUL_S_COUNT && lv % HANGUL_T_COUNT == 0 {
let lvt = lv + (u32::from(second) - HANGUL_T_BASE);
return Some(unsafe { char::from_u32_unchecked(HANGUL_S_BASE + lvt) });
}
}
None
}
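// Trie-only composition. The trie is traversed with the second character
// first and the starter second, which is why `second` is fed to the iterator
// before `starter`.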
fn compose_non_hangul(mut iter: Char16TrieIterator, starter: char, second: char) -> Option<char> {
match iter.next(second) {
TrieResult::NoMatch => None,
TrieResult::NoValue => match iter.next(starter) {
TrieResult::NoMatch => None,
TrieResult::FinalValue(i) => {
if let Some(c) = char::from_u32(i as u32) {
Some(c)
} else {
debug_assert!(false);
None
}
}
TrieResult::NoValue | TrieResult::Intermediate(_) => {
debug_assert!(false);
None
}
},
TrieResult::FinalValue(_) | TrieResult::Intermediate(_) => {
debug_assert!(false);
None
}
}
}
#[derive(Debug, PartialEq, Eq)]
struct CharacterAndTrieValue {
character: char,
trie_val: u32,
from_supplement: bool,
}
impl CharacterAndTrieValue {
#[inline(always)]
pub fn new(c: char, trie_value: u32) -> Self {
CharacterAndTrieValue {
character: c,
trie_val: trie_value,
from_supplement: false,
}
}
#[inline(always)]
pub fn new_from_supplement(c: char, trie_value: u32) -> Self {
CharacterAndTrieValue {
character: c,
trie_val: trie_value,
from_supplement: true,
}
}
#[inline(always)]
pub fn starter_and_decomposes_to_self(&self) -> bool {
if self.trie_val > BACKWARD_COMBINING_STARTER_MARKER {
return false;
}
u32::from(self.character).wrapping_sub(HANGUL_S_BASE) >= HANGUL_S_COUNT
}
#[inline(always)]
pub fn can_combine_backwards(&self) -> bool {
decomposition_starts_with_non_starter(self.trie_val)
|| self.trie_val == BACKWARD_COMBINING_STARTER_MARKER
|| in_inclusive_range32(self.trie_val, 0x1161, 0x11C2)
}
#[inline(always)]
pub fn potential_passthrough(&self) -> bool {
self.potential_passthrough_impl(BACKWARD_COMBINING_STARTER_MARKER)
}
#[inline(always)]
pub fn potential_passthrough_and_cannot_combine_backwards(&self) -> bool {
self.potential_passthrough_impl(0)
}
#[inline(always)]
fn potential_passthrough_impl(&self, bound: u32) -> bool {
if self.trie_val <= bound {
return true;
}
if self.from_supplement {
return false;
}
let trail_or_complex = (self.trie_val >> 16) as u16;
if trail_or_complex == 0 {
return false;
}
let lead = self.trie_val as u16;
if lead == 0 {
return true;
}
if lead == NON_ROUND_TRIP_MARKER {
return false;
}
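// The second character of the decomposition is a Brahmic-script nukta
// (U+093C or one of its analogues in the U+0900..U+0BFF range), so the
// character is not safe to pass through.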
if (trail_or_complex & 0x7F) == 0x3C
&& in_inclusive_range16(trail_or_complex, 0x0900, 0x0BFF)
{
return false;
}
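// Hebrew presentation forms in this range have decompositions (letter plus
// point(s)); exclude them from passthrough.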
if in_inclusive_range(self.character, '\u{FB1D}', '\u{FB4E}') {
return false;
}
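// Conservatively exclude this Greek Extended range, which contains the
// with-oxia characters whose canonical mappings are singletons to tonos forms.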
if in_inclusive_range(self.character, '\u{1F71}', '\u{1FFB}') {
return false;
}
true
}
}
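// Packs a scalar value (low 24 bits) together with its canonical combining
// class (high 8 bits). A high byte of 0xFF is a placeholder meaning the class
// has not been looked up yet; see `set_ccc_from_trie_if_not_already_set`.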
#[derive(Debug)]
struct CharacterAndClass(u32);
impl CharacterAndClass {
pub fn new(c: char, ccc: CanonicalCombiningClass) -> Self {
CharacterAndClass(u32::from(c) | (u32::from(ccc.0) << 24))
}
pub fn new_with_placeholder(c: char) -> Self {
CharacterAndClass(u32::from(c) | ((0xFF) << 24))
}
pub fn new_with_trie_value(c_tv: CharacterAndTrieValue) -> Self {
Self::new(c_tv.character, ccc_from_trie_value(c_tv.trie_val))
}
pub fn new_starter(c: char) -> Self {
CharacterAndClass(u32::from(c))
}
pub fn character(&self) -> char {
unsafe { char::from_u32_unchecked(self.0 & 0xFFFFFF) }
}
pub fn ccc(&self) -> CanonicalCombiningClass {
CanonicalCombiningClass((self.0 >> 24) as u8)
}
pub fn character_and_ccc(&self) -> (char, CanonicalCombiningClass) {
(self.character(), self.ccc())
}
pub fn set_ccc_from_trie_if_not_already_set(&mut self, trie: &CodePointTrie<u32>) {
if self.0 >> 24 != 0xFF {
return;
}
let scalar = self.0 & 0xFFFFFF;
self.0 = ((ccc_from_trie_value(trie.get32_u32(scalar)).0 as u32) << 24) | scalar;
}
}
#[inline(always)]
fn sort_slice_by_ccc(slice: &mut [CharacterAndClass], trie: &CodePointTrie<u32>) {
if slice.len() < 2 {
return;
}
slice
.iter_mut()
.for_each(|cc| cc.set_ccc_from_trie_if_not_already_set(trie));
slice.sort_by_key(|cc| cc.ccc());
}
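/// An iterator adaptor yielding the decomposing normalization (NFD or NFKD,
/// depending on the data it was constructed with) of the `char`s produced by
/// a delegate iterator.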
pub struct Decomposition<'data, I>
where
I: Iterator<Item = char>,
{
delegate: I,
buffer: SmallVec<[CharacterAndClass; 17]>,
buffer_pos: usize,
pending: Option<CharacterAndTrieValue>,
trie: &'data CodePointTrie<'data, u32>,
supplementary_trie: Option<&'data CodePointTrie<'data, u32>>,
scalars16: &'data ZeroSlice<u16>,
scalars24: &'data ZeroSlice<char>,
supplementary_scalars16: &'data ZeroSlice<u16>,
supplementary_scalars24: &'data ZeroSlice<char>,
half_width_voicing_marks_become_non_starters: bool,
decomposition_passthrough_bound: u32,
}
impl<'data, I> Decomposition<'data, I>
where
I: Iterator<Item = char>,
{
#[doc(hidden)]
pub fn new(
delegate: I,
decompositions: &'data DecompositionDataV1,
tables: &'data DecompositionTablesV1,
) -> Self {
Self::new_with_supplements(delegate, decompositions, None, tables, None, 0xC0)
}
fn new_with_supplements(
delegate: I,
decompositions: &'data DecompositionDataV1,
supplementary_decompositions: Option<&'data DecompositionSupplementV1>,
tables: &'data DecompositionTablesV1,
supplementary_tables: Option<&'data DecompositionTablesV1>,
decomposition_passthrough_bound: u8,
) -> Self {
let half_width_voicing_marks_become_non_starters =
if let Some(supplementary) = supplementary_decompositions {
supplementary.half_width_voicing_marks_become_non_starters()
} else {
false
};
let mut ret = Decomposition::<I> {
delegate,
buffer: SmallVec::new(),
buffer_pos: 0,
pending: Some(CharacterAndTrieValue::new('\u{FFFF}', 0)),
trie: &decompositions.trie,
supplementary_trie: supplementary_decompositions.map(|s| &s.trie),
scalars16: &tables.scalars16,
scalars24: &tables.scalars24,
supplementary_scalars16: if let Some(supplementary) = supplementary_tables {
&supplementary.scalars16
} else {
EMPTY_U16
},
supplementary_scalars24: if let Some(supplementary) = supplementary_tables {
&supplementary.scalars24
} else {
EMPTY_CHAR
},
half_width_voicing_marks_become_non_starters,
decomposition_passthrough_bound: u32::from(decomposition_passthrough_bound),
};
// Consume the U+FFFF placeholder installed in `pending` above; this primes
// `pending` and `buffer` from the actual delegate iterator.
let _ = ret.next();
ret
}
fn push_decomposition16(
&mut self,
low: u16,
offset: usize,
slice16: &ZeroSlice<u16>,
) -> (char, usize) {
let len = usize::from(low >> 13) + 2;
let (starter, tail) = slice16
.get_subslice(offset..offset + len)
.and_then(|slice| slice.split_first())
.map_or_else(
|| {
debug_assert!(false);
(REPLACEMENT_CHARACTER, EMPTY_U16)
},
|(first, trail)| (char_from_u16(first), trail),
);
if low & 0x1000 != 0 {
self.buffer.extend(
tail.iter()
.map(|u| CharacterAndClass::new_with_placeholder(char_from_u16(u))),
);
(starter, 0)
} else {
let mut i = 0;
let mut combining_start = 0;
for u in tail.iter() {
let ch = char_from_u16(u);
let trie_value = self.trie.get(ch);
self.buffer.push(CharacterAndClass::new_with_trie_value(
CharacterAndTrieValue::new(ch, trie_value),
));
i += 1;
if decomposition_starts_with_non_starter(trie_value) {
combining_start = i;
}
}
(starter, combining_start)
}
}
fn push_decomposition32(
&mut self,
low: u16,
offset: usize,
slice32: &ZeroSlice<char>,
) -> (char, usize) {
let len = usize::from(low >> 13) + 1;
let (starter, tail) = slice32
.get_subslice(offset..offset + len)
.and_then(|slice| slice.split_first())
.unwrap_or_else(|| {
debug_assert!(false);
(REPLACEMENT_CHARACTER, EMPTY_CHAR)
});
if low & 0x1000 != 0 {
self.buffer
.extend(tail.iter().map(CharacterAndClass::new_with_placeholder));
(starter, 0)
} else {
let mut i = 0;
let mut combining_start = 0;
for ch in tail.iter() {
let trie_value = self.trie.get(ch);
self.buffer.push(CharacterAndClass::new_with_trie_value(
CharacterAndTrieValue::new(ch, trie_value),
));
i += 1;
if decomposition_starts_with_non_starter(trie_value) {
combining_start = i;
}
}
(starter, combining_start)
}
}
#[inline(always)]
fn attach_trie_value(&self, c: char) -> CharacterAndTrieValue {
if let Some(supplementary) = self.supplementary_trie {
if let Some(value) = self.attach_supplementary_trie_value(c, supplementary) {
return value;
}
}
CharacterAndTrieValue::new(c, self.trie.get(c))
}
#[inline(never)]
fn attach_supplementary_trie_value(
&self,
c: char,
supplementary: &CodePointTrie<u32>,
) -> Option<CharacterAndTrieValue> {
let voicing_mark = u32::from(c).wrapping_sub(0xFF9E);
if voicing_mark <= 1 && self.half_width_voicing_marks_become_non_starters {
return Some(CharacterAndTrieValue::new(
if voicing_mark == 0 {
'\u{3099}'
} else {
'\u{309A}'
},
0xD800 | u32::from(CanonicalCombiningClass::KanaVoicing.0),
));
}
let trie_value = supplementary.get32(u32::from(c));
if trie_value != 0 {
return Some(CharacterAndTrieValue::new_from_supplement(c, trie_value));
}
None
}
fn delegate_next_no_pending(&mut self) -> Option<CharacterAndTrieValue> {
debug_assert!(self.pending.is_none());
let c = self.delegate.next()?;
if u32::from(c) < self.decomposition_passthrough_bound {
return Some(CharacterAndTrieValue::new(c, 0));
}
Some(self.attach_trie_value(c))
}
fn delegate_next(&mut self) -> Option<CharacterAndTrieValue> {
if let Some(pending) = self.pending.take() {
Some(pending)
} else {
self.delegate_next_no_pending()
}
}
fn decomposing_next(&mut self, c_and_trie_val: CharacterAndTrieValue) -> char {
let (starter, combining_start) = {
let c = c_and_trie_val.character;
let hangul_offset = u32::from(c).wrapping_sub(HANGUL_S_BASE);
if hangul_offset >= HANGUL_S_COUNT {
let decomposition = c_and_trie_val.trie_val;
if decomposition <= BACKWARD_COMBINING_STARTER_MARKER {
(c, 0)
} else {
let trail_or_complex = (decomposition >> 16) as u16;
let lead = decomposition as u16;
if lead > NON_ROUND_TRIP_MARKER && trail_or_complex != 0 {
let starter = char_from_u16(lead);
let combining = char_from_u16(trail_or_complex);
self.buffer
.push(CharacterAndClass::new_with_placeholder(combining));
(starter, 0)
} else if lead > NON_ROUND_TRIP_MARKER {
if lead != FDFA_MARKER {
debug_assert_ne!(
lead, SPECIAL_NON_STARTER_DECOMPOSITION_MARKER_U16,
"Should not reach this point with non-starter marker"
);
let starter = char_from_u16(lead);
(starter, 0)
} else {
self.buffer.extend(FDFA_NFKD.map(|u| {
CharacterAndClass::new_starter(unsafe {
core::char::from_u32_unchecked(u32::from(u))
})
}));
('\u{0635}', 17)
}
} else {
let offset = usize::from(trail_or_complex & 0xFFF);
if offset < self.scalars16.len() {
self.push_decomposition16(trail_or_complex, offset, self.scalars16)
} else if offset < self.scalars16.len() + self.scalars24.len() {
self.push_decomposition32(
trail_or_complex,
offset - self.scalars16.len(),
self.scalars24,
)
} else if offset
< self.scalars16.len()
+ self.scalars24.len()
+ self.supplementary_scalars16.len()
{
self.push_decomposition16(
trail_or_complex,
offset - (self.scalars16.len() + self.scalars24.len()),
self.supplementary_scalars16,
)
} else {
self.push_decomposition32(
trail_or_complex,
offset
- (self.scalars16.len()
+ self.scalars24.len()
+ self.supplementary_scalars16.len()),
self.supplementary_scalars24,
)
}
}
}
} else {
let l = hangul_offset / HANGUL_N_COUNT;
let v = (hangul_offset % HANGUL_N_COUNT) / HANGUL_T_COUNT;
let t = hangul_offset % HANGUL_T_COUNT;
self.buffer.push(CharacterAndClass::new_starter(unsafe {
core::char::from_u32_unchecked(HANGUL_V_BASE + v)
}));
let first = unsafe { core::char::from_u32_unchecked(HANGUL_L_BASE + l) };
if t != 0 {
self.buffer.push(CharacterAndClass::new_starter(unsafe {
core::char::from_u32_unchecked(HANGUL_T_BASE + t)
}));
(first, 2)
} else {
(first, 1)
}
}
};
self.gather_and_sort_combining(combining_start);
starter
}
fn gather_and_sort_combining(&mut self, combining_start: usize) {
while let Some(ch_and_trie_val) = self.delegate_next() {
if trie_value_has_ccc(ch_and_trie_val.trie_val) {
self.buffer
.push(CharacterAndClass::new_with_trie_value(ch_and_trie_val));
} else if trie_value_indicates_special_non_starter_decomposition(
ch_and_trie_val.trie_val,
) {
let mapped = match ch_and_trie_val.character {
'\u{0340}' => {
CharacterAndClass::new('\u{0300}', CanonicalCombiningClass::Above)
}
'\u{0341}' => {
CharacterAndClass::new('\u{0301}', CanonicalCombiningClass::Above)
}
'\u{0343}' => {
CharacterAndClass::new('\u{0313}', CanonicalCombiningClass::Above)
}
'\u{0344}' => {
self.buffer.push(CharacterAndClass::new(
'\u{0308}',
CanonicalCombiningClass::Above,
));
CharacterAndClass::new('\u{0301}', CanonicalCombiningClass::Above)
}
'\u{0F73}' => {
self.buffer.push(CharacterAndClass::new(
'\u{0F71}',
CanonicalCombiningClass::CCC129,
));
CharacterAndClass::new('\u{0F72}', CanonicalCombiningClass::CCC130)
}
'\u{0F75}' => {
self.buffer.push(CharacterAndClass::new(
'\u{0F71}',
CanonicalCombiningClass::CCC129,
));
CharacterAndClass::new('\u{0F74}', CanonicalCombiningClass::CCC132)
}
'\u{0F81}' => {
self.buffer.push(CharacterAndClass::new(
'\u{0F71}',
CanonicalCombiningClass::CCC129,
));
CharacterAndClass::new('\u{0F80}', CanonicalCombiningClass::CCC130)
}
_ => {
debug_assert!(false);
CharacterAndClass::new_with_placeholder(REPLACEMENT_CHARACTER)
}
};
self.buffer.push(mapped);
} else {
self.pending = Some(ch_and_trie_val);
break;
}
}
#[allow(clippy::indexing_slicing)]
sort_slice_by_ccc(&mut self.buffer[combining_start..], self.trie);
}
}
impl<'data, I> Iterator for Decomposition<'data, I>
where
I: Iterator<Item = char>,
{
type Item = char;
fn next(&mut self) -> Option<char> {
if let Some(ret) = self.buffer.get(self.buffer_pos).map(|c| c.character()) {
self.buffer_pos += 1;
if self.buffer_pos == self.buffer.len() {
self.buffer.clear();
self.buffer_pos = 0;
}
return Some(ret);
}
debug_assert_eq!(self.buffer_pos, 0);
let c_and_trie_val = self.pending.take()?;
Some(self.decomposing_next(c_and_trie_val))
}
}
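/// An iterator adaptor yielding the composing normalization (NFC or NFKC,
/// depending on the data it was constructed with) of the `char`s produced by
/// a delegate iterator.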
pub struct Composition<'data, I>
where
I: Iterator<Item = char>,
{
decomposition: Decomposition<'data, I>,
canonical_compositions: Char16Trie<'data>,
unprocessed_starter: Option<char>,
composition_passthrough_bound: u32,
}
impl<'data, I> Composition<'data, I>
where
I: Iterator<Item = char>,
{
fn new(
decomposition: Decomposition<'data, I>,
canonical_compositions: Char16Trie<'data>,
composition_passthrough_bound: u16,
) -> Self {
Self {
decomposition,
canonical_compositions,
unprocessed_starter: None,
composition_passthrough_bound: u32::from(composition_passthrough_bound),
}
}
#[inline(always)]
pub fn compose(&self, starter: char, second: char) -> Option<char> {
compose(self.canonical_compositions.iter(), starter, second)
}
#[inline(always)]
fn compose_non_hangul(&self, starter: char, second: char) -> Option<char> {
compose_non_hangul(self.canonical_compositions.iter(), starter, second)
}
}
impl<'data, I> Iterator for Composition<'data, I>
where
I: Iterator<Item = char>,
{
type Item = char;
#[inline]
fn next(&mut self) -> Option<char> {
let mut undecomposed_starter = CharacterAndTrieValue::new('\u{0}', 0);
if self.unprocessed_starter.is_none() {
#[allow(clippy::never_loop)]
loop {
if let Some((character, ccc)) = self
.decomposition
.buffer
.get(self.decomposition.buffer_pos)
.map(|c| c.character_and_ccc())
{
self.decomposition.buffer_pos += 1;
if self.decomposition.buffer_pos == self.decomposition.buffer.len() {
self.decomposition.buffer.clear();
self.decomposition.buffer_pos = 0;
}
if ccc == CanonicalCombiningClass::NotReordered {
self.unprocessed_starter = Some(character);
break;
}
return Some(character);
}
debug_assert_eq!(self.decomposition.buffer_pos, 0);
undecomposed_starter = self.decomposition.pending.take()?;
if u32::from(undecomposed_starter.character) < self.composition_passthrough_bound
|| undecomposed_starter.potential_passthrough()
{
if let Some(upcoming) = self.decomposition.delegate_next_no_pending() {
let cannot_combine_backwards = u32::from(upcoming.character)
< self.composition_passthrough_bound
|| !upcoming.can_combine_backwards();
self.decomposition.pending = Some(upcoming);
if cannot_combine_backwards {
return Some(undecomposed_starter.character);
}
} else {
return Some(undecomposed_starter.character);
}
}
break;
}
}
let mut starter = '\u{0}';
let mut attempt_composition = false;
loop {
if let Some(unprocessed) = self.unprocessed_starter.take() {
debug_assert_eq!(undecomposed_starter, CharacterAndTrieValue::new('\u{0}', 0));
debug_assert_eq!(starter, '\u{0}');
starter = unprocessed;
} else {
debug_assert_eq!(self.decomposition.buffer_pos, 0);
let next_starter = self.decomposition.decomposing_next(undecomposed_starter);
if !attempt_composition {
starter = next_starter;
} else if let Some(composed) = self.compose(starter, next_starter) {
starter = composed;
} else {
self.unprocessed_starter = Some(next_starter);
return Some(starter);
}
}
loop {
let (character, ccc) = if let Some((character, ccc)) = self
.decomposition
.buffer
.get(self.decomposition.buffer_pos)
.map(|c| c.character_and_ccc())
{
(character, ccc)
} else {
self.decomposition.buffer.clear();
self.decomposition.buffer_pos = 0;
break;
};
if let Some(composed) = self.compose(starter, character) {
starter = composed;
self.decomposition.buffer_pos += 1;
continue;
}
let mut most_recent_skipped_ccc = ccc;
{
let _ = self
.decomposition
.buffer
.drain(0..self.decomposition.buffer_pos);
}
self.decomposition.buffer_pos = 0;
if most_recent_skipped_ccc == CanonicalCombiningClass::NotReordered {
return Some(starter);
}
let mut i = 1;
while let Some((character, ccc)) = self
.decomposition
.buffer
.get(i)
.map(|c| c.character_and_ccc())
{
if ccc == CanonicalCombiningClass::NotReordered {
return Some(starter);
}
debug_assert!(ccc >= most_recent_skipped_ccc);
if ccc != most_recent_skipped_ccc {
if let Some(composed) = self.compose_non_hangul(starter, character) {
self.decomposition.buffer.remove(i);
starter = composed;
continue;
}
}
most_recent_skipped_ccc = ccc;
i += 1;
}
break;
}
debug_assert_eq!(self.decomposition.buffer_pos, 0);
if !self.decomposition.buffer.is_empty() {
return Some(starter);
}
#[allow(clippy::unwrap_used)]
if self.decomposition.pending.is_some() {
let pending = self.decomposition.pending.as_ref().unwrap();
if u32::from(pending.character) < self.composition_passthrough_bound
|| !pending.can_combine_backwards()
{
return Some(starter);
}
undecomposed_starter = self.decomposition.pending.take().unwrap();
attempt_composition = true;
continue;
}
return Some(starter);
}
}
}
macro_rules! composing_normalize_to {
($(#[$meta:meta])*,
$normalize_to:ident,
$write:path,
$slice:ty,
$prolog:block,
$always_valid_utf:literal,
$as_slice:ident,
$fast:block,
$text:ident,
$sink:ident,
$composition:ident,
$composition_passthrough_bound:ident,
$undecomposed_starter:ident,
$pending_slice:ident,
$len_utf:ident,
) => {
$(#[$meta])*
pub fn $normalize_to<W: $write + ?Sized>(
&self,
$text: $slice,
$sink: &mut W,
) -> core::fmt::Result {
$prolog
let mut $composition = self.normalize_iter($text.chars());
for cc in $composition.decomposition.buffer.drain(..) {
$sink.write_char(cc.character())?;
}
let $composition_passthrough_bound = $composition.composition_passthrough_bound;
'outer: loop {
debug_assert_eq!($composition.decomposition.buffer_pos, 0);
let mut $undecomposed_starter =
if let Some(pending) = $composition.decomposition.pending.take() {
pending
} else {
return Ok(());
};
#[allow(clippy::indexing_slicing)]
if u32::from($undecomposed_starter.character) < $composition_passthrough_bound ||
$undecomposed_starter.potential_passthrough()
{
if $always_valid_utf || $undecomposed_starter.character != REPLACEMENT_CHARACTER {
let $pending_slice = &$text[$text.len() - $composition.decomposition.delegate.$as_slice().len() - $undecomposed_starter.character.$len_utf()..];
$fast
}
}
let mut starter = $composition
.decomposition
.decomposing_next($undecomposed_starter);
'bufferloop: loop {
loop {
let (character, ccc) = if let Some((character, ccc)) = $composition
.decomposition
.buffer
.get($composition.decomposition.buffer_pos)
.map(|c| c.character_and_ccc())
{
(character, ccc)
} else {
$composition.decomposition.buffer.clear();
$composition.decomposition.buffer_pos = 0;
break;
};
if let Some(composed) = $composition.compose(starter, character) {
starter = composed;
$composition.decomposition.buffer_pos += 1;
continue;
}
let mut most_recent_skipped_ccc = ccc;
if most_recent_skipped_ccc == CanonicalCombiningClass::NotReordered {
$sink.write_char(starter)?;
starter = character;
$composition.decomposition.buffer_pos += 1;
continue 'bufferloop;
} else {
{
let _ = $composition
.decomposition
.buffer
.drain(0..$composition.decomposition.buffer_pos);
}
$composition.decomposition.buffer_pos = 0;
}
let mut i = 1;
while let Some((character, ccc)) = $composition
.decomposition
.buffer
.get(i)
.map(|c| c.character_and_ccc())
{
if ccc == CanonicalCombiningClass::NotReordered {
$sink.write_char(starter)?;
for cc in $composition.decomposition.buffer.drain(..i) {
$sink.write_char(cc.character())?;
}
starter = character;
{
let removed = $composition.decomposition.buffer.remove(0);
debug_assert_eq!(starter, removed.character());
}
debug_assert_eq!($composition.decomposition.buffer_pos, 0);
continue 'bufferloop;
}
debug_assert!(ccc >= most_recent_skipped_ccc);
if ccc != most_recent_skipped_ccc {
if let Some(composed) =
$composition.compose_non_hangul(starter, character)
{
$composition.decomposition.buffer.remove(i);
starter = composed;
continue;
}
}
most_recent_skipped_ccc = ccc;
i += 1;
}
break;
}
debug_assert_eq!($composition.decomposition.buffer_pos, 0);
if !$composition.decomposition.buffer.is_empty() {
$sink.write_char(starter)?;
for cc in $composition.decomposition.buffer.drain(..) {
$sink.write_char(cc.character())?;
}
continue 'outer;
}
if $composition.decomposition.pending.is_some() {
let pending = $composition.decomposition.pending.as_ref().unwrap();
if u32::from(pending.character) < $composition.composition_passthrough_bound
|| !pending.can_combine_backwards()
{
$sink.write_char(starter)?;
continue 'outer;
}
let pending_starter = $composition.decomposition.pending.take().unwrap();
let decomposed = $composition.decomposition.decomposing_next(pending_starter);
if let Some(composed) = $composition.compose(starter, decomposed) {
starter = composed;
} else {
$sink.write_char(starter)?;
starter = decomposed;
}
continue 'bufferloop;
}
$sink.write_char(starter)?;
return Ok(());
}
}
}
};
}
macro_rules! decomposing_normalize_to {
($(#[$meta:meta])*,
$normalize_to:ident,
$write:path,
$slice:ty,
$prolog:block,
$as_slice:ident,
$fast:block,
$text:ident,
$sink:ident,
$decomposition:ident,
$decomposition_passthrough_bound:ident,
$undecomposed_starter:ident,
$pending_slice:ident,
$outer:lifetime, // loop labels use lifetime tokens
) => {
$(#[$meta])*
pub fn $normalize_to<W: $write + ?Sized>(
&self,
$text: $slice,
$sink: &mut W,
) -> core::fmt::Result {
$prolog
let mut $decomposition = self.normalize_iter($text.chars());
let $decomposition_passthrough_bound = $decomposition.decomposition_passthrough_bound;
$outer: loop {
for cc in $decomposition.buffer.drain(..) {
$sink.write_char(cc.character())?;
}
debug_assert_eq!($decomposition.buffer_pos, 0);
let mut $undecomposed_starter = if let Some(pending) = $decomposition.pending.take() {
pending
} else {
return Ok(());
};
#[allow(clippy::indexing_slicing)]
if $undecomposed_starter.starter_and_decomposes_to_self() {
$sink.write_char($undecomposed_starter.character)?;
let $pending_slice = $decomposition.delegate.$as_slice();
$fast
}
let starter = $decomposition.decomposing_next($undecomposed_starter);
$sink.write_char(starter)?;
}
}
};
}
macro_rules! normalizer_methods {
() => {
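/// Normalize a string slice into a `String`.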
pub fn normalize(&self, text: &str) -> String {
let mut ret = String::new();
ret.reserve(text.len());
let _ = self.normalize_to(text, &mut ret);
ret
}
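/// Check whether a string slice is already normalized.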
pub fn is_normalized(&self, text: &str) -> bool {
let mut sink = IsNormalizedSinkStr::new(text);
if self.normalize_to(text, &mut sink).is_err() {
return false;
}
sink.finished()
}
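/// Normalize a slice of potentially-invalid UTF-16 into a `Vec<u16>`,
/// treating unpaired surrogates as U+FFFD.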
pub fn normalize_utf16(&self, text: &[u16]) -> Vec<u16> {
let mut ret = Vec::new();
let _ = self.normalize_utf16_to(text, &mut ret);
ret
}
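/// Check whether a slice of potentially-invalid UTF-16 is already normalized.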
pub fn is_normalized_utf16(&self, text: &[u16]) -> bool {
let mut sink = IsNormalizedSinkUtf16::new(text);
if self.normalize_utf16_to(text, &mut sink).is_err() {
return false;
}
sink.finished()
}
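/// Normalize a slice of potentially-invalid UTF-8 into a `String`, treating
/// ill-formed byte sequences as U+FFFD.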
pub fn normalize_utf8(&self, text: &[u8]) -> String {
let mut ret = String::new();
ret.reserve(text.len());
let _ = self.normalize_utf8_to(text, &mut ret);
ret
}
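/// Check whether a slice of potentially-invalid UTF-8 is already normalized.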
pub fn is_normalized_utf8(&self, text: &[u8]) -> bool {
let mut sink = IsNormalizedSinkUtf8::new(text);
if self.normalize_utf8_to(text, &mut sink).is_err() {
return false;
}
sink.finished()
}
};
}
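/// A normalizer for performing decomposing normalization (NFD or NFKD,
/// depending on the data it was constructed with).
///
/// A usage sketch (shown as `ignore` rather than as a doctest; it assumes
/// some data provider `provider` that can supply the keys required by the
/// constructor):
///
/// ```ignore
/// let nfd = DecomposingNormalizer::try_new_nfd_unstable(&provider)?;
/// assert_eq!(nfd.normalize("ä"), "a\u{0308}");
/// ```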
pub struct DecomposingNormalizer {
decompositions: DataPayload<CanonicalDecompositionDataV1Marker>,
supplementary_decompositions: Option<SupplementPayloadHolder>,
tables: DataPayload<CanonicalDecompositionTablesV1Marker>,
supplementary_tables: Option<DataPayload<CompatibilityDecompositionTablesV1Marker>>,
decomposition_passthrough_bound: u8,
composition_passthrough_bound: u16,
}
impl DecomposingNormalizer {
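/// NFD constructor using the given data provider.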
pub fn try_new_nfd_unstable<D>(data_provider: &D) -> Result<Self, NormalizerError>
where
D: DataProvider<CanonicalDecompositionDataV1Marker>
+ DataProvider<CanonicalDecompositionTablesV1Marker>
+ ?Sized,
{
let decompositions: DataPayload<CanonicalDecompositionDataV1Marker> =
data_provider.load(Default::default())?.take_payload()?;
let tables: DataPayload<CanonicalDecompositionTablesV1Marker> =
data_provider.load(Default::default())?.take_payload()?;
if tables.get().scalars16.len() + tables.get().scalars24.len() > 0xFFF {
return Err(NormalizerError::FutureExtension);
}
Ok(DecomposingNormalizer {
decompositions,
supplementary_decompositions: None,
tables,
supplementary_tables: None,
decomposition_passthrough_bound: 0xC0,
composition_passthrough_bound: 0x0300,
})
}
icu_provider::gen_any_buffer_constructors!(
locale: skip,
options: skip,
error: NormalizerError,
functions: [
Self::try_new_nfd_unstable,
try_new_nfd_with_any_provider,
try_new_nfd_with_buffer_provider
]
);
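/// NFKD (compatibility decomposition) constructor using the given data provider.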
pub fn try_new_nfkd_unstable<D>(data_provider: &D) -> Result<Self, NormalizerError>
where
D: DataProvider<CanonicalDecompositionDataV1Marker>
+ DataProvider<CompatibilityDecompositionSupplementV1Marker>
+ DataProvider<CanonicalDecompositionTablesV1Marker>
+ DataProvider<CompatibilityDecompositionTablesV1Marker>
+ ?Sized,
{
let decompositions: DataPayload<CanonicalDecompositionDataV1Marker> =
data_provider.load(Default::default())?.take_payload()?;
let supplementary_decompositions: DataPayload<
CompatibilityDecompositionSupplementV1Marker,
> = data_provider.load(Default::default())?.take_payload()?;
let tables: DataPayload<CanonicalDecompositionTablesV1Marker> =
data_provider.load(Default::default())?.take_payload()?;
let supplementary_tables: DataPayload<CompatibilityDecompositionTablesV1Marker> =
data_provider.load(Default::default())?.take_payload()?;
if tables.get().scalars16.len()
+ tables.get().scalars24.len()
+ supplementary_tables.get().scalars16.len()
+ supplementary_tables.get().scalars24.len()
> 0xFFF
{
return Err(NormalizerError::FutureExtension);
}
let cap = supplementary_decompositions.get().passthrough_cap;
if cap > 0x0300 {
return Err(NormalizerError::ValidationError);
}
let decomposition_capped = cap.min(0xC0);
let composition_capped = cap.min(0x0300);
Ok(DecomposingNormalizer {
decompositions,
supplementary_decompositions: Some(SupplementPayloadHolder::Compatibility(
supplementary_decompositions,
)),
tables,
supplementary_tables: Some(supplementary_tables),
decomposition_passthrough_bound: decomposition_capped as u8,
composition_passthrough_bound: composition_capped as u16,
})
}
icu_provider::gen_any_buffer_constructors!(
locale: skip,
options: skip,
error: NormalizerError,
functions: [
Self::try_new_nfkd_unstable,
try_new_nfkd_with_any_provider,
try_new_nfkd_with_buffer_provider
]
);
#[doc(hidden)]
#[cfg(feature = "experimental")]
pub fn try_new_uts46_decomposed_without_ignored_and_disallowed<D>(
data_provider: &D,
) -> Result<Self, NormalizerError>
where
D: DataProvider<CanonicalDecompositionDataV1Marker>
+ DataProvider<Uts46DecompositionSupplementV1Marker>
+ DataProvider<CanonicalDecompositionTablesV1Marker>
+ DataProvider<CompatibilityDecompositionTablesV1Marker>
+ ?Sized,
{
let decompositions: DataPayload<CanonicalDecompositionDataV1Marker> =
data_provider.load(Default::default())?.take_payload()?;
let supplementary_decompositions: DataPayload<Uts46DecompositionSupplementV1Marker> =
data_provider.load(Default::default())?.take_payload()?;
let tables: DataPayload<CanonicalDecompositionTablesV1Marker> =
data_provider.load(Default::default())?.take_payload()?;
let supplementary_tables: DataPayload<CompatibilityDecompositionTablesV1Marker> =
data_provider.load(Default::default())?.take_payload()?;
if tables.get().scalars16.len()
+ tables.get().scalars24.len()
+ supplementary_tables.get().scalars16.len()
+ supplementary_tables.get().scalars24.len()
> 0xFFF
{
return Err(NormalizerError::FutureExtension);
}
let cap = supplementary_decompositions.get().passthrough_cap;
if cap > 0x0300 {
return Err(NormalizerError::ValidationError);
}
let decomposition_capped = cap.min(0xC0);
let composition_capped = cap.min(0x0300);
Ok(DecomposingNormalizer {
decompositions,
supplementary_decompositions: Some(SupplementPayloadHolder::Uts46(
supplementary_decompositions,
)),
tables,
supplementary_tables: Some(supplementary_tables),
decomposition_passthrough_bound: decomposition_capped as u8,
composition_passthrough_bound: composition_capped as u16,
})
}
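/// Wraps a delegate iterator of `char`s into a decomposing iterator adaptor.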
pub fn normalize_iter<I: Iterator<Item = char>>(&self, iter: I) -> Decomposition<I> {
Decomposition::new_with_supplements(
iter,
self.decompositions.get(),
self.supplementary_decompositions.as_ref().map(|s| s.get()),
self.tables.get(),
self.supplementary_tables.as_ref().map(|s| s.get()),
self.decomposition_passthrough_bound,
)
}
normalizer_methods!();
decomposing_normalize_to!(
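/// Normalize a string slice into a `core::fmt::Write` sink.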
,
normalize_to,
core::fmt::Write,
&str,
{
},
as_str,
{
let decomposition_passthrough_byte_bound = if decomposition_passthrough_bound == 0xC0 {
0xC3u8
} else {
decomposition_passthrough_bound.min(0x80) as u8
};
#[allow(clippy::unwrap_used)]
'fast: loop {
let mut code_unit_iter = decomposition.delegate.as_str().as_bytes().iter();
'fastest: loop {
if let Some(&upcoming_byte) = code_unit_iter.next() {
if upcoming_byte < decomposition_passthrough_byte_bound {
continue 'fastest;
}
decomposition.delegate = pending_slice[pending_slice.len() - code_unit_iter.as_slice().len() - 1..].chars();
break 'fastest;
}
sink.write_str(pending_slice)?;
return Ok(());
}
let upcoming = decomposition.delegate.next().unwrap();
let upcoming_with_trie_value = decomposition.attach_trie_value(upcoming);
if upcoming_with_trie_value.starter_and_decomposes_to_self() {
continue 'fast;
}
let consumed_so_far_slice = &pending_slice[..pending_slice.len()
- decomposition.delegate.as_str().len()
- upcoming.len_utf8()];
sink.write_str(consumed_so_far_slice)?;
if decomposition_starts_with_non_starter(
upcoming_with_trie_value.trie_val,
) {
decomposition.pending = Some(upcoming_with_trie_value);
decomposition.gather_and_sort_combining(0);
continue 'outer;
}
undecomposed_starter = upcoming_with_trie_value;
debug_assert!(decomposition.pending.is_none());
break 'fast;
}
},
text,
sink,
decomposition,
decomposition_passthrough_bound,
undecomposed_starter,
pending_slice,
'outer,
);
decomposing_normalize_to!(
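/// Normalize a slice of potentially-invalid UTF-8 into a `core::fmt::Write`
/// sink, treating ill-formed byte sequences as U+FFFD.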
,
normalize_utf8_to,
core::fmt::Write,
&[u8],
{
},
as_slice,
{
let decomposition_passthrough_byte_bound = decomposition_passthrough_bound.min(0x80) as u8;
#[allow(clippy::unwrap_used)]
'fast: loop {
let mut code_unit_iter = decomposition.delegate.as_slice().iter();
'fastest: loop {
if let Some(&upcoming_byte) = code_unit_iter.next() {
if upcoming_byte < decomposition_passthrough_byte_bound {
continue 'fastest;
}
break 'fastest;
}
sink.write_str(unsafe { from_utf8_unchecked(pending_slice) })?;
return Ok(());
}
decomposition.delegate = pending_slice[pending_slice.len() - code_unit_iter.as_slice().len() - 1..].chars();
let upcoming = decomposition.delegate.next().unwrap();
let upcoming_with_trie_value = decomposition.attach_trie_value(upcoming);
if upcoming_with_trie_value.starter_and_decomposes_to_self() {
if upcoming != REPLACEMENT_CHARACTER {
continue 'fast;
}
let mut consumed_so_far = pending_slice[..pending_slice.len() - decomposition.delegate.as_slice().len()].chars();
let back = consumed_so_far.next_back();
debug_assert_eq!(back, Some(REPLACEMENT_CHARACTER));
let consumed_so_far_slice = consumed_so_far.as_slice();
sink.write_str(unsafe{from_utf8_unchecked(consumed_so_far_slice)})?;
undecomposed_starter = upcoming_with_trie_value;
debug_assert!(decomposition.pending.is_none());
break 'fast;
}
let consumed_so_far_slice = &pending_slice[..pending_slice.len()
- decomposition.delegate.as_slice().len()
- upcoming.len_utf8()];
sink.write_str(unsafe{from_utf8_unchecked(consumed_so_far_slice)})?;
if decomposition_starts_with_non_starter(
upcoming_with_trie_value.trie_val,
) {
decomposition.pending = Some(upcoming_with_trie_value);
decomposition.gather_and_sort_combining(0);
continue 'outer;
}
undecomposed_starter = upcoming_with_trie_value;
debug_assert!(decomposition.pending.is_none());
break 'fast;
}
},
text,
sink,
decomposition,
decomposition_passthrough_bound,
undecomposed_starter,
pending_slice,
'outer,
);
decomposing_normalize_to!(
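/// Normalize a slice of potentially-invalid UTF-16 into a `Write16` sink,
/// treating unpaired surrogates as U+FFFD.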
,
normalize_utf16_to,
write16::Write16,
&[u16],
{
sink.size_hint(text.len())?;
},
as_slice,
{
let mut code_unit_iter = decomposition.delegate.as_slice().iter();
let mut counter = UTF16_FAST_PATH_FLUSH_THRESHOLD;
'fast: loop {
counter -= 1;
if let Some(&upcoming_code_unit) = code_unit_iter.next() {
let mut upcoming32 = u32::from(upcoming_code_unit);
if upcoming32 < decomposition_passthrough_bound && counter != 0 {
continue 'fast;
}
#[allow(clippy::never_loop)]
'surrogateloop: loop {
let surrogate_base = upcoming32.wrapping_sub(0xD800);
if surrogate_base > (0xDFFF - 0xD800) {
break 'surrogateloop;
}
if surrogate_base <= (0xDBFF - 0xD800) {
let iter_backup = code_unit_iter.clone();
if let Some(&low) = code_unit_iter.next() {
if in_inclusive_range16(low, 0xDC00, 0xDFFF) {
upcoming32 = (upcoming32 << 10) + u32::from(low)
- (((0xD800u32 << 10) - 0x10000u32) + 0xDC00u32);
break 'surrogateloop;
} else {
code_unit_iter = iter_backup;
}
}
}
let slice_to_write = &pending_slice
[..pending_slice.len() - code_unit_iter.as_slice().len() - 1];
sink.write_slice(slice_to_write)?;
undecomposed_starter =
CharacterAndTrieValue::new(REPLACEMENT_CHARACTER, 0);
debug_assert!(decomposition.pending.is_none());
break 'fast;
}
let upcoming = unsafe { char::from_u32_unchecked(upcoming32) };
let upcoming_with_trie_value =
decomposition.attach_trie_value(upcoming);
if upcoming_with_trie_value.starter_and_decomposes_to_self() && counter != 0 {
continue 'fast;
}
let consumed_so_far_slice = &pending_slice[..pending_slice.len()
- code_unit_iter.as_slice().len()
- upcoming.len_utf16()];
sink.write_slice(consumed_so_far_slice)?;
if decomposition_starts_with_non_starter(
upcoming_with_trie_value.trie_val,
) {
decomposition.delegate = code_unit_iter.as_slice().chars();
decomposition.pending = Some(upcoming_with_trie_value);
decomposition.gather_and_sort_combining(0);
continue 'outer;
}
undecomposed_starter = upcoming_with_trie_value;
debug_assert!(decomposition.pending.is_none());
break 'fast;
}
sink.write_slice(pending_slice)?;
return Ok(());
}
decomposition.delegate = code_unit_iter.as_slice().chars();
},
text,
sink,
decomposition,
decomposition_passthrough_bound,
undecomposed_starter,
pending_slice,
'outer,
);
}
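/// A normalizer for performing composing normalization (NFC or NFKC,
/// depending on the data it was constructed with).
///
/// A usage sketch (shown as `ignore` rather than as a doctest; it assumes
/// some data provider `provider` that can supply the keys required by the
/// constructor):
///
/// ```ignore
/// let nfc = ComposingNormalizer::try_new_nfc_unstable(&provider)?;
/// assert_eq!(nfc.normalize("a\u{0308}"), "ä");
/// ```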
pub struct ComposingNormalizer {
decomposing_normalizer: DecomposingNormalizer,
canonical_compositions: DataPayload<CanonicalCompositionsV1Marker>,
}
impl ComposingNormalizer {
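/// NFC constructor using the given data provider.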
pub fn try_new_nfc_unstable<D>(data_provider: &D) -> Result<Self, NormalizerError>
where
D: DataProvider<CanonicalDecompositionDataV1Marker>
+ DataProvider<CanonicalDecompositionTablesV1Marker>
+ DataProvider<CanonicalCompositionsV1Marker>
+ ?Sized,
{
let decomposing_normalizer = DecomposingNormalizer::try_new_nfd_unstable(data_provider)?;
let canonical_compositions: DataPayload<CanonicalCompositionsV1Marker> =
data_provider.load(Default::default())?.take_payload()?;
Ok(ComposingNormalizer {
decomposing_normalizer,
canonical_compositions,
})
}
icu_provider::gen_any_buffer_constructors!(
locale: skip,
options: skip,
error: NormalizerError,
functions: [
Self::try_new_nfc_unstable,
try_new_nfc_with_any_provider,
try_new_nfc_with_buffer_provider
]
);
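/// NFKC constructor using the given data provider.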
pub fn try_new_nfkc_unstable<D>(data_provider: &D) -> Result<Self, NormalizerError>
where
D: DataProvider<CanonicalDecompositionDataV1Marker>
+ DataProvider<CompatibilityDecompositionSupplementV1Marker>
+ DataProvider<CanonicalDecompositionTablesV1Marker>
+ DataProvider<CompatibilityDecompositionTablesV1Marker>
+ DataProvider<CanonicalCompositionsV1Marker>
+ ?Sized,
{
let decomposing_normalizer = DecomposingNormalizer::try_new_nfkd_unstable(data_provider)?;
let canonical_compositions: DataPayload<CanonicalCompositionsV1Marker> =
data_provider.load(Default::default())?.take_payload()?;
Ok(ComposingNormalizer {
decomposing_normalizer,
canonical_compositions,
})
}
icu_provider::gen_any_buffer_constructors!(
locale: skip,
options: skip,
error: NormalizerError,
functions: [
Self::try_new_nfkc_unstable,
try_new_nfkc_with_any_provider,
try_new_nfkc_with_buffer_provider
]
);
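/// UTS #46 constructor; as the name indicates, ignored and disallowed
/// characters are not handled by the normalizer constructed here.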
#[cfg(feature = "experimental")]
pub fn try_new_uts46_without_ignored_and_disallowed_unstable<D>(
data_provider: &D,
) -> Result<Self, NormalizerError>
where
D: DataProvider<CanonicalDecompositionDataV1Marker>
+ DataProvider<Uts46DecompositionSupplementV1Marker>
+ DataProvider<CanonicalDecompositionTablesV1Marker>
+ DataProvider<CompatibilityDecompositionTablesV1Marker>
+ DataProvider<CanonicalCompositionsV1Marker>
+ ?Sized,
{
let decomposing_normalizer =
DecomposingNormalizer::try_new_uts46_decomposed_without_ignored_and_disallowed(
data_provider,
)?;
let canonical_compositions: DataPayload<CanonicalCompositionsV1Marker> =
data_provider.load(Default::default())?.take_payload()?;
Ok(ComposingNormalizer {
decomposing_normalizer,
canonical_compositions,
})
}
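/// Wraps a delegate iterator of `char`s into a composing iterator adaptor.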
pub fn normalize_iter<I: Iterator<Item = char>>(&self, iter: I) -> Composition<I> {
Composition::new(
Decomposition::new_with_supplements(
iter,
self.decomposing_normalizer.decompositions.get(),
self.decomposing_normalizer
.supplementary_decompositions
.as_ref()
.map(|s| s.get()),
self.decomposing_normalizer.tables.get(),
self.decomposing_normalizer
.supplementary_tables
.as_ref()
.map(|s| s.get()),
self.decomposing_normalizer.decomposition_passthrough_bound,
),
ZeroFrom::zero_from(&self.canonical_compositions.get().canonical_compositions),
self.decomposing_normalizer.composition_passthrough_bound,
)
}
normalizer_methods!();
composing_normalize_to!(
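/// Normalize a string slice into a `core::fmt::Write` sink.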
,
normalize_to,
core::fmt::Write,
&str,
{},
true,
as_str,
{
let composition_passthrough_byte_bound = if composition_passthrough_bound == 0x300 {
0xCCu8
} else {
composition_passthrough_bound.min(0x80) as u8
};
let mut undecomposed_starter_valid = true;
#[allow(clippy::unwrap_used)]
'fast: loop {
let mut code_unit_iter = composition.decomposition.delegate.as_str().as_bytes().iter();
'fastest: loop {
if let Some(&upcoming_byte) = code_unit_iter.next() {
if upcoming_byte < composition_passthrough_byte_bound {
undecomposed_starter_valid = false;
continue 'fastest;
}
composition.decomposition.delegate = pending_slice[pending_slice.len() - code_unit_iter.as_slice().len() - 1..].chars();
break 'fastest;
}
sink.write_str(pending_slice)?;
return Ok(());
}
let upcoming = composition.decomposition.delegate.next().unwrap();
let upcoming_with_trie_value = composition.decomposition.attach_trie_value(upcoming);
if upcoming_with_trie_value.potential_passthrough_and_cannot_combine_backwards() {
undecomposed_starter = upcoming_with_trie_value;
undecomposed_starter_valid = true;
continue 'fast;
}
composition.decomposition.pending = Some(upcoming_with_trie_value);
let consumed_so_far_slice = if undecomposed_starter_valid {
&pending_slice[..pending_slice.len() - composition.decomposition.delegate.as_str().len() - upcoming.len_utf8() - undecomposed_starter.character.len_utf8()]
} else {
let mut consumed_so_far = pending_slice[..pending_slice.len() - composition.decomposition.delegate.as_str().len() - upcoming.len_utf8()].chars();
undecomposed_starter = composition.decomposition.attach_trie_value(consumed_so_far.next_back().unwrap());
undecomposed_starter_valid = true;
consumed_so_far.as_str()
};
sink.write_str(consumed_so_far_slice)?;
break 'fast;
}
debug_assert!(undecomposed_starter_valid);
},
text,
sink,
composition,
composition_passthrough_bound,
undecomposed_starter,
pending_slice,
len_utf8,
);
composing_normalize_to!(
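/// Normalize a slice of potentially-invalid UTF-8 into a `core::fmt::Write`
/// sink, treating ill-formed byte sequences as U+FFFD.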
,
normalize_utf8_to,
core::fmt::Write,
&[u8],
{},
false,
as_slice,
{
let mut undecomposed_starter_valid = true;
'fast: loop {
if let Some(upcoming) = composition.decomposition.delegate.next() {
if u32::from(upcoming) < composition_passthrough_bound {
undecomposed_starter_valid = false;
continue 'fast;
}
if upcoming == REPLACEMENT_CHARACTER {
let mut consumed_so_far = pending_slice[..pending_slice.len() - composition.decomposition.delegate.as_slice().len()].chars();
let back = consumed_so_far.next_back();
debug_assert_eq!(back, Some(REPLACEMENT_CHARACTER));
let consumed_so_far_slice = consumed_so_far.as_slice();
sink.write_str(unsafe{ from_utf8_unchecked(consumed_so_far_slice)})?;
undecomposed_starter = CharacterAndTrieValue::new(REPLACEMENT_CHARACTER, 0);
undecomposed_starter_valid = true;
composition.decomposition.pending = None;
break 'fast;
}
let upcoming_with_trie_value = composition.decomposition.attach_trie_value(upcoming);
if upcoming_with_trie_value.potential_passthrough_and_cannot_combine_backwards() {
undecomposed_starter = upcoming_with_trie_value;
undecomposed_starter_valid = true;
continue 'fast;
}
composition.decomposition.pending = Some(upcoming_with_trie_value);
#[allow(clippy::unwrap_used)]
let consumed_so_far_slice = if undecomposed_starter_valid {
&pending_slice[..pending_slice.len() - composition.decomposition.delegate.as_slice().len() - upcoming.len_utf8() - undecomposed_starter.character.len_utf8()]
} else {
let mut consumed_so_far = pending_slice[..pending_slice.len() - composition.decomposition.delegate.as_slice().len() - upcoming.len_utf8()].chars();
undecomposed_starter = composition.decomposition.attach_trie_value(consumed_so_far.next_back().unwrap());
undecomposed_starter_valid = true;
consumed_so_far.as_slice()
};
sink.write_str(unsafe { from_utf8_unchecked(consumed_so_far_slice)})?;
break 'fast;
}
sink.write_str(unsafe {from_utf8_unchecked(pending_slice) })?;
return Ok(());
}
debug_assert!(undecomposed_starter_valid);
},
text,
sink,
composition,
composition_passthrough_bound,
undecomposed_starter,
pending_slice,
len_utf8,
);
composing_normalize_to!(
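/// Normalize a slice of potentially-invalid UTF-16 into a `Write16` sink,
/// treating unpaired surrogates as U+FFFD.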
,
normalize_utf16_to,
write16::Write16,
&[u16],
{
sink.size_hint(text.len())?;
},
false,
as_slice,
{
let mut code_unit_iter = composition.decomposition.delegate.as_slice().iter();
let mut upcoming32;
let mut undecomposed_starter_valid;
let mut counter = UTF16_FAST_PATH_FLUSH_THRESHOLD;
let mut counter_reference = counter - 1;
'fast: loop {
counter -= 1;
if let Some(&upcoming_code_unit) = code_unit_iter.next() {
upcoming32 = u32::from(upcoming_code_unit);
if upcoming32 < composition_passthrough_bound && counter != 0 {
continue 'fast;
}
undecomposed_starter_valid = counter == counter_reference;
#[allow(clippy::never_loop)]
'surrogateloop: loop {
let surrogate_base = upcoming32.wrapping_sub(0xD800);
if surrogate_base > (0xDFFF - 0xD800) {
break 'surrogateloop;
}
if surrogate_base <= (0xDBFF - 0xD800) {
let iter_backup = code_unit_iter.clone();
if let Some(&low) = code_unit_iter.next() {
if in_inclusive_range16(low, 0xDC00, 0xDFFF) {
upcoming32 = (upcoming32 << 10) + u32::from(low)
- (((0xD800u32 << 10) - 0x10000u32) + 0xDC00u32);
break 'surrogateloop;
} else {
code_unit_iter = iter_backup;
}
}
}
let slice_to_write = &pending_slice[..pending_slice.len() - code_unit_iter.as_slice().len() - 1];
sink.write_slice(slice_to_write)?;
undecomposed_starter = CharacterAndTrieValue::new(REPLACEMENT_CHARACTER, 0);
undecomposed_starter_valid = true;
composition.decomposition.pending = None;
break 'fast;
}
let upcoming = unsafe { char::from_u32_unchecked(upcoming32) };
let upcoming_with_trie_value = composition.decomposition.attach_trie_value(upcoming);
if upcoming_with_trie_value.potential_passthrough_and_cannot_combine_backwards() && counter != 0 {
undecomposed_starter = upcoming_with_trie_value;
counter_reference = counter - 1;
continue 'fast;
}
composition.decomposition.pending = Some(upcoming_with_trie_value);
#[allow(clippy::unwrap_used)]
let consumed_so_far_slice = if undecomposed_starter_valid {
&pending_slice[..pending_slice.len() - code_unit_iter.as_slice().len() - upcoming.len_utf16() - undecomposed_starter.character.len_utf16()]
} else {
let mut consumed_so_far = pending_slice[..pending_slice.len() - code_unit_iter.as_slice().len() - upcoming.len_utf16()].chars();
undecomposed_starter = composition.decomposition.attach_trie_value(consumed_so_far.next_back().unwrap());
undecomposed_starter_valid = true;
consumed_so_far.as_slice()
};
sink.write_slice(consumed_so_far_slice)?;
break 'fast;
}
sink.write_slice(pending_slice)?;
return Ok(());
}
debug_assert!(undecomposed_starter_valid);
composition.decomposition.delegate = code_unit_iter.as_slice().chars();
},
text,
sink,
composition,
composition_passthrough_bound,
undecomposed_starter,
pending_slice,
len_utf16,
);
}
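// The `IsNormalizedSink*` types below back the `is_normalized*` methods: the
// normalizer writes into them, and each write is compared against the
// remaining expected input (whole-slice writes are compared cheaply by
// pointer identity, since passthrough writes are subslices of the input).
// Any divergence makes the write return an error, which `is_normalized*`
// maps to `false`; if the whole input is consumed, the text was normalized.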
struct IsNormalizedSinkUtf16<'a> {
expect: &'a [u16],
}
impl<'a> IsNormalizedSinkUtf16<'a> {
pub fn new(slice: &'a [u16]) -> Self {
IsNormalizedSinkUtf16 { expect: slice }
}
pub fn finished(&self) -> bool {
self.expect.is_empty()
}
}
impl<'a> Write16 for IsNormalizedSinkUtf16<'a> {
fn write_slice(&mut self, s: &[u16]) -> core::fmt::Result {
#[allow(clippy::indexing_slicing)]
if s.as_ptr() == self.expect.as_ptr() {
self.expect = &self.expect[s.len()..];
Ok(())
} else {
Err(core::fmt::Error {})
}
}
fn write_char(&mut self, c: char) -> core::fmt::Result {
let mut iter = self.expect.chars();
if iter.next() == Some(c) {
self.expect = iter.as_slice();
Ok(())
} else {
Err(core::fmt::Error {})
}
}
}
struct IsNormalizedSinkUtf8<'a> {
expect: &'a [u8],
}
impl<'a> IsNormalizedSinkUtf8<'a> {
pub fn new(slice: &'a [u8]) -> Self {
IsNormalizedSinkUtf8 { expect: slice }
}
pub fn finished(&self) -> bool {
self.expect.is_empty()
}
}
impl<'a> core::fmt::Write for IsNormalizedSinkUtf8<'a> {
fn write_str(&mut self, s: &str) -> core::fmt::Result {
#[allow(clippy::indexing_slicing)]
if s.as_ptr() == self.expect.as_ptr() {
self.expect = &self.expect[s.len()..];
Ok(())
} else {
Err(core::fmt::Error {})
}
}
fn write_char(&mut self, c: char) -> core::fmt::Result {
let mut iter = self.expect.chars();
if iter.next() == Some(c) {
self.expect = iter.as_slice();
Ok(())
} else {
Err(core::fmt::Error {})
}
}
}
struct IsNormalizedSinkStr<'a> {
expect: &'a str,
}
impl<'a> IsNormalizedSinkStr<'a> {
pub fn new(slice: &'a str) -> Self {
IsNormalizedSinkStr { expect: slice }
}
pub fn finished(&self) -> bool {
self.expect.is_empty()
}
}
impl<'a> core::fmt::Write for IsNormalizedSinkStr<'a> {
fn write_str(&mut self, s: &str) -> core::fmt::Result {
#[allow(clippy::indexing_slicing)]
if s.as_ptr() == self.expect.as_ptr() {
self.expect = &self.expect[s.len()..];
Ok(())
} else {
Err(core::fmt::Error {})
}
}
fn write_char(&mut self, c: char) -> core::fmt::Result {
let mut iter = self.expect.chars();
if iter.next() == Some(c) {
self.expect = iter.as_str();
Ok(())
} else {
Err(core::fmt::Error {})
}
}
}