#![feature(step_trait)]
use std::{iter::Step, u64};
use utls::numerics::{UInt, random::LCG};
/// Demonstration driver for the `utls::numerics` arbitrary-precision
/// unsigned integer type (`UInt`) and the `LCG` pseudo-random generator.
///
/// Walks through: arithmetic and bitwise operators, compound assignment,
/// equality/ordering, exponentiation, width-parameterized constants,
/// radix parsing, display flags, primitive conversions, `Step` trait
/// navigation (nightly `step_trait`), square roots, logarithms, and
/// random number generation. Output goes to stdout.
fn main() {
    let a = UInt::new(100u128);
    let b = UInt::new(50u128);
    println!("a: {}, b: {}", a, b);

    // Arithmetic operators. Operands are cloned before each use —
    // presumably the operator impls consume their arguments (TODO
    // confirm against the UInt operator impls in utls).
    let sum = a.clone() + b.clone();
    println!("a + b = {}", sum);
    let diff = a.clone() - b.clone();
    println!("a - b = {}", diff);
    let prod = a.clone() * b.clone();
    println!("a * b = {}", prod);
    let quot = a.clone() / b.clone();
    println!("a / b = {}", quot);
    let rem = a.clone() % b.clone();
    println!("a % b = {}", rem);

    // Bitwise operators on small binary literals.
    let c = UInt::new(0b1100u128);
    let d = UInt::new(0b1010u128);
    let and = c.clone() & d.clone();
    println!("c & d = {}", and);
    let or = c.clone() | d.clone();
    println!("c | d = {}", or);
    let xor = c.clone() ^ d.clone();
    println!("c ^ d = {}", xor);
    let left_shift = c.clone() << 2;
    println!("c << 2 = {}", left_shift);
    let right_shift = c.clone() >> 1;
    println!("c >> 1 = {}", right_shift);

    // Compound assignment: arithmetic.
    let mut x = UInt::new(100u128);
    x += UInt::new(50u128);
    println!("x after += 50: {}", x);
    x -= UInt::new(30u128);
    println!("x after -= 30: {}", x);
    x *= UInt::new(2u128);
    println!("x after *= 2: {}", x);
    x /= UInt::new(4u128);
    println!("x after /= 4: {}", x);
    x %= UInt::new(7u128);
    println!("x after %= 7: {}", x);

    // Compound assignment: bitwise.
    let mut y = UInt::new(0b1100u128);
    y &= UInt::new(0b1010u128);
    println!("y after &= 0b1010: {}", y);
    y |= UInt::new(0b0011u128);
    println!("y after |= 0b0011: {}", y);
    y ^= UInt::new(0b1111u128);
    println!("y after ^= 0b1111: {}", y);
    y <<= 2;
    println!("y after <<= 2: {}", y);
    y >>= 1;
    println!("y after >>= 1: {}", y);

    // Equality.
    let e1 = UInt::new(100u128);
    let e2 = UInt::new(100u128);
    let e3 = UInt::new(200u128);
    println!("e1 == e2: {}", e1 == e2);
    println!("e1 == e3: {}", e1 == e3);

    // Exponentiation and scientific-notation helper (`sci` appears to
    // scale by a power of ten, judging by the caption — verify in utls).
    let base = UInt::new(2u128);
    let result = base.pow(3);
    println!("\n\n\n2^3 = {}", result);
    let num = UInt::new(5u128);
    let sci_result = num.sci(2);
    println!("5 * 10^2 = {}", sci_result);

    // Named constants parameterized by bit width.
    let zero = UInt::zero(8);
    let one = UInt::one(8);
    let ten = UInt::ten(8);
    let max = UInt::max(8);
    println!(
        "8-bit: zero: {}, one: {}, ten: {}, max: {}",
        zero, one, ten, max
    );
    let zero = UInt::zero(1024);
    let one = UInt::one(1024);
    let ten = UInt::ten(1024);
    let max = UInt::max(1024);
    println!(
        "1024-bit: zero: {}, one: {}, ten: {}, max: {}",
        zero, one, ten, max
    );

    // Radix parsing; these inputs are known-valid, so unwrap is safe
    // for a demo.
    let hex = UInt::from_hex("FF").unwrap();
    let binary = UInt::from_binary("1010").unwrap();
    let custom = UInt::from_str_radix("123", 10).unwrap();
    println!(
        "From: hex FF = {}, binary 1010 = {}, decimal 123 = {}",
        hex, binary, custom
    );

    // Toggling `dbg_disp_enabled` switches the Display rendering
    // (binary vs. decimal, per the captions below).
    let mut num = UInt::new(12u128);
    num.dbg_disp_enabled = true;
    println!("12 in binary: {}", num);
    num.dbg_disp_enabled = false;
    println!("12 in decimal: {}", num);

    // NOTE(review): this value exists only to demonstrate flipping the
    // `dynamic_resizing` flag and is never read afterwards; the leading
    // underscore silences the unused-variable warning without changing
    // behavior.
    let mut _fixed_width = UInt::new(255u128);
    _fixed_width.dynamic_resizing = false;

    // Ordering comparisons.
    let n1 = UInt::new(100u128);
    let n2 = UInt::new(200u128);
    println!("n1 < n2: {}", n1 < n2);
    println!("n1 <= n2: {}", n1 <= n2);
    println!("n1 > n2: {}", n1 > n2);
    println!("n1 >= n2: {}", n1 >= n2);

    // Fallible narrowing conversions to every unsigned primitive;
    // 100 fits in all of them, so unwrap cannot fail here.
    let u128_val: u128 = n1.clone().try_into().unwrap();
    let u64_val: u64 = n1.clone().try_into().unwrap();
    let u32_val: u32 = n1.clone().try_into().unwrap();
    let u16_val: u16 = n1.clone().try_into().unwrap();
    let u8_val: u8 = n1.clone().try_into().unwrap();
    println!(
        "Converted values: {}, {}, {}, {}, {}",
        u128_val, u64_val, u32_val, u16_val, u8_val
    );

    // Conversions from signed primitives. How negatives map to an
    // unsigned UInt is defined by the `From` impls in utls — not
    // visible from here.
    let from_i8 = UInt::from(-8i8);
    let from_i16 = UInt::from(-16i16);
    let from_i32 = UInt::from(-32i32);
    println!("From signed: {}, {}, {}", from_i8, from_i16, from_i32);

    // `std::iter::Step` implementation (requires the nightly
    // `step_trait` feature enabled at the top of this file).
    let start = UInt::new(5u128);
    let end = UInt::new(10u128);
    let steps = UInt::steps_between(&start, &end);
    println!("Steps between 5 and 10: {:?}", steps);
    let next = UInt::forward_checked(start.clone(), 3);
    let prev = UInt::backward_checked(start.clone(), 2);
    println!(
        "(From 5) Forward 3 steps: {}, Backward 2 steps: {}",
        next.unwrap(),
        prev.unwrap()
    );

    // Integer square roots.
    let hund = UInt::new(100u128);
    let two = UInt::new(2u128);
    let one = UInt::one(8);
    let thirteen = UInt::new(13u128);
    println!(
        "Square root of: hundred: {}, two: {}, one: {}, thirteen: {}",
        UInt::sqrt(hund),
        UInt::sqrt(two),
        UInt::sqrt(one),
        UInt::sqrt(thirteen)
    );

    // Logarithms in an arbitrary base plus natural/binary/decimal
    // shortcuts; all fallible and unwrapped (inputs are valid here).
    let num = UInt::new(100u128);
    let base = UInt::new(8u128);
    let log_result = num.log(&base);
    println!("log_8(100) = {}", log_result.unwrap());
    let ln_result = num.ln();
    println!("ln(100) = {}", ln_result.unwrap());
    let log2_result = num.log2();
    println!("log_2(100) = {}", log2_result.unwrap());
    let log10_result = num.log10();
    println!("log_10(100) = {}", log10_result.unwrap());

    // Random values at several bit widths, plus a bounded draw.
    let random_8bit = UInt::random(8);
    println!("Random 8-bit number: {}", random_8bit);
    let random_16bit = UInt::random(16);
    println!("Random 16-bit number: {}", random_16bit);
    let random_1024bit = UInt::random(1024);
    println!("Random 1024-bit number: {}", random_1024bit);
    let min = UInt::new(10u128);
    let max = UInt::new(20u128);
    let random_in_range = UInt::random_in_range(16, min, max);
    println!("Random 16-bit number between 10 and 20: {}", random_in_range);

    // Linear congruential generator, seeded deterministically.
    let mut lcg = LCG::new(12345);
    println!("Random u64: {}", lcg.next_u64());
    println!("Random u8: {}", lcg.next_u8());
    println!(
        "Random u8 in range (10-20): {}",
        lcg.next_u8_in_range(10, 20)
    );
}