use core::panic;
use eframe::egui::{Label, Response, ScrollArea, Sense, Slider, Ui, Vec2};
use rodio::OutputStream;
use std::{
fs::File,
io::BufReader,
sync::{
Arc,
atomic::{AtomicBool, AtomicI32, Ordering},
},
thread::{self},
time::{Duration, Instant},
};
use crate::{
MediaType, TranscriptionData, TranscriptionProgress, TranscriptionSettings, media_information,
};
/// Playback lifecycle of a [`Player`].
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum PlayerState {
    /// Media is currently playing.
    Playing,
    /// Playback is halted and can be resumed from the current position.
    Paused,
    /// Playback reached the end of the media; the next play restarts from zero.
    Ended,
}
/// An egui media-player widget backed by a rodio audio stream, with
/// optional asynchronous transcription support.
#[derive(Debug)]
pub struct Player {
    /// Kind of media the file was classified as (audio / video / error).
    pub media_type: MediaType,
    /// Path to the media file being played.
    pub file_path: String,
    /// Size allocated for the widget; derived from the media type and scale.
    pub player_size: Vec2,
    /// Multiplier applied to the base widget size.
    pub player_scale: f32,
    /// Current playback lifecycle state.
    pub player_state: PlayerState,
    /// Playback position shown in the UI.
    pub elapsed_time: Duration,
    /// Total duration of the media, probed at construction.
    pub total_time: Duration,
    // Set once playback has been requested; gates spawning the audio thread.
    playback_guard: bool,
    // One-shot flag: the next frame should (re)anchor the UI stopwatch.
    start_playback: bool,
    // Shared flag telling the background audio thread to stop.
    stop_playback: Arc<AtomicBool>,
    // Wall-clock anchor for the UI stopwatch; None while paused/stopped.
    stopwatch_instant: Option<Instant>,
    /// Playback position at the moment the stopwatch was last anchored.
    pub start_time: Duration,
    /// Volume in percent (0..=100), shared with the audio thread.
    pub volume: Arc<AtomicI32>,
    // How transcription features are exposed in the UI.
    transcription_settings: TranscriptionSettings,
    /// Transcribed words received from the transcription task so far.
    pub transcript: Vec<TranscriptionData>,
    // Latest progress report received from the transcription task.
    transcription_progress: TranscriptionProgress,
    // Receives progress updates from the spawned transcription task;
    // Some while a transcription is running, None otherwise.
    transcript_receiver: Option<tokio::sync::mpsc::UnboundedReceiver<TranscriptionProgress>>,
}
impl Player {
/// Builds a player for the media file at `file_path`.
///
/// The media type and total duration are probed up front; playback
/// starts paused at position zero with full (100%) volume and no
/// transcription configured.
pub fn new(file_path: &str) -> Self {
    let media_type = media_information::get_media_type(file_path);
    let total_time = media_information::get_total_time(media_type, file_path);
    Self {
        media_type,
        file_path: file_path.to_string(),
        player_size: Vec2::default(),
        player_scale: 1.0,
        player_state: PlayerState::Paused,
        elapsed_time: Duration::ZERO,
        total_time,
        playback_guard: false,
        start_playback: false,
        stop_playback: Arc::new(AtomicBool::new(false)),
        stopwatch_instant: None,
        start_time: Duration::ZERO,
        volume: Arc::new(AtomicI32::new(100)),
        transcription_settings: TranscriptionSettings::None,
        transcript: Vec::new(),
        transcription_progress: TranscriptionProgress::NoProgress,
        transcript_receiver: None,
    }
}
/// Chooses how (and whether) transcription features are exposed in the UI.
pub fn set_transcript_settings(&mut self, setting: TranscriptionSettings) {
    self.transcription_settings = setting;
}
/// Sets the scale multiplier and recomputes the widget size from the
/// media type's base size.
///
/// This runs every frame (via `add_contents`), so it must be idempotent.
/// The previous implementation only used the base size while
/// `player_size` was still `Vec2::default()`, and otherwise did
/// `player_size *= player_scale` — compounding the factor on every frame
/// for any scale other than 1.0, which made the widget grow or shrink
/// exponentially. Deriving the size from the base each call fixes that.
///
/// # Panics
/// Panics if the media type is [`MediaType::Error`], which has no size.
pub fn set_player_scale(&mut self, scale: f32) {
    self.player_scale = scale;
    let base_size = match self.media_type {
        MediaType::Audio => Vec2 { x: 50.0, y: 10.0 },
        // Video size is still a placeholder (no video rendering yet).
        MediaType::Video => Vec2 { x: 0.0, y: 0.0 },
        MediaType::Error => panic!("No size since it is an unsupported type"),
    };
    self.player_size = base_size * self.player_scale;
}
/// Renders the playback controls: play/pause/replay button, elapsed/total
/// time label, seek slider, volume menu, transcription menu, and — when
/// enabled — the clickable transcript itself.
fn control_bar(&mut self, ui: &mut Ui) {
    ui.horizontal(|ui| {
        // The button glyph doubles as a state indicator.
        let pause_icon = match self.player_state {
            PlayerState::Playing => "⏸",
            PlayerState::Paused => "▶",
            PlayerState::Ended => "↺",
        };
        if ui.button(pause_icon).clicked() {
            match self.player_state {
                PlayerState::Playing => {
                    self.pause_player();
                }
                PlayerState::Paused => {
                    self.play_player();
                }
                PlayerState::Ended => {
                    // Replay: rewind to the start before playing again.
                    self.elapsed_time = Duration::ZERO;
                    self.play_player();
                }
            }
        }
        // End-of-media detection: stop the stopwatch/audio thread and
        // switch to the Ended state (replay icon).
        if self.elapsed_time >= self.total_time {
            self.pause_player();
            self.player_state = PlayerState::Ended;
        }
        ui.label(
            media_information::format_duration(self.elapsed_time)
                + " / "
                + &media_information::format_duration(self.total_time),
        );
        // Seek slider mirrors elapsed_time. Dragging pauses playback and
        // moves the position; playback is NOT resumed automatically when
        // the drag ends — the user presses play again.
        let mut slider_value = self.elapsed_time.as_secs_f32();
        let slider = Slider::new(&mut slider_value, 0.0..=self.total_time.as_secs_f32())
            .show_value(false);
        let slider_response = ui.add(slider);
        if slider_response.drag_started() {
            self.player_state = PlayerState::Paused;
            self.pause_player();
        }
        if slider_response.dragged() {
            self.elapsed_time = Duration::from_secs_f32(slider_value);
        }
        // Volume is shared with the audio thread through an atomic.
        // NOTE(review): loaded with Acquire but stored with Relaxed — for
        // a standalone value like this Relaxed on both sides would do;
        // confirm whether the asymmetry is intentional.
        let mut volume = self.volume.load(Ordering::Acquire);
        let volume_icon = if volume > 70 {
            "🔊"
        } else if volume > 40 {
            "🔉"
        } else if volume > 0 {
            "🔈"
        } else {
            "🔇"
        };
        ui.menu_button(volume_icon, |ui| {
            ui.add(Slider::new(&mut volume, 0..=100).vertical())
        });
        self.volume.store(volume, Ordering::Relaxed);
        let is_timestamped = matches!(
            self.transcription_settings,
            TranscriptionSettings::ShowTimeStamps
        );
        match self.transcription_settings {
            TranscriptionSettings::None => {}
            TranscriptionSettings::Allow
            | TranscriptionSettings::TranscriptLabel
            | TranscriptionSettings::ShowTimeStamps => {
                ui.menu_button("…", |ui| {
                    // Start a transcription only if none is in flight:
                    // the receiver is Some while one is running.
                    if ui.button("Transcribe audio").clicked()
                        && self.transcript_receiver.is_none()
                    {
                        self.transcription_progress = TranscriptionProgress::Reading;
                        let file_path = self.file_path.clone();
                        let (tx_transcript, rx_transcript) =
                            tokio::sync::mpsc::unbounded_channel();
                        self.transcript_receiver = Some(rx_transcript);
                        // Fire-and-forget task; progress flows back over
                        // the unbounded channel, polled below each frame.
                        tokio::spawn(async move {
                            let _ = media_information::transcribe_audio(
                                &file_path,
                                is_timestamped,
                                Some(tx_transcript),
                            )
                            .await;
                        });
                    }
                });
            }
        }
        // Non-blocking poll of the transcription task, once per frame.
        if let Some(receiver) = &mut self.transcript_receiver {
            if let Ok(progress) = receiver.try_recv() {
                self.transcription_progress = progress;
            };
            match &self.transcription_progress {
                TranscriptionProgress::NoProgress => {}
                TranscriptionProgress::InProgress(transcription_data) => {
                    // Deduplicate: the same entry may be observed on
                    // several frames before the next one arrives.
                    if !self.transcript.contains(transcription_data) {
                        self.transcript.push(transcription_data.clone());
                    }
                    ui.label("Transcription in Progress");
                    ui.spinner();
                }
                TranscriptionProgress::Reading => {
                    ui.label("Transcription in Progress");
                    ui.spinner();
                }
                TranscriptionProgress::Finished => {
                    // Drop the receiver so a new transcription can start.
                    self.transcript_receiver = None;
                }
            };
        }
    });
    match self.transcription_settings {
        TranscriptionSettings::TranscriptLabel | TranscriptionSettings::ShowTimeStamps => {
            if !self.transcript.is_empty() {
                ScrollArea::vertical().show(ui, |ui| {
                    ui.horizontal_wrapped(|ui| {
                        ui.style_mut().spacing.item_spacing.x = 0.0;
                        // NOTE(review): clones the entire transcript every
                        // frame to satisfy the borrow checker; fine for
                        // short clips, worth restructuring for long ones.
                        for word in self.transcript.clone() {
                            // Each word is a clickable label that seeks to
                            // that word's timestamp (and pauses).
                            let response = ui.add(Label::new(word.text).sense(Sense::click()));
                            if response.clicked() {
                                self.pause_player();
                                self.elapsed_time = word.time;
                            }
                        }
                    });
                    if self.transcription_progress == TranscriptionProgress::Finished {
                        ui.label("--- END OF TRANSCRIPT ---");
                    }
                });
            }
        }
        _ => {}
    }
}
/// Draws the player UI appropriate to the media type.
///
/// Audio and video currently share the same control bar.
///
/// # Panics
/// Panics on [`MediaType::Error`], since an unsupported file has nothing
/// to display.
fn display_player(&mut self, ui: &mut Ui) {
    match self.media_type {
        MediaType::Audio | MediaType::Video => self.control_bar(ui),
        MediaType::Error => panic!("Can't display due to invalid file type"),
    }
}
fn audio_stream(&mut self) {
if self.playback_guard {
let start_at = self.elapsed_time;
let file_path = self.file_path.clone();
let stop_audio = Arc::clone(&self.stop_playback);
let volume = Arc::clone(&self.volume);
thread::spawn(move || {
let (_stream, stream_handle) = OutputStream::try_default().unwrap();
let file = File::open(file_path).unwrap();
let sink = stream_handle.play_once(BufReader::new(file)).unwrap();
sink.try_seek(start_at).unwrap();
loop {
sink.set_volume(volume.load(Ordering::Acquire) as f32 / 100.0);
if stop_audio.load(Ordering::Relaxed) {
break;
}
}
});
}
}
/// Starts the playback stream matching the current media type.
/// Only audio is implemented; video and error handling are stubs.
fn start_stream(&mut self) {
    match self.media_type {
        MediaType::Audio => self.audio_stream(),
        MediaType::Video => todo!(),
        MediaType::Error => todo!(),
    }
}
/// Transitions into the playing state and launches the playback stream.
///
/// A fresh stop flag is allocated first, so a previously signalled audio
/// thread (which still holds the old `Arc`) cannot halt the new stream.
fn play_player(&mut self) {
    self.stop_playback = Arc::new(AtomicBool::new(false));
    self.player_state = PlayerState::Playing;
    // Tell the stopwatch to re-anchor and permit the stream to spawn.
    self.start_playback = true;
    self.playback_guard = true;
    self.start_stream();
}
/// Transitions into the paused state and signals the audio thread to exit.
fn pause_player(&mut self) {
    self.player_state = PlayerState::Paused;
    self.start_playback = false;
    // Raise the shared flag; the audio thread polls it and breaks out.
    self.stop_playback.store(true, Ordering::Relaxed);
}
/// Current playback position: the running stopwatch plus the position at
/// which it was anchored, or the frozen position while paused.
fn get_elapsed_time(&mut self) -> Duration {
    self.stopwatch_instant
        .map_or(self.elapsed_time, |anchor| anchor.elapsed() + self.start_time)
}
/// Advances the UI stopwatch once per frame.
///
/// Refreshes `elapsed_time`, anchors a new `Instant` when playback was
/// just requested, and drops the anchor (freezing the position) once a
/// stop has been signalled.
fn setup_stopwatch(&mut self) {
    self.elapsed_time = self.get_elapsed_time();
    if self.start_playback {
        // Playback just (re)started: remember where it resumed from and
        // anchor the stopwatch at now. One-shot, so clear the flag.
        self.start_time = self.elapsed_time;
        self.stopwatch_instant = Some(Instant::now());
        self.start_playback = false;
    }
    if self.stop_playback.load(Ordering::Acquire) {
        self.stopwatch_instant = None;
    }
}
/// Allocates space for the widget, keeps the timing state fresh, and
/// draws the player; schedules a repaint so the clock advances smoothly.
fn add_contents(&mut self, ui: &mut Ui) -> Response {
    self.set_player_scale(self.player_scale);
    let (area, response) = ui.allocate_exact_size(self.player_size, Sense::click());
    if ui.is_rect_visible(area) {
        self.setup_stopwatch();
        self.display_player(ui);
        // ~100 repaints/s keeps the time label and slider visually smooth.
        ui.ctx().request_repaint_after(Duration::from_millis(10));
    }
    response
}
/// Public entry point: embeds the player widget into the given egui `Ui`
/// and returns the interaction response for the allocated area.
pub fn ui(&mut self, ui: &mut Ui) -> Response {
    self.add_contents(ui)
}
}