//! Simple asynchronous task manager abstraction.
//!
//! Provides an abstraction layer for executing asynchronous tasks across multiple threads.
//! Threads are reused rather than re-created, which is faster than spawning a new thread per task.
//!
//! This crate aims to provide both a lower-level, more manual API which is highly configurable,
//! and a higher-level API via the `command-buffers` feature.
//!
//! # Features:
//!
//! - [`queue`]\: Manages tasks once they are submitted. Efficiently spreads them across threads.
//! Prevents waiting tasks from clogging the workers by not executing them until they are ready.
//!
//! - [`task`]\: Abstraction layer over functions, which can be synchronous or asynchronous, take
//! in parameters, and return a result.
//!
//! - [`sync`]\: Synchronization objects which provide a way of implementing control flow.
//!
//! - [`block`]\: Very simple 'poll to completion' awaiter.
//!
//! - [`utils`]\: Some simple async utility functions.
//!
//! - [`command buffers`]\: A higher-level API abstraction layer, which allows making custom tasks,
//! as well as chaining tasks.
//!
//! [`command buffers`]: command_buffers
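//!
//! # Example
//!
//! A minimal sketch of driving tasks on the main thread, mirroring the crate's own test
//! (requires the `main-thread` feature; the crate name `mvsync` is assumed here):
//!
//! ```ignore
//! use mvsync::{MVSync, MVSyncSpecs};
//!
//! let sync = MVSync::new(MVSyncSpecs::default());
//!
//! // Register the calling thread as the main thread. The closure distributes tasks onto it,
//! // and `end_when_done = true` unblocks the thread once every submitted task has finished.
//! sync.register_main_thread(|sync| async move {
//!     let (task, _handle) = sync.create_async_task(|| async {
//!         println!("hello from MVSync");
//!     });
//!     sync.submit_to_main_thread(task);
//! }, true);
//! ```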

use std::future::Future;
use std::sync::{Arc, RwLock};
#[cfg(feature = "main-thread")]
use std::sync::atomic::{AtomicU32, AtomicU8};
#[cfg(feature = "main-thread")]
use crossbeam_channel::unbounded as channel;
use mvutils::id_eq;
use mvutils::utils::next_id;
#[cfg(feature = "main-thread")]
use mvutils::utils::Recover;
use crate::block::Signal;
use crate::queue::Queue;
#[cfg(feature = "main-thread")]
use crate::queue::WorkerThread;
use crate::sync::{Fence, Semaphore};
use crate::task::{Task, TaskHandle, TaskState};

#[cfg(feature = "command-buffers")]
use crate::command_buffers::buffer::{CommandBuffer, CommandBufferAllocationError};

pub mod prelude;

pub(crate) mod run;
pub mod queue;
pub mod task;
pub mod sync;
pub mod block;
pub mod utils;
pub mod timer;
#[cfg(feature = "command-buffers")]
pub mod command_buffers;

/// A marker trait for types that can be used with MVSync.
pub trait MVSynced: Send + Sync + 'static {}
impl<T> MVSynced for T where T: Send + Sync + 'static {}

/// Main structure for managing multithreaded asynchronous tasks.
pub struct MVSync {
    id: u64,
    queue: Arc<Queue>,
    signal: Arc<Signal>,
    #[cfg(feature = "main-thread")]
    worker: RwLock<Option<WorkerThread>>
}

impl MVSync {
    /// Create a new MVSync instance.
    pub fn new(specs: MVSyncSpecs) -> Arc<MVSync> {
        next_id("MVSync");
        let signal = Arc::new(Signal::new());
        Arc::new(MVSync {
            id: next_id("MVSync"),
            queue: Arc::new(Queue::new(specs, vec![], signal.clone())),
            signal,
            #[cfg(feature = "main-thread")]
            worker: RwLock::new(None)
        })
    }

    /// Create a new MVSync instance with labelled worker threads.
    pub fn labelled(specs: MVSyncSpecs, labels: Vec<&'static str>) -> Arc<MVSync> {
        next_id("MVSync");
        let signal = Arc::new(Signal::new());
        Arc::new(MVSync {
            id: next_id("MVSync"),
            queue: Arc::new(Queue::new(specs, labels.into_iter().map(ToString::to_string).collect(), signal.clone())),
            signal,
            #[cfg(feature = "main-thread")]
            worker: RwLock::new(None)
        })
    }

    #[cfg(feature = "main-thread")]
    /// Registers the thread this is called from as the main thread. This allows you to run async tasks
    /// using the queue and worker system on the main thread. This will block the current thread until
    /// [`MVSync::end_main_thread`] is called and all async tasks have finished.
    ///
    /// The function parameter you provide is a distributor, which allows you to submit some
    /// tasks onto the main thread. It must return (or `.await` some non-instant future) before
    /// any of those tasks start executing; simply returning is recommended.
    ///
    /// Only a single main thread can be registered, and all the tasks on the previous main thread
    /// must have finished before a new one is registered.
    ///
    /// If `end_when_done` is set to true, the worker will automatically end itself when there are no
    /// more tasks for it to complete, unblocking the main thread and allowing another thread to register
    /// itself as a main thread.
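    ///
    /// # Example
    ///
    /// A minimal sketch, mirroring the crate's own test (the closure only submits tasks and
    /// returns, and `end_when_done = true` lets the worker unblock once the tasks finish):
    ///
    /// ```ignore
    /// let sync = MVSync::new(MVSyncSpecs::default());
    /// sync.register_main_thread(|sync| async move {
    ///     let (task, _handle) = sync.create_async_task(|| async {
    ///         // do some asynchronous work here
    ///     });
    ///     sync.submit_to_main_thread(task);
    /// }, true);
    /// ```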
    pub fn register_main_thread<F: Future<Output = ()> + Send>(self: &Arc<MVSync>, init: impl FnOnce(Arc<MVSync>) -> F + Send + 'static, end_when_done: bool) {
        let mut worker = self.worker.write().recover();
        if worker.is_none() || worker.as_ref().unwrap().finished() {
            let signal = Arc::new(Signal::new());
            let (sender, receiver) = channel();
            let free_workers = Arc::new(AtomicU32::new(u32::MAX));
            let access = free_workers.clone();
            let signal_clone = signal.clone();
            let end = Arc::new(AtomicU8::new(0));
            let end_clone = end.clone();
            let thread = WorkerThread {
                id: next_id("MVSync"),
                sender,
                label: None,
                free_workers,
                end,
                signal
            };
            worker.replace(thread);
            let this = self.clone();
            let (task, _) = self.create_async_task(|| async move {
                init(this).await;
            });
            worker.as_ref().unwrap().send(task);
            drop(worker);
            WorkerThread::run(receiver, access, signal_clone, end_clone, end_when_done);
        }
        else {
            panic!("This MVSync instance already has a main thread registered!");
        }
    }

    /// Add a task to the main thread. This function will panic if the main thread has ended
    /// or was never registered.
    #[cfg(feature = "main-thread")]
    pub fn submit_to_main_thread(self: &Arc<MVSync>, task: Task) {
        let worker = self.worker.read().recover();
        if worker.is_none() || worker.as_ref().unwrap().ended() {
            panic!("This MVSync instance does not have a main thread registered!");
        }
        else {
            worker.as_ref().unwrap().send(task);
        }
    }

    /// Ends the main thread. This function won't panic; it does nothing if the main thread
    /// has already ended or was never registered.
    #[cfg(feature = "main-thread")]
    pub fn end_main_thread(self: &Arc<MVSync>) {
        let worker = self.worker.write().recover();
        if !(worker.is_none() || worker.as_ref().unwrap().ended()) {
            worker.as_ref().unwrap().end();
        }
    }

    /// Checks if the main thread has ended.
    #[cfg(feature = "main-thread")]
    pub fn main_thread_ended(self: &Arc<MVSync>) -> bool {
        let worker = self.worker.read().recover();
        worker.is_none() || worker.as_ref().unwrap().ended()
    }

    /// Checks if the main thread has finished running all tasks.
    #[cfg(feature = "main-thread")]
    pub fn main_thread_finished(self: &Arc<MVSync>) -> bool {
        let worker = self.worker.read().recover();
        worker.is_none() || worker.as_ref().unwrap().finished()
    }

    /// Get the MVSync queue bound to this [`MVSync`] instance.
    pub fn get_queue(self: &Arc<MVSync>) -> Arc<Queue> {
        self.queue.clone()
    }

    /// Create a [`Semaphore`].
    pub fn create_semaphore(self: &Arc<MVSync>) -> Arc<Semaphore> {
        Arc::new(Semaphore::new())
    }

    /// Create a [`Fence`].
    pub fn create_fence(self: &Arc<MVSync>) -> Arc<Fence> {
        Arc::new(Fence::new())
    }

    #[cfg(feature = "command-buffers")]
    /// Allocate a new [`CommandBuffer`] that can be used to record commands.
    ///
    /// # Returns:
    /// - [`Ok(CommandBuffer)`] if the command buffer was successfully allocated.
    /// - [`Err(CommandBufferAllocationError)`] if the command buffer could not be allocated on the heap.
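    ///
    /// A minimal sketch (requires the `command-buffers` feature; see the [`command_buffers`]
    /// module for recording and chaining commands):
    ///
    /// ```ignore
    /// let buffer = sync.allocate_command_buffer().expect("command buffer allocation failed");
    /// ```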
    pub fn allocate_command_buffer(self: &Arc<MVSync>) -> Result<CommandBuffer, CommandBufferAllocationError> {
        CommandBuffer::new(self.signal.clone())
    }

    /// Create a new [`Task`], wrapping a synchronous function that returns a value.
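    ///
    /// A minimal sketch (assuming `MVSync` and `MVSyncSpecs` are in scope); how the returned
    /// [`Task`] is submitted, to the queue or to the main thread, depends on your setup, so
    /// only creation is shown:
    ///
    /// ```ignore
    /// let sync = MVSync::new(MVSyncSpecs::default());
    /// // `task` is what you submit for execution; `handle` later yields the result.
    /// let (task, handle) = sync.create_task(|| 40 + 2);
    /// ```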
    pub fn create_task<T: MVSynced>(self: &Arc<MVSync>, function: impl FnOnce() -> T + Send + 'static) -> (Task, TaskHandle<T>) {
        let buffer = Arc::new(RwLock::new(None));
        let state = Arc::new(RwLock::new(TaskState::Pending));
        let signal = Arc::new(Signal::new());
        let result = TaskHandle::new(buffer.clone(), state.clone(), signal.clone());
        let task = Task::from_function(function, buffer, state, [signal, self.signal.clone()]);
        (task, result)
    }

    /// Create a new [`Task`], wrapping a synchronous function that takes the result of a
    /// previous task as its parameter and returns a value.
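    ///
    /// A minimal sketch of chaining two tasks (assuming `MVSync` and `MVSyncSpecs` are in scope;
    /// both tasks still have to be submitted for execution):
    ///
    /// ```ignore
    /// let sync = MVSync::new(MVSyncSpecs::default());
    /// let (first, first_handle) = sync.create_task(|| 21);
    /// // `second` runs once `first` has produced its result, doubling it.
    /// let (second, second_handle) = sync.create_continuation(|x: i32| x * 2, first_handle);
    /// ```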
    pub fn create_continuation<T: MVSynced, R: MVSynced>(self: &Arc<MVSync>, function: impl FnOnce(T) -> R + Send + 'static, predecessor: TaskHandle<T>) -> (Task, TaskHandle<R>) {
        let buffer = Arc::new(RwLock::new(None));
        let state = Arc::new(RwLock::new(TaskState::Pending));
        let signal = Arc::new(Signal::new());
        let result = TaskHandle::new(buffer.clone(), state.clone(), signal.clone());
        let task = Task::from_continuation(function, buffer, state, [signal, self.signal.clone()], predecessor);
        (task, result)
    }

    /// Create a new [`Task`], wrapping an asynchronous function that returns a value.
    pub fn create_async_task<T: MVSynced, F: Future<Output = T> + Send>(self: &Arc<MVSync>, function: impl FnOnce() -> F + Send + 'static) -> (Task, TaskHandle<T>) {
        let buffer = Arc::new(RwLock::new(None));
        let state = Arc::new(RwLock::new(TaskState::Pending));
        let signal = Arc::new(Signal::new());
        let result = TaskHandle::new(buffer.clone(), state.clone(), signal.clone());
        let task = Task::from_async(function, buffer, state, [signal, self.signal.clone()]);
        (task, result)
    }

    /// Create a new [`Task`], wrapping a future that returns a value.
    pub fn create_future_task<T: MVSynced>(self: &Arc<MVSync>, function: impl Future<Output = T> + Send + 'static) -> (Task, TaskHandle<T>) {
        let buffer = Arc::new(RwLock::new(None));
        let state = Arc::new(RwLock::new(TaskState::Pending));
        let signal = Arc::new(Signal::new());
        let result = TaskHandle::new(buffer.clone(), state.clone(), signal.clone());
        let task = Task::from_future(function, buffer, state, [signal, self.signal.clone()]);
        (task, result)
    }

    /// Create a new [`Task`], wrapping an asynchronous function that takes the result of a
    /// previous task as its parameter and returns a value.
    pub fn create_async_continuation<T: MVSynced, R: MVSynced, F: Future<Output = R> + Send>(self: &Arc<MVSync>, function: impl FnOnce(T) -> F + Send + 'static, predecessor: TaskHandle<T>) -> (Task, TaskHandle<R>) {
        let buffer = Arc::new(RwLock::new(None));
        let state = Arc::new(RwLock::new(TaskState::Pending));
        let signal = Arc::new(Signal::new());
        let result = TaskHandle::new(buffer.clone(), state.clone(), signal.clone());
        let task = Task::from_async_continuation(function, buffer, state, [signal, self.signal.clone()], predecessor);
        (task, result)
    }
}

id_eq!(MVSync);

/// A struct with configuration parameters (specifications) for MVSync.
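///
/// A minimal sketch of configuring MVSync (assuming `MVSyncSpecs` is in scope):
///
/// ```ignore
/// // Two executor threads, each polling up to four asynchronous workers.
/// let specs = MVSyncSpecs { thread_count: 2, workers_per_thread: 4 };
/// // Or start from the defaults (one thread, one worker) and tweak a field.
/// let default_specs = MVSyncSpecs { workers_per_thread: 2, ..Default::default() };
/// ```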
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
pub struct MVSyncSpecs {
    /// How many threads to create to handle task execution. One extra thread is created to
    /// distribute tasks between the worker threads and to hold tasks that are not yet ready
    /// to execute.
    pub thread_count: u32,

    /// How many asynchronous workers to create per thread. This does not increase raw throughput;
    /// it is only useful if your tasks spend time waiting on events, such as network requests.
    pub workers_per_thread: u32,
}

impl MVSyncSpecs {
    /// Specs that use two fewer executor threads than the machine's logical core count
    /// (never fewer than one), with the given number of workers per thread.
    pub fn max_performance(workers_per_thread: u32) -> Self {
        let threads = num_cpus::get() as u32;
        MVSyncSpecs {
            // Avoid underflow on machines with fewer than three logical cores.
            thread_count: threads.saturating_sub(2).max(1),
            workers_per_thread
        }
    }

    /// Specs that use one executor thread per logical core, with the given number of workers per thread.
    pub fn all_cores(workers_per_thread: u32) -> Self {
        let threads = num_cpus::get();
        MVSyncSpecs {
            thread_count: threads as u32,
            workers_per_thread
        }
    }
}

impl Default for MVSyncSpecs {
    fn default() -> Self {
        MVSyncSpecs {
            thread_count: 1,
            workers_per_thread: 1
        }
    }
}


#[cfg(test)]
mod tests {
    use crate::{MVSync, MVSyncSpecs};
    use crate::utils::async_yield;

    #[test]
    #[cfg(feature = "main-thread")]
    fn it_works() {
        let sync = MVSync::new(MVSyncSpecs {
            thread_count: 1,
            workers_per_thread: 2
        });

        sync.register_main_thread(|sync| async move {
            let (a, _) = sync.create_async_task(|| async move {
                run("A").await;
            });

            let (b, _) = sync.create_async_task(|| async move {
                run("B").await;
            });

            sync.submit_to_main_thread(a);
            sync.submit_to_main_thread(b);
        }, true);
    }

    async fn run(name: &str) {
        for i in 0..10 {
            println!("{}: {}", name, i);
            async_yield().await;
        }
    }
}