use crate::runtime::repl::Repl;
use crate::runtime::replay::{Event, ReplSession, ReplayValidator};
use anyhow::Result;
use regex::Regex;
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
use std::collections::{HashMap, HashSet};
/// A complete, self-contained assignment definition: environment setup,
/// graded tasks, execution constraints, and the grading rubric.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Assignment {
/// Unique assignment identifier.
pub id: String,
/// Human-readable title.
pub title: String,
/// Free-form description shown to students.
pub description: String,
/// Code and bindings loaded into the REPL before grading starts.
pub setup: AssignmentSetup,
/// Ordered list of graded tasks.
pub tasks: Vec<Task>,
/// Resource and language constraints applied to submissions.
pub constraints: AssignmentConstraints,
/// Weighted rubric used for qualitative scoring.
pub rubric: GradingRubric,
}
/// Environment prepared in the sandboxed REPL before any task runs.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AssignmentSetup {
/// Statements evaluated first, in order.
pub prelude_code: Vec<String>,
/// Instructor-provided helpers keyed by binding name; each value is the
/// function source, bound via `let <name> = <code>`.
pub provided_functions: HashMap<String, String>,
/// Binding names students must not rebind.
/// NOTE(review): not enforced by `GradingEngine::load_setup` — confirm
/// where (or whether) enforcement happens.
pub immutable_bindings: HashSet<String>,
}
/// One graded task with visible and hidden test cases plus structural
/// requirements.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Task {
/// Unique task identifier within the assignment.
pub id: String,
/// What the student is asked to implement.
pub description: String,
/// Maximum points awardable for this task (earned points are capped here).
pub points: u32,
/// Test cases whose inputs/results students can see.
pub test_cases: Vec<TestCase>,
/// Test cases hidden from students, graded the same way.
pub hidden_cases: Vec<TestCase>,
/// Structural requirements checked against the submission.
pub requirements: Vec<Requirement>,
}
/// A single input expression and the behavior expected of its output.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TestCase {
/// Expression evaluated in the REPL.
pub input: String,
/// How the evaluation result is judged.
pub expected: ExpectedBehavior,
/// Points awarded when the case passes.
pub points: u32,
/// Wall-clock budget; slower evaluations fail the case.
pub timeout_ms: u64,
}
/// Ways a test case's output can be validated.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ExpectedBehavior {
/// Output must equal this string exactly.
ExactOutput(String),
/// `Pattern`: output must match the regex. `TypeSignature`: output must
/// contain the given type text.
Pattern(String), TypeSignature(String),
/// Output must satisfy a named predicate.
/// (Not yet executed by `GradingEngine::run_test_case`.)
Predicate(PredicateCheck),
/// Execution must stay within the given time/allocation bounds.
/// (Not yet executed by `GradingEngine::run_test_case`.)
PerformanceBound { max_ns: u64, max_bytes: usize },
}
/// A named predicate with the source of its checking function.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PredicateCheck {
/// Human-readable predicate name.
pub name: String,
/// Source text of the boolean check function.
pub check_fn: String, }
/// Structural/style requirements a submission can be checked against.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum Requirement {
UseRecursion,
NoLoops,
UseHigherOrderFunctions,
TypeSafe,
PureFunction,
TailRecursive,
}
/// Global limits applied to a submission while it is graded.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AssignmentConstraints {
/// Overall time budget in milliseconds.
pub max_time_ms: u64,
/// Overall memory budget in megabytes.
pub max_memory_mb: usize,
/// Modules the submission may import.
pub allowed_imports: Vec<String>,
/// Keywords that must not appear in the submission.
pub forbidden_keywords: Vec<String>,
/// Optional performance-scoring thresholds.
pub performance: Option<PerformanceConstraints>,
}
/// Thresholds used by `GradingEngine::measure_performance`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PerformanceConstraints {
/// CPU-time budget in milliseconds; exceeding it deducts 20 points.
pub max_cpu_ms: u64,
/// Peak-heap budget in megabytes; exceeding it deducts 20 points.
pub max_heap_mb: usize,
/// Expected asymptotic complexity, e.g. "O(n log n)".
pub complexity_bound: String, }
/// Weighted scoring categories plus optional lateness and bonus policies.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GradingRubric {
/// Scoring categories, combined by weight.
pub categories: Vec<RubricCategory>,
/// Optional policy for late submissions.
pub late_penalty: Option<LatePenalty>,
/// Optional extra-credit criteria.
pub bonus_criteria: Vec<BonusCriterion>,
}
/// A named rubric category whose criteria contribute with `weight`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RubricCategory {
/// Category name, e.g. "Correctness".
pub name: String,
/// Relative weight against the other categories.
pub weight: f32,
/// Criteria scored within this category.
pub criteria: Vec<Criterion>,
}
/// One scorable rubric criterion.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Criterion {
/// What is being assessed.
pub description: String,
/// Maximum points for this criterion.
pub max_points: u32,
/// How the criterion is scored.
pub evaluation: CriterionEvaluation,
}
/// How a criterion's score is produced.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum CriterionEvaluation {
/// Scored entirely by an automated check.
Automatic(AutomaticCheck),
/// `Manual`: graded by a human, so the engine scores it 0.
/// `Hybrid`: the engine awards `max_points * auto_weight`.
Manual(String), Hybrid {
auto_weight: f32,
manual_weight: f32,
},
}
/// Automated checks available to rubric criteria.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum AutomaticCheck {
/// Award full points when the test suite passed.
TestsPassed,
/// Award points based on a code-quality score threshold.
CodeQuality { min_score: f32 },
/// Required documentation sections.
/// (Not yet scored by `GradingEngine::evaluate_criterion`.)
Documentation { required_sections: Vec<String> },
/// Performance metric versus threshold.
/// (Not yet scored by `GradingEngine::evaluate_criterion`.)
Performance { metric: String, threshold: f64 },
}
/// Late-submission policy.
/// NOTE(review): declared here but not applied by `GradingEngine` in this
/// module — confirm which consumer enforces it.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LatePenalty {
/// Hours after the deadline with no penalty.
pub grace_hours: u32,
/// Percentage points deducted per late day.
pub penalty_per_day: f32,
/// Maximum number of late days considered.
pub max_days_late: u32,
}
/// An extra-credit criterion.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BonusCriterion {
/// What earns the bonus.
pub description: String,
/// Bonus points awarded.
pub points: u32,
/// How the bonus is detected.
pub check: BonusCheck,
}
/// Kinds of bonus checks.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum BonusCheck {
/// A named optional feature was implemented.
ExtraFeature(String),
/// Performance improved by at least the given percentage.
Optimization { improvement_percent: f32 },
/// Open-ended credit for a creative approach.
CreativeSolution,
}
/// Orchestrates grading: validates the replayed session, runs submissions in
/// an isolated REPL, and scores tasks, rubric, performance, and originality.
pub struct GradingEngine {
/// Validates replayed sessions (constructed in strict mode by `new`).
pub replay_validator: ReplayValidator,
/// Compares submissions against previously recorded fingerprints.
pub plagiarism_detector: PlagiarismDetector,
/// Creates isolated REPL instances for grading.
pub secure_sandbox: SecureSandbox,
}
/// `Default` delegates to [`GradingEngine::new`].
impl Default for GradingEngine {
fn default() -> Self {
Self::new()
}
}
impl GradingEngine {
pub fn new() -> Self {
Self {
replay_validator: ReplayValidator::new(true),
plagiarism_detector: PlagiarismDetector::new(),
secure_sandbox: SecureSandbox::new(),
}
}
pub fn grade_submission(
&mut self,
assignment: &Assignment,
submission: &ReplSession,
) -> GradeReport {
let mut report = GradeReport::new(assignment.id.clone());
if !self.verify_no_tampering(submission) {
report.mark_invalid("Session integrity check failed");
return report;
}
let mut repl = match self.secure_sandbox.create_isolated_repl() {
Ok(r) => r,
Err(e) => {
report.mark_invalid(&format!("Failed to create sandbox: {e}"));
return report;
}
};
if let Err(e) = self.load_setup(&mut repl, &assignment.setup) {
report.mark_invalid(&format!("Failed to load setup: {e}"));
return report;
}
for task in &assignment.tasks {
let task_grade = self.grade_task(&mut repl, task, submission);
report.add_task_grade(task_grade);
}
report.rubric_score = self.evaluate_rubric(&assignment.rubric, submission);
if let Some(perf) = &assignment.constraints.performance {
report.performance_score = self.measure_performance(submission, perf);
}
report.originality_score = self.plagiarism_detector.analyze(submission);
report.calculate_final_grade();
report
}
fn verify_no_tampering(&self, session: &ReplSession) -> bool {
let mut prev_timestamp = 0u64;
for event in &session.timeline {
if event.timestamp_ns < prev_timestamp {
return false; }
prev_timestamp = event.timestamp_ns;
}
true
}
fn load_setup(&self, repl: &mut Repl, setup: &AssignmentSetup) -> Result<()> {
for code in &setup.prelude_code {
repl.eval(code)?;
}
for (name, code) in &setup.provided_functions {
repl.eval(&format!("let {name} = {code}"))?;
}
Ok(())
}
fn grade_task(&mut self, repl: &mut Repl, task: &Task, _submission: &ReplSession) -> TaskGrade {
let mut grade = TaskGrade::new(task.id.clone());
for test in &task.test_cases {
let result = self.run_test_case(repl, test);
grade.add_test_result(test.input.clone(), result);
}
for test in &task.hidden_cases {
let result = self.run_test_case(repl, test);
grade.add_hidden_result(test.input.clone(), result);
}
for req in &task.requirements {
if self.check_requirement(repl, req) {
grade.requirements_met.insert(format!("{req:?}"));
}
}
grade.calculate_score(task.points);
grade
}
fn run_test_case(&self, repl: &mut Repl, test: &TestCase) -> TestResult {
let start = std::time::Instant::now();
let output = match repl.eval(&test.input) {
Ok(out) => out,
Err(e) => {
return TestResult {
passed: false,
points_earned: 0,
feedback: format!("Error: {e}"),
execution_time_ms: start.elapsed().as_millis() as u64,
};
}
};
let execution_time_ms = start.elapsed().as_millis() as u64;
if execution_time_ms > test.timeout_ms {
return TestResult {
passed: false,
points_earned: 0,
feedback: format!("Timeout: {}ms > {}ms", execution_time_ms, test.timeout_ms),
execution_time_ms,
};
}
let (passed, feedback) = match &test.expected {
ExpectedBehavior::ExactOutput(expected) => {
let passed = output == *expected;
let feedback = if passed {
"Correct output".to_string()
} else {
format!("Expected '{expected}', got '{output}'")
};
(passed, feedback)
}
ExpectedBehavior::Pattern(pattern) => {
let regex = Regex::new(pattern).unwrap_or_else(|_| {
Regex::new(".*").expect("Default regex '.*' should always be valid")
});
let passed = regex.is_match(&output);
let feedback = if passed {
"Output matches pattern".to_string()
} else {
format!("Output doesn't match pattern: {pattern}")
};
(passed, feedback)
}
ExpectedBehavior::TypeSignature(expected_type) => {
let passed = output.contains(expected_type);
let feedback = if passed {
"Type signature correct".to_string()
} else {
format!("Expected type {expected_type}")
};
(passed, feedback)
}
_ => (false, "Unsupported check".to_string()),
};
TestResult {
passed,
points_earned: if passed { test.points } else { 0 },
feedback,
execution_time_ms,
}
}
fn check_requirement(&self, _repl: &Repl, req: &Requirement) -> bool {
match req {
Requirement::UseRecursion => true, Requirement::NoLoops => true, Requirement::UseHigherOrderFunctions => true, Requirement::TypeSafe => true, Requirement::PureFunction => true, Requirement::TailRecursive => true, }
}
fn evaluate_rubric(&self, rubric: &GradingRubric, _submission: &ReplSession) -> f32 {
let mut total_score = 0.0;
let mut total_weight = 0.0;
for category in &rubric.categories {
let category_score = self.evaluate_category(category);
total_score += category_score * category.weight;
total_weight += category.weight;
}
if total_weight > 0.0 {
(total_score / total_weight) * 100.0
} else {
0.0
}
}
fn evaluate_category(&self, category: &RubricCategory) -> f32 {
let mut earned = 0u32;
let mut possible = 0u32;
for criterion in &category.criteria {
possible += criterion.max_points;
earned += self.evaluate_criterion(criterion);
}
if possible > 0 {
earned as f32 / possible as f32
} else {
0.0
}
}
fn evaluate_criterion(&self, criterion: &Criterion) -> u32 {
match &criterion.evaluation {
CriterionEvaluation::Automatic(check) => {
match check {
AutomaticCheck::TestsPassed => criterion.max_points,
AutomaticCheck::CodeQuality { min_score } => {
if *min_score <= 0.8 {
criterion.max_points
} else {
0
}
}
_ => 0,
}
}
CriterionEvaluation::Manual(_) => 0, CriterionEvaluation::Hybrid { auto_weight, .. } => {
(criterion.max_points as f32 * auto_weight) as u32
}
}
}
fn measure_performance(
&self,
session: &ReplSession,
constraints: &PerformanceConstraints,
) -> f32 {
let mut score: f32 = 100.0;
let total_cpu_ns: u64 = session
.timeline
.iter()
.filter_map(|e| {
if let Event::ResourceUsage { cpu_ns, .. } = &e.event {
Some(*cpu_ns)
} else {
None
}
})
.sum();
let cpu_ms = total_cpu_ns / 1_000_000;
if cpu_ms > constraints.max_cpu_ms {
score -= 20.0;
}
let max_heap: usize = session
.timeline
.iter()
.filter_map(|e| {
if let Event::ResourceUsage { heap_bytes, .. } = &e.event {
Some(*heap_bytes)
} else {
None
}
})
.max()
.unwrap_or(0);
let heap_mb = max_heap / (1024 * 1024);
if heap_mb > constraints.max_heap_mb {
score -= 20.0;
}
score.max(0.0).min(100.0)
}
}
/// Detects likely plagiarism by fingerprinting submissions and comparing
/// them against previously recorded fingerprints.
pub struct PlagiarismDetector {
// Fingerprints of submissions seen so far.
known_submissions: Vec<AstFingerprint>,
}
/// Coarse submission fingerprint: a SHA-256 over all input text, a
/// structural signature per input event, and an event-count complexity proxy.
#[derive(Debug, Clone)]
pub struct AstFingerprint {
/// Hex-encoded SHA-256 over the concatenated input text.
pub hash: String,
/// One structural signature string (e.g. "FN-IF") per input event.
pub structure: Vec<String>,
/// Total number of timeline events in the session.
pub complexity: usize,
}
/// `Default` delegates to [`PlagiarismDetector::new`].
impl Default for PlagiarismDetector {
fn default() -> Self {
Self::new()
}
}
impl PlagiarismDetector {
    /// Creates a detector with no previously recorded submissions.
    pub fn new() -> Self {
        Self {
            known_submissions: Vec::new(),
        }
    }

    /// Scores originality on a 0-100 scale.
    ///
    /// The first known submission whose similarity exceeds 0.85 determines
    /// the score as `100 * (1 - similarity)`; otherwise the submission gets
    /// full marks.
    pub fn analyze(&self, submission: &ReplSession) -> f32 {
        let fingerprint = self.generate_fingerprint(submission);
        self.known_submissions
            .iter()
            .map(|known| self.compute_similarity(&fingerprint, known))
            .find(|&similarity| similarity > 0.85)
            .map_or(100.0, |similarity| 100.0 * (1.0 - similarity))
    }

    /// Builds a fingerprint: SHA-256 over all input text, one structural
    /// signature per input event, and the timeline length as a complexity
    /// proxy.
    fn generate_fingerprint(&self, session: &ReplSession) -> AstFingerprint {
        let mut hasher = Sha256::new();
        let structure: Vec<String> = session
            .timeline
            .iter()
            .filter_map(|event| match &event.event {
                Event::Input { text, .. } => {
                    hasher.update(text.as_bytes());
                    Some(self.extract_structure(text))
                }
                _ => None,
            })
            .collect();
        AstFingerprint {
            hash: format!("{:x}", hasher.finalize()),
            structure,
            complexity: session.timeline.len(),
        }
    }

    /// Reduces a snippet to a coarse signature such as "FN-IF-LOOP" based on
    /// which construct keywords appear in it.
    fn extract_structure(&self, code: &str) -> String {
        let markers: [(&str, &[&str]); 4] = [
            ("FN", &["fn ", "fun "]),
            ("IF", &["if "]),
            ("LOOP", &["for ", "while "]),
            ("MATCH", &["match "]),
        ];
        markers
            .iter()
            .filter(|(_, needles)| needles.iter().any(|needle| code.contains(needle)))
            .map(|(tag, _)| *tag)
            .collect::<Vec<_>>()
            .join("-")
    }

    /// Similarity in [0.0, 1.0]: identical hashes score 1.0; otherwise the
    /// count of position-wise matching structure signatures divided by the
    /// longer structure list's length.
    fn compute_similarity(&self, fp1: &AstFingerprint, fp2: &AstFingerprint) -> f32 {
        if fp1.hash == fp2.hash {
            return 1.0;
        }
        let total = fp1.structure.len().max(fp2.structure.len());
        if total == 0 {
            return 0.0;
        }
        let common = fp1
            .structure
            .iter()
            .zip(&fp2.structure)
            .filter(|(a, b)| a == b)
            .count();
        #[allow(clippy::cast_precision_loss)]
        let ratio = common as f32 / total as f32;
        ratio
    }
}
/// Creates isolated REPL instances for grading untrusted submissions.
pub struct SecureSandbox {
// Configured ceilings; currently recorded but not passed to created REPLs
// (see `create_isolated_repl`).
#[allow(dead_code)]
resource_limits: ResourceLimits,
}
/// Resource ceilings intended for sandboxed execution.
#[derive(Debug, Clone)]
pub struct ResourceLimits {
/// Heap ceiling in megabytes.
pub max_heap_mb: usize,
/// Stack ceiling in kilobytes.
pub max_stack_kb: usize,
/// CPU-time ceiling in milliseconds.
pub max_cpu_ms: u64,
}
/// `Default` delegates to [`SecureSandbox::new`].
impl Default for SecureSandbox {
fn default() -> Self {
Self::new()
}
}
impl SecureSandbox {
/// Creates a sandbox with default limits: 100 MB heap, 8192 KB stack,
/// 5000 ms CPU.
pub fn new() -> Self {
Self {
resource_limits: ResourceLimits {
max_heap_mb: 100,
max_stack_kb: 8192,
max_cpu_ms: 5000,
},
}
}
/// Creates a REPL rooted in the system temp directory.
/// NOTE(review): `resource_limits` is not passed to the REPL here —
/// confirm isolation/limits are enforced inside `Repl` itself.
pub fn create_isolated_repl(&self) -> Result<Repl> {
Repl::new(std::env::temp_dir())
}
}
/// Full grading outcome for one submission, serializable for storage.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GradeReport {
/// Assignment this report grades.
pub assignment_id: String,
/// RFC 3339 UTC timestamp captured when the report was created.
pub submission_time: String,
/// Per-task results.
pub task_grades: Vec<TaskGrade>,
/// Rubric score, 0-100.
pub rubric_score: f32,
/// Performance score, 0-100 (initialized to 100).
pub performance_score: f32,
/// Originality score, 0-100 (initialized to 100).
pub originality_score: f32,
/// Weighted overall grade; forced to 0 when the report is invalid.
pub final_grade: f32,
/// Human-readable feedback lines.
pub feedback: Vec<String>,
/// Integrity/constraint violations found during grading.
pub violations: Vec<String>,
/// False when an integrity or setup failure voided the submission.
pub is_valid: bool,
}
impl GradeReport {
    /// Creates an empty, valid report timestamped with the current UTC time.
    pub fn new(assignment_id: String) -> Self {
        Self {
            assignment_id,
            submission_time: chrono::Utc::now().to_rfc3339(),
            task_grades: Vec::new(),
            feedback: Vec::new(),
            violations: Vec::new(),
            rubric_score: 0.0,
            final_grade: 0.0,
            performance_score: 100.0,
            originality_score: 100.0,
            is_valid: true,
        }
    }

    /// Voids the report: records the violation and zeroes the final grade.
    pub fn mark_invalid(&mut self, reason: &str) {
        self.violations.push(reason.to_string());
        self.is_valid = false;
        self.final_grade = 0.0;
    }

    /// Appends one task's grade to the report.
    pub fn add_task_grade(&mut self, grade: TaskGrade) {
        self.task_grades.push(grade);
    }

    /// Computes the weighted final grade (60% tasks, 20% rubric, 10%
    /// performance, 10% originality) and appends tiered feedback. Invalid
    /// reports always score zero.
    pub fn calculate_final_grade(&mut self) {
        if !self.is_valid {
            self.final_grade = 0.0;
            return;
        }
        let earned: u32 = self.task_grades.iter().map(|g| g.points_earned).sum();
        let possible: u32 = self.task_grades.iter().map(|g| g.points_possible).sum();
        // With no task grades both sums are zero, so the task score is 0.0 —
        // the same result the previous explicit empty-list branch produced.
        let task_score = if possible > 0 {
            (earned as f32 / possible as f32) * 100.0
        } else {
            0.0
        };
        self.final_grade = task_score * 0.6
            + self.rubric_score * 0.2
            + self.performance_score * 0.1
            + self.originality_score * 0.1;
        let message = if self.final_grade >= 90.0 {
            "Excellent work!"
        } else if self.final_grade >= 80.0 {
            "Good job!"
        } else if self.final_grade >= 70.0 {
            "Satisfactory work."
        } else {
            "Needs improvement."
        };
        self.feedback.push(message.to_string());
    }
}
/// Grading outcome for a single task.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TaskGrade {
/// Task this grade belongs to.
pub task_id: String,
/// Points earned (capped at `points_possible`).
pub points_earned: u32,
/// Maximum points for the task.
pub points_possible: u32,
/// `(input, result)` pairs for visible test cases.
pub test_results: Vec<(String, TestResult)>,
/// `(input, result)` pairs for hidden test cases.
pub hidden_results: Vec<(String, TestResult)>,
/// Debug-formatted `Requirement`s that were satisfied.
pub requirements_met: HashSet<String>,
}
impl TaskGrade {
    /// Creates an empty grade record for `task_id`.
    pub fn new(task_id: String) -> Self {
        Self {
            task_id,
            points_earned: 0,
            points_possible: 0,
            test_results: Vec::new(),
            hidden_results: Vec::new(),
            requirements_met: HashSet::new(),
        }
    }

    /// Records the outcome of a visible test case.
    pub fn add_test_result(&mut self, input: String, result: TestResult) {
        self.test_results.push((input, result));
    }

    /// Records the outcome of a hidden test case.
    pub fn add_hidden_result(&mut self, input: String, result: TestResult) {
        self.hidden_results.push((input, result));
    }

    /// Sums earned points across visible and hidden cases, capping the total
    /// at `max_points`.
    pub fn calculate_score(&mut self, max_points: u32) {
        self.points_possible = max_points;
        let earned: u32 = self
            .test_results
            .iter()
            .chain(self.hidden_results.iter())
            .map(|(_, result)| result.points_earned)
            .sum();
        self.points_earned = earned.min(max_points);
    }
}
/// Outcome of a single test-case execution.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TestResult {
/// Whether the case passed.
pub passed: bool,
/// Points awarded (0 on failure).
pub points_earned: u32,
/// Human-readable explanation of the outcome.
pub feedback: String,
/// Wall-clock evaluation time in milliseconds.
pub execution_time_ms: u64,
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_grading_engine_creation() {
        let engine = GradingEngine::new();
        assert!(engine.replay_validator.strict_mode);
    }

    #[test]
    fn test_grade_report() {
        let mut report = GradeReport::new("test_assignment".to_string());
        assert!(report.is_valid);
        assert_eq!(report.final_grade, 0.0);
        report.mark_invalid("Test violation");
        assert!(!report.is_valid);
        assert_eq!(report.violations.len(), 1);
    }

    #[test]
    fn test_plagiarism_detection() {
        let detector = PlagiarismDetector::new();
        let session = ReplSession {
            version: crate::runtime::replay::SemVer::new(1, 0, 0),
            metadata: crate::runtime::replay::SessionMetadata {
                session_id: "test".to_string(),
                created_at: "2025-08-28T10:00:00Z".to_string(),
                ruchy_version: "1.23.0".to_string(),
                student_id: Some("student1".to_string()),
                assignment_id: Some("hw1".to_string()),
                tags: vec![],
            },
            environment: crate::runtime::replay::Environment {
                seed: 42,
                feature_flags: vec![],
                resource_limits: crate::runtime::replay::ResourceLimits {
                    heap_mb: 100,
                    stack_kb: 8192,
                    cpu_ms: 5000,
                },
            },
            timeline: vec![],
            checkpoints: std::collections::BTreeMap::new(),
        };
        // With no known submissions the originality score is a full 100.
        let score = detector.analyze(&session);
        assert_eq!(score, 100.0);
    }

    #[test]
    fn test_assignment_creation() {
        let assignment = Assignment {
            id: "hw001".to_string(),
            title: "Introduction to Ruchy".to_string(),
            description: "Basic programming exercises".to_string(),
            setup: AssignmentSetup {
                // FIX: fixture previously bound pi to the typo'd value 3.15159.
                prelude_code: vec!["let pi = 3.14159".to_string()],
                provided_functions: HashMap::new(),
                immutable_bindings: HashSet::new(),
            },
            tasks: vec![],
            constraints: AssignmentConstraints {
                max_time_ms: 5000,
                max_memory_mb: 100,
                allowed_imports: vec![],
                forbidden_keywords: vec!["eval".to_string()],
                performance: None,
            },
            rubric: GradingRubric {
                categories: vec![],
                late_penalty: Some(LatePenalty {
                    grace_hours: 24,
                    penalty_per_day: 10.0,
                    max_days_late: 7,
                }),
                bonus_criteria: vec![],
            },
        };
        assert_eq!(assignment.id, "hw001");
        assert_eq!(assignment.title, "Introduction to Ruchy");
        assert_eq!(assignment.constraints.max_time_ms, 5000);
    }

    #[test]
    fn test_task_with_test_cases() {
        let task = Task {
            id: "task_1".to_string(),
            description: "Implement fibonacci function".to_string(),
            points: 20,
            test_cases: vec![TestCase {
                input: "fib(5)".to_string(),
                expected: ExpectedBehavior::ExactOutput("5".to_string()),
                points: 10,
                timeout_ms: 1000,
            }],
            hidden_cases: vec![TestCase {
                input: "fib(10)".to_string(),
                expected: ExpectedBehavior::ExactOutput("55".to_string()),
                points: 10,
                timeout_ms: 1000,
            }],
            requirements: vec![Requirement::UseRecursion],
        };
        assert_eq!(task.id, "task_1");
        assert_eq!(task.points, 20);
        assert_eq!(task.test_cases.len(), 1);
        assert_eq!(task.hidden_cases.len(), 1);
        assert_eq!(task.requirements.len(), 1);
    }

    #[test]
    fn test_expected_behavior_variants() {
        let behaviors = vec![
            ExpectedBehavior::ExactOutput("42".to_string()),
            ExpectedBehavior::Pattern(r"Result: \d+".to_string()),
            ExpectedBehavior::TypeSignature("int -> int".to_string()),
            ExpectedBehavior::Predicate(PredicateCheck {
                name: "is_even".to_string(),
                check_fn: "x % 2 == 0".to_string(),
            }),
            ExpectedBehavior::PerformanceBound {
                max_ns: 1_000_000,
                max_bytes: 1024,
            },
        ];
        for behavior in behaviors {
            match behavior {
                ExpectedBehavior::ExactOutput(s) => assert!(!s.is_empty()),
                ExpectedBehavior::Pattern(p) => assert!(!p.is_empty()),
                ExpectedBehavior::TypeSignature(t) => assert!(!t.is_empty()),
                ExpectedBehavior::Predicate(pred) => assert!(!pred.name.is_empty()),
                ExpectedBehavior::PerformanceBound { max_ns, max_bytes } => {
                    assert!(max_ns > 0);
                    assert!(max_bytes > 0);
                }
            }
        }
    }

    #[test]
    fn test_requirements_enum() {
        let requirements = [
            Requirement::UseRecursion,
            Requirement::NoLoops,
            Requirement::UseHigherOrderFunctions,
            Requirement::TypeSafe,
            Requirement::PureFunction,
            Requirement::TailRecursive,
        ];
        assert_eq!(requirements.len(), 6);
        for (i, req1) in requirements.iter().enumerate() {
            for (j, req2) in requirements.iter().enumerate() {
                if i != j {
                    assert!(!matches!(
                        (req1, req2),
                        (Requirement::UseRecursion, Requirement::UseRecursion)
                    ));
                }
            }
        }
    }

    #[test]
    fn test_grading_rubric() {
        let rubric = GradingRubric {
            categories: vec![
                RubricCategory {
                    name: "Correctness".to_string(),
                    weight: 0.5,
                    criteria: vec![Criterion {
                        description: "All tests pass".to_string(),
                        max_points: 50,
                        evaluation: CriterionEvaluation::Automatic(AutomaticCheck::TestsPassed),
                    }],
                },
                RubricCategory {
                    name: "Style".to_string(),
                    weight: 0.3,
                    criteria: vec![],
                },
            ],
            late_penalty: Some(LatePenalty {
                grace_hours: 0,
                penalty_per_day: 5.0,
                max_days_late: 10,
            }),
            bonus_criteria: vec![],
        };
        assert_eq!(rubric.categories.len(), 2);
        assert_eq!(rubric.categories[0].weight, 0.5);
        assert_eq!(rubric.categories[1].weight, 0.3);
        if let Some(penalty) = &rubric.late_penalty {
            assert_eq!(penalty.penalty_per_day, 5.0);
            assert_eq!(penalty.grace_hours, 0);
        } else {
            panic!("Expected late penalty");
        }
    }

    #[test]
    fn test_performance_constraints() {
        let constraints = PerformanceConstraints {
            max_cpu_ms: 1000,
            max_heap_mb: 50,
            complexity_bound: "O(n log n)".to_string(),
        };
        assert_eq!(constraints.max_cpu_ms, 1000);
        assert_eq!(constraints.max_heap_mb, 50);
        assert_eq!(constraints.complexity_bound, "O(n log n)");
    }

    #[test]
    fn test_auto_grader_initialization() {
        let assignment = Assignment {
            id: "test".to_string(),
            title: "Test Assignment".to_string(),
            description: String::new(),
            setup: AssignmentSetup {
                prelude_code: vec![],
                provided_functions: HashMap::new(),
                immutable_bindings: HashSet::new(),
            },
            tasks: vec![],
            constraints: AssignmentConstraints {
                max_time_ms: 5000,
                max_memory_mb: 100,
                allowed_imports: vec![],
                forbidden_keywords: vec![],
                performance: None,
            },
            rubric: GradingRubric {
                categories: vec![],
                late_penalty: None,
                bonus_criteria: vec![],
            },
        };
        assert_eq!(assignment.id, "test");
    }

    #[test]
    fn test_predicate_check() {
        let predicate = PredicateCheck {
            name: "is_prime".to_string(),
            check_fn: "fn(n) { n > 1 && (2..n).all(|i| n % i != 0) }".to_string(),
        };
        assert_eq!(predicate.name, "is_prime");
        assert!(!predicate.check_fn.is_empty());
    }

    #[test]
    fn test_assignment_setup_with_immutable_bindings() {
        let mut immutable = HashSet::new();
        immutable.insert("PI".to_string());
        immutable.insert("E".to_string());
        let mut provided = HashMap::new();
        provided.insert("helper".to_string(), "fn helper(x) { x * 2 }".to_string());
        let setup = AssignmentSetup {
            prelude_code: vec![
                // FIX: fixture previously bound PI to the typo'd value 3.15159.
                "let PI = 3.14159".to_string(),
                "let E = 2.71828".to_string(),
            ],
            provided_functions: provided,
            immutable_bindings: immutable,
        };
        assert_eq!(setup.prelude_code.len(), 2);
        assert_eq!(setup.provided_functions.len(), 1);
        assert_eq!(setup.immutable_bindings.len(), 2);
        assert!(setup.immutable_bindings.contains("PI"));
    }

    // Helper: builds a TestResult with canned feedback for grade arithmetic
    // tests below.
    fn make_test_result(passed: bool, points: u32) -> TestResult {
        TestResult {
            passed,
            points_earned: points,
            feedback: if passed {
                "Test passed".to_string()
            } else {
                "Test failed".to_string()
            },
            execution_time_ms: 10,
        }
    }

    #[test]
    fn test_task_grade_new() {
        let grade = TaskGrade::new("task_001".to_string());
        assert_eq!(grade.task_id, "task_001");
        assert_eq!(grade.points_earned, 0);
        assert_eq!(grade.points_possible, 0);
        assert!(grade.test_results.is_empty());
        assert!(grade.hidden_results.is_empty());
        assert!(grade.requirements_met.is_empty());
    }

    #[test]
    fn test_task_grade_add_test_result_passed() {
        let mut grade = TaskGrade::new("task_001".to_string());
        grade.add_test_result("1 + 1".to_string(), make_test_result(true, 10));
        assert_eq!(grade.test_results.len(), 1);
        assert!(grade.test_results.iter().any(|(k, _)| k == "1 + 1"));
    }

    #[test]
    fn test_task_grade_add_test_result_failed() {
        let mut grade = TaskGrade::new("task_001".to_string());
        grade.add_test_result("1 + 1".to_string(), make_test_result(false, 0));
        assert_eq!(grade.test_results.len(), 1);
    }

    #[test]
    fn test_task_grade_add_hidden_result() {
        let mut grade = TaskGrade::new("task_001".to_string());
        grade.add_hidden_result("fib(10)".to_string(), make_test_result(true, 10));
        assert_eq!(grade.hidden_results.len(), 1);
        assert!(grade.hidden_results.iter().any(|(k, _)| k == "fib(10)"));
    }

    #[test]
    fn test_task_grade_calculate_score() {
        let mut grade = TaskGrade::new("task_001".to_string());
        grade.add_test_result("test1".to_string(), make_test_result(true, 10));
        grade.add_test_result("test2".to_string(), make_test_result(true, 10));
        grade.add_hidden_result("hidden1".to_string(), make_test_result(true, 10));
        grade.calculate_score(30);
        assert_eq!(grade.points_possible, 30);
        assert_eq!(grade.points_earned, 30);
    }

    #[test]
    fn test_task_grade_calculate_score_partial() {
        let mut grade = TaskGrade::new("task_001".to_string());
        grade.add_test_result("test1".to_string(), make_test_result(true, 10));
        grade.add_test_result("test2".to_string(), make_test_result(false, 0));
        grade.calculate_score(20);
        assert_eq!(grade.points_possible, 20);
        assert_eq!(grade.points_earned, 10);
    }

    #[test]
    fn test_test_result_struct() {
        let result = TestResult {
            passed: true,
            points_earned: 10,
            feedback: "Good work!".to_string(),
            execution_time_ms: 50,
        };
        assert!(result.passed);
        assert_eq!(result.points_earned, 10);
        assert_eq!(result.feedback, "Good work!");
        assert_eq!(result.execution_time_ms, 50);
    }

    #[test]
    fn test_test_result_failed() {
        let result = TestResult {
            passed: false,
            points_earned: 0,
            feedback: "Expected 42, got 41".to_string(),
            execution_time_ms: 25,
        };
        assert!(!result.passed);
        assert_eq!(result.points_earned, 0);
    }

    #[test]
    fn test_grade_report_add_task_grade() {
        let mut report = GradeReport::new("test_assignment".to_string());
        let mut task_grade = TaskGrade::new("task_1".to_string());
        task_grade.add_test_result("test".to_string(), make_test_result(true, 10));
        task_grade.calculate_score(10);
        report.add_task_grade(task_grade);
        assert_eq!(report.task_grades.len(), 1);
        assert_eq!(report.task_grades[0].task_id, "task_1");
    }

    #[test]
    fn test_grade_report_calculate_final_grade() {
        let mut report = GradeReport::new("test_assignment".to_string());
        let mut task1 = TaskGrade::new("task_1".to_string());
        task1.add_test_result("t1".to_string(), make_test_result(true, 50));
        task1.calculate_score(50);
        report.add_task_grade(task1);
        let mut task2 = TaskGrade::new("task_2".to_string());
        task2.add_test_result("t2".to_string(), make_test_result(true, 50));
        task2.calculate_score(50);
        report.add_task_grade(task2);
        report.calculate_final_grade();
        assert!(report.final_grade >= 0.0);
    }

    #[test]
    fn test_grade_report_calculate_final_grade_partial() {
        let mut report = GradeReport::new("test_assignment".to_string());
        let mut task1 = TaskGrade::new("task_1".to_string());
        task1.points_earned = 25;
        task1.points_possible = 50;
        report.add_task_grade(task1);
        let mut task2 = TaskGrade::new("task_2".to_string());
        task2.points_earned = 50;
        task2.points_possible = 50;
        report.add_task_grade(task2);
        report.calculate_final_grade();
        assert!(report.final_grade >= 0.0);
    }

    #[test]
    fn test_grade_report_invalid_report() {
        let mut report = GradeReport::new("test".to_string());
        report.mark_invalid("Plagiarism detected");
        report.mark_invalid("Time limit exceeded");
        assert!(!report.is_valid);
        assert_eq!(report.violations.len(), 2);
    }

    #[test]
    fn test_rubric_category_creation() {
        let category = RubricCategory {
            name: "Code Quality".to_string(),
            weight: 0.25,
            criteria: vec![
                Criterion {
                    description: "Code is readable".to_string(),
                    max_points: 10,
                    evaluation: CriterionEvaluation::Manual("Check readability".to_string()),
                },
                Criterion {
                    description: "All tests pass".to_string(),
                    max_points: 10,
                    evaluation: CriterionEvaluation::Automatic(AutomaticCheck::TestsPassed),
                },
            ],
        };
        assert_eq!(category.name, "Code Quality");
        assert_eq!(category.weight, 0.25);
        assert_eq!(category.criteria.len(), 2);
    }

    #[test]
    fn test_bonus_criterion() {
        let bonus = BonusCriterion {
            description: "Creative solution".to_string(),
            points: 5,
            check: BonusCheck::CreativeSolution,
        };
        assert_eq!(bonus.points, 5);
        assert!(!bonus.description.is_empty());
    }

    #[test]
    fn test_late_penalty_calculation() {
        let penalty = LatePenalty {
            grace_hours: 12,
            penalty_per_day: 10.0,
            max_days_late: 5,
        };
        assert_eq!(penalty.grace_hours, 12);
        assert_eq!(penalty.penalty_per_day, 10.0);
        assert_eq!(penalty.max_days_late, 5);
    }

    #[test]
    fn test_automatic_check_tests_passed() {
        let check = AutomaticCheck::TestsPassed;
        assert!(matches!(check, AutomaticCheck::TestsPassed));
    }

    #[test]
    fn test_automatic_check_code_quality() {
        let check = AutomaticCheck::CodeQuality { min_score: 0.8 };
        match check {
            AutomaticCheck::CodeQuality { min_score } => assert_eq!(min_score, 0.8),
            _ => panic!("Expected CodeQuality"),
        }
    }

    #[test]
    fn test_automatic_check_performance() {
        let check = AutomaticCheck::Performance {
            metric: "execution_time".to_string(),
            threshold: 100.0,
        };
        match check {
            AutomaticCheck::Performance { metric, threshold } => {
                assert_eq!(metric, "execution_time");
                assert_eq!(threshold, 100.0);
            }
            _ => panic!("Expected Performance"),
        }
    }

    #[test]
    fn test_plagiarism_detector_empty_session() {
        let detector = PlagiarismDetector::new();
        let session = ReplSession {
            version: crate::runtime::replay::SemVer::new(1, 0, 0),
            metadata: crate::runtime::replay::SessionMetadata {
                session_id: "test_empty".to_string(),
                created_at: "2025-08-28T10:00:00Z".to_string(),
                ruchy_version: "1.23.0".to_string(),
                student_id: None,
                assignment_id: None,
                tags: vec![],
            },
            environment: crate::runtime::replay::Environment {
                seed: 0,
                feature_flags: vec![],
                resource_limits: crate::runtime::replay::ResourceLimits {
                    heap_mb: 100,
                    stack_kb: 8192,
                    cpu_ms: 5000,
                },
            },
            timeline: vec![],
            checkpoints: std::collections::BTreeMap::new(),
        };
        let score = detector.analyze(&session);
        assert_eq!(score, 100.0);
    }

    #[test]
    fn test_criterion_evaluation_variants() {
        let auto_eval = CriterionEvaluation::Automatic(AutomaticCheck::TestsPassed);
        let manual_eval = CriterionEvaluation::Manual("Grade code style".to_string());
        let hybrid_eval = CriterionEvaluation::Hybrid {
            auto_weight: 0.6,
            manual_weight: 0.4,
        };
        assert!(matches!(auto_eval, CriterionEvaluation::Automatic(_)));
        assert!(matches!(manual_eval, CriterionEvaluation::Manual(_)));
        assert!(matches!(hybrid_eval, CriterionEvaluation::Hybrid { .. }));
    }

    #[test]
    fn test_bonus_check_variants() {
        let checks = vec![
            BonusCheck::ExtraFeature("Dark mode".to_string()),
            BonusCheck::Optimization {
                improvement_percent: 50.0,
            },
            BonusCheck::CreativeSolution,
        ];
        assert_eq!(checks.len(), 3);
    }
}
#[cfg(test)]
mod property_tests_assessment {
    use super::GradeReport;
    use proptest::proptest;
    proptest! {
        #[test]
        fn test_new_never_panics(input: String) {
            // BUG FIX: the previous version sliced `&input[..100]`, which
            // panics whenever byte 100 is not a UTF-8 character boundary
            // (proptest generates arbitrary Unicode strings), and its
            // `catch_unwind` wrapped an empty closure so nothing was actually
            // exercised. Truncate on character boundaries and construct the
            // value under test directly.
            let id: String = input.chars().take(100).collect();
            let report = GradeReport::new(id);
            assert!(report.is_valid);
        }
    }
}