Compare commits

2 Commits

Author: Oystein Kristoffer Tveit
SHA1:   f6682b18dd
Date:   2024-04-30 22:06:12 +02:00
Message: WIP: add more tests
CI: Build and test / check (push): failing after 2m12s
    Build and test / build (push): successful in 2m54s
    Build docs / docs (push): successful in 3m25s
    Build and test / test (push): successful in 6m15s

Author: Oystein Kristoffer Tveit
SHA1:   c5d40a68a6
Date:   2024-04-30 22:06:10 +02:00
Message: WIP: workflow: generate test reports

7 changed files with 356 additions and 56 deletions


@@ -16,13 +16,12 @@ jobs:
        with:
          toolchain: nightly
          override: true
          components: rustfmt, clippy
      - name: Cache dependencies
        uses: Swatinem/rust-cache@v2
      - name: Build
-        run: cargo build --all-features --verbose
+        run: cargo build --all-features --verbose --release

  check:
    runs-on: ubuntu-latest-personal
@@ -49,16 +48,67 @@ jobs:
    runs-on: ubuntu-latest-personal
    steps:
      - uses: actions/checkout@v3
+      - uses: cargo-bins/cargo-binstall@main
+      - name: Install mpv
+        run: apt-get update && apt-get install -y mpv
      - name: Install latest nightly toolchain
        uses: actions-rs/toolchain@v1
        with:
          toolchain: nightly
          override: true
-          components: rustfmt, clippy
+          components: llvm-tools-preview
      - name: Cache dependencies
        uses: Swatinem/rust-cache@v2
+      - name: Create necessary directories
+        run: mkdir -p target/test-report
      - name: Run tests
-        run: cargo test --all-features --verbose
+        run: |
+          cargo test --all-features --release --no-fail-fast -- -Zunstable-options --format json --report-time \
+            | tee target/test-report/test-report.json
+        env:
+          RUSTFLAGS: "-Cinstrument-coverage"
+          LLVM_PROFILE_FILE: "target/coverage/%p-%m.profraw"
+      - name: Install markdown-test-report
+        run: cargo binstall -y markdown-test-report
+      - name: Generate test report
+        run: markdown-test-report target/test-report/test-report.json --output target/test-report/test-report.md
+      - name: Upload test report
+        uses: actions/upload-artifact@v3
+        with:
+          name: test-report.md
+          path: target/test-report/test-report.md
+      - name: Install grcov
+        run: cargo binstall -y grcov
+      - name: Generate coverage report
+        run: |
+          grcov \
+            --source-dir . \
+            --binary-path ./target/release/deps/ \
+            --excl-start 'mod test* \{' \
+            --ignore 'tests/*' \
+            --ignore "*test.rs" \
+            --ignore "*tests.rs" \
+            --ignore "*github.com*" \
+            --ignore "*libcore*" \
+            --ignore "*rustc*" \
+            --ignore "*liballoc*" \
+            --ignore "*cargo*" \
+            -t html \
+            -o ./target/coverage/html \
+            target/coverage/
+      - name: Upload coverage report
+        uses: actions/upload-artifact@v3
+        with:
+          name: coverage
+          path: target/coverage/html


@@ -0,0 +1,44 @@
name: "Build docs"

on:
  push:
    branches:
      - master

jobs:
  docs:
    runs-on: ubuntu-latest-personal
    steps:
      - uses: actions/checkout@v3
      - name: Install latest nightly toolchain
        uses: actions-rs/toolchain@v1
        with:
          toolchain: nightly
          override: true
      - name: Cache dependencies
        uses: Swatinem/rust-cache@v2
      - name: Build docs
        run: cargo doc --all-features --document-private-items --release
      - name: Install rsync
        run: apt-get update && apt-get install -y rsync
      - name: Install SSH key
        run: |
          mkdir -p ~/.ssh
          cat <<EOF >~/.ssh/key
          ${{ secrets.OYSTEIKT_GITEA_WEBDOCS_SSH_KEY }}
          EOF
          chmod 600 ~/.ssh/key
      - name: Deploy docs to https://pvv.ntnu.no/~oysteikt/mpvipc/${{ gitea.ref_name }}/
        run: |
          rsync \
            --archive \
            --compress \
            --verbose \
            --mkpath \
            --rsh="ssh -oBatchMode=yes -oStrictHostKeyChecking=accept-new -i ~/.ssh/key" \
            "target/doc/" \
            "oysteikt@microbel.pvv.ntnu.no:mpvipc/${{ gitea.ref_name }}/"


@@ -22,7 +22,8 @@ tokio-stream = { version = "0.1.15", features = ["sync"] }

[dev-dependencies]
env_logger = "0.10.0"
test-log = "0.2.15"
-tokio = { version = "1.37.0", features = ["rt-multi-thread", "time"] }
+tokio = { version = "1.37.0", features = ["rt-multi-thread", "time", "process"] }
+uuid = { version = "1.8.0", features = ["v4"] }

[lib]
doctest = false

run.sh (new executable file, 44 lines)

@@ -0,0 +1,44 @@
#!/usr/bin/env bash

rm -rf target
mkdir -p target/test-report

export RUSTFLAGS="-Cinstrument-coverage"
export LLVM_PROFILE_FILE="target/coverage/prof/%p-%m.profraw"

rustup override set nightly

echo "Running tests..."
cargo test --all-features --release --no-fail-fast -- -Z unstable-options --report-time --format json | tee target/test-report/test-report.json

echo "Generating test report..."
markdown-test-report target/test-report/test-report.json --output target/test-report/test-report.md

echo "Generating test report HTML..."
pandoc target/test-report/test-report.md -o target/test-report/test-report.html

# rustup override set stable

echo "Removing unused profraw files..."
for file in target/coverage/prof/*.profraw; do
  ~/.rustup/toolchains/nightly-x86_64-unknown-linux-gnu/lib/rustlib/x86_64-unknown-linux-gnu/bin/llvm-profdata show "$file" 1>/dev/null 2>/dev/null || rm -f "$file"
done

echo "Generating coverage report..."
grcov \
  --source-dir . \
  --binary-path ./target/release/deps/ \
  --excl-start 'mod test* \{' \
  --ignore 'tests/*' \
  --ignore "*test.rs" \
  --ignore "*tests.rs" \
  --ignore "*github.com*" \
  --ignore "*libcore*" \
  --ignore "*rustc*" \
  --ignore "*liballoc*" \
  --ignore "*cargo*" \
  -t html \
  -o ./target/coverage/html \
  target/coverage/prof

rustup override set nightly


@@ -68,7 +68,7 @@ pub(crate) trait IntoRawCommandPart {
}

/// Generic data type representing all possible data types that mpv can return.
-#[derive(Debug, Clone, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum MpvDataType {
    Array(Vec<MpvDataType>),
    Bool(bool),


@@ -136,61 +136,27 @@ pub(crate) fn json_map_to_hashmap(
}

pub(crate) fn json_array_to_vec(array: &[Value]) -> Vec<MpvDataType> {
-    let mut output: Vec<MpvDataType> = Vec::new();
-    if !array.is_empty() {
-        match array[0] {
-            Value::Array(_) => {
-                for entry in array {
-                    if let Value::Array(ref a) = *entry {
-                        output.push(MpvDataType::Array(json_array_to_vec(a)));
-                    }
+    array
+        .into_iter()
+        .map(|entry| match entry {
+            Value::Array(a) => MpvDataType::Array(json_array_to_vec(&a)),
+            Value::Bool(b) => MpvDataType::Bool(*b),
+            Value::Number(n) => {
+                if n.is_u64() {
+                    MpvDataType::Usize(n.as_u64().unwrap() as usize)
+                } else if n.is_f64() {
+                    MpvDataType::Double(n.as_f64().unwrap())
+                } else {
+                    panic!("unimplemented number");
                }
            }
-            Value::Bool(_) => {
-                for entry in array {
-                    if let Value::Bool(ref b) = *entry {
-                        output.push(MpvDataType::Bool(*b));
-                    }
-                }
-            }
-            Value::Number(_) => {
-                for entry in array {
-                    if let Value::Number(ref n) = *entry {
-                        if n.is_u64() {
-                            output.push(MpvDataType::Usize(n.as_u64().unwrap() as usize));
-                        } else if n.is_f64() {
-                            output.push(MpvDataType::Double(n.as_f64().unwrap()));
-                        } else {
-                            panic!("unimplemented number");
-                        }
-                    }
-                }
-            }
-            Value::Object(_) => {
-                for entry in array {
-                    if let Value::Object(ref map) = *entry {
-                        output.push(MpvDataType::HashMap(json_map_to_hashmap(map)));
-                    }
-                }
-            }
-            Value::String(_) => {
-                for entry in array {
-                    if let Value::String(ref s) = *entry {
-                        output.push(MpvDataType::String(s.to_string()));
-                    }
-                }
-            }
+            Value::Object(o) => MpvDataType::HashMap(json_map_to_hashmap(&o)),
+            Value::String(s) => MpvDataType::String(s.to_owned()),
            Value::Null => {
                unimplemented!();
            }
-        }
-    }
-    output
+        })
+        .collect()
}

pub(crate) fn json_array_to_playlist(array: &[Value]) -> Vec<PlaylistEntry> {
@@ -217,3 +183,137 @@ pub(crate) fn json_array_to_playlist(array: &[Value]) -> Vec<PlaylistEntry> {
    }
    output
}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+    use crate::MpvDataType;
+    use serde_json::json;
+    use std::collections::HashMap;
+
+    #[test]
+    fn test_json_map_to_hashmap() {
+        let json = json!({
+            "array": [1, 2, 3],
+            "bool": true,
+            "double": 1.0,
+            "usize": 1,
+            "string": "string",
+            "object": {
+                "key": "value"
+            }
+        });
+
+        let mut expected = HashMap::new();
+        expected.insert(
+            "array".to_string(),
+            MpvDataType::Array(vec![
+                MpvDataType::Usize(1),
+                MpvDataType::Usize(2),
+                MpvDataType::Usize(3),
+            ]),
+        );
+        expected.insert("bool".to_string(), MpvDataType::Bool(true));
+        expected.insert("double".to_string(), MpvDataType::Double(1.0));
+        expected.insert("usize".to_string(), MpvDataType::Usize(1));
+        expected.insert(
+            "string".to_string(),
+            MpvDataType::String("string".to_string()),
+        );
+        expected.insert(
+            "object".to_string(),
+            MpvDataType::HashMap(HashMap::from([(
+                "key".to_string(),
+                MpvDataType::String("value".to_string()),
+            )])),
+        );
+
+        assert_eq!(json_map_to_hashmap(&json.as_object().unwrap()), expected);
+    }
+
+    #[test]
+    #[should_panic]
+    fn test_json_map_to_hashmap_fail_on_null() {
+        json_map_to_hashmap(
+            json!({
+                "null": null
+            })
+            .as_object()
+            .unwrap(),
+        );
+    }
+
+    #[test]
+    fn test_json_array_to_vec() {
+        let json = json!([
+            [1, 2, 3],
+            true,
+            1.0,
+            1,
+            "string",
+            {
+                "key": "value"
+            }
+        ]);
+
+        println!("{:?}", json.as_array().unwrap());
+        println!("{:?}", json_array_to_vec(&json.as_array().unwrap()));
+
+        let expected = vec![
+            MpvDataType::Array(vec![
+                MpvDataType::Usize(1),
+                MpvDataType::Usize(2),
+                MpvDataType::Usize(3),
+            ]),
+            MpvDataType::Bool(true),
+            MpvDataType::Double(1.0),
+            MpvDataType::Usize(1),
+            MpvDataType::String("string".to_string()),
+            MpvDataType::HashMap(HashMap::from([(
+                "key".to_string(),
+                MpvDataType::String("value".to_string()),
+            )])),
+        ];
+
+        assert_eq!(json_array_to_vec(&json.as_array().unwrap()), expected);
+    }
+
+    #[test]
+    #[should_panic]
+    fn test_json_array_to_vec_fail_on_null() {
+        json_array_to_vec(json!([null]).as_array().unwrap().as_slice());
+    }
+
+    #[test]
+    fn test_json_array_to_playlist() {
+        let json = json!([
+            {
+                "filename": "file1",
+                "title": "title1",
+                "current": true
+            },
+            {
+                "filename": "file2",
+                "title": "title2",
+                "current": false
+            }
+        ]);
+
+        let expected = vec![
+            PlaylistEntry {
+                id: 0,
+                filename: "file1".to_string(),
+                title: "title1".to_string(),
+                current: true,
+            },
+            PlaylistEntry {
+                id: 1,
+                filename: "file2".to_string(),
+                title: "title2".to_string(),
+                current: false,
+            },
+        ];
+
+        assert_eq!(json_array_to_playlist(&json.as_array().unwrap()), expected);
+    }
+}

tests/integration.rs (new file, 61 lines)

@@ -0,0 +1,61 @@
use mpvipc::{Error, Mpv, MpvExt};
use std::path::Path;
use tokio::{
    process::{Child, Command},
    time::{sleep, timeout, Duration},
};

#[cfg(target_family = "unix")]
async fn spawn_headless_mpv() -> Result<(Child, Mpv), Error> {
    let socket_path_str = format!("/tmp/mpv-ipc-{}", uuid::Uuid::new_v4());
    let socket_path = Path::new(&socket_path_str);

    let process_handle = Command::new("mpv")
        .arg("--no-config")
        .arg("--idle")
        .arg("--no-video")
        .arg("--no-audio")
        .arg(format!(
            "--input-ipc-server={}",
            &socket_path.to_str().unwrap()
        ))
        .spawn()
        .expect("Failed to start mpv");

    if timeout(Duration::from_millis(500), async {
        while !&socket_path.exists() {
            sleep(Duration::from_millis(10)).await;
        }
    })
    .await
    .is_err()
    {
        panic!("Failed to create mpv socket at {:?}", &socket_path);
    }

    let mpv = Mpv::connect(socket_path.to_str().unwrap()).await.unwrap();
    Ok((process_handle, mpv))
}

#[tokio::test]
#[cfg(target_family = "unix")]
async fn test_get_mpv_version() {
    let (mut proc, mpv) = spawn_headless_mpv().await.unwrap();
    let version: String = mpv.get_property("mpv-version").await.unwrap();
    assert!(version.starts_with("mpv"));

    mpv.kill().await.unwrap();
    proc.kill().await.unwrap();
}

#[tokio::test]
#[cfg(target_family = "unix")]
async fn test_set_property() {
    let (mut proc, mpv) = spawn_headless_mpv().await.unwrap();
    mpv.set_property("pause", true).await.unwrap();
    let paused: bool = mpv.get_property("pause").await.unwrap();
    assert_eq!(paused, true);

    mpv.kill().await.unwrap();
    proc.kill().await.unwrap();
}