35 Commits

Author SHA1 Message Date
  Philipp Oppermann 95d6c27f8f Use atomic to store and update pid 5 hours ago
  Philipp Oppermann 796f51496c Merge branch 'main' into restart-failed-nodes 5 hours ago
  github-actions[bot] b00b4adee5
chore: Update Cargo.lock (#1263) 8 hours ago
  Pratyksh Gupta 3b49a44d1e
feat: implement dora node list command with metrics and filtering (#1202) (#1262) 9 hours ago
  Haixuan Xavier Tao e5a11b1892
Fix serialization of `LogLevelOrStdout` to match deserialization (#1258) 3 days ago
  Pratyksh Gupta 24c2f3a742
fix: update macOS runner from macos-13/macos-14 to macos-15 (#1256) 4 days ago
  Pratyksh Gupta e7f295984e
feat: implement dora inspect top command for real-time node resource monitoring (#1251) 4 days ago
  Philipp Oppermann a6805c1f54
Remove custom `Deserialize` implementation for `LogLevelOrStdout` 4 days ago
  Philipp Oppermann e93604e342
Print message on failed deserialization 4 days ago
  Philipp Oppermann a53024bd6a
Fix serialization of `LogLevelOrStdout` to match deserialization 4 days ago
  Nitish cec84bb0fc
Fix environment activation command in README (#1257) 5 days ago
  github-actions[bot] 158204a8f8
chore: Update Cargo.lock (#1254) 6 days ago
  Philipp Oppermann c77b3f42b5
Update to `macos-14` in `cross-check` CI job (#1255) 6 days ago
  sjfhsjfh 8a7830ca1f
Fix topic statistcs [sic: "statistics"] (#1247) 1 week ago
  Philipp Oppermann abc00082ba
chore: Update Cargo.lock (#1246) 1 week ago
  Dora Bot 41d187decd chore: Update Cargo.lock 2 weeks ago
  Haixuan Xavier Tao ddb64e81c2
Python node event mutex (#1244) 2 weeks ago
  Oliver Ortlieb 896516089e chore: rustfmt 2 weeks ago
  Oliver Ortlieb aa9db52b13 chore: use Arc<Mutex> 2 weeks ago
  Oliver Ortlieb 41087a2d5e fix: type errors after merge 2 weeks ago
  Oliver Ortlieb 496f75ef0b
Merge branch 'main' into python-node-event-mutex 2 weeks ago
  Drin e14d802647
Standalone ros2 example (#1236) 2 weeks ago
  Philipp Oppermann 19082e1b9a
Add is empty method to the API (#1242) 2 weeks ago
  haixuantao 32bc10c4f2 Add is empty method 3 weeks ago
  Haixuan Xavier Tao 2143f93b97
Add completions subcommand (#1240) 2 weeks ago
  Haixuan Xavier Tao 7da7887752
add `try_recv` method for python (#1241) 2 weeks ago
  Oliver Ortlieb ec1c8f5dde chore: rustfmt 2 weeks ago
  Oliver Ortlieb 929a625445 fix: add mutex around events 2 weeks ago
  Oliver Ortlieb 8f64fef486 fix: change get_mut and callers to take &self instead of &mut self 2 weeks ago
  Oliver Ortlieb 15b8e1a3c2 chore: add example dataflow 2 weeks ago
  haixuantao 1ea1b482fa Make try_recv not error out 2 weeks ago
  drindr 51ef62dd7c Completions command: autodetect shell and more help info 2 weeks ago
  haixuantao 7c128114e7 Improve documentation 2 weeks ago
  haixuantao bd695c420b add try_recv method for python 3 weeks ago
  drindr d3c4da9c6f Add completions subcommand 3 weeks ago
61 changed files with 2692 additions and 422 deletions
Split View
  1. +9
    -12
      .github/workflows/ci.yml
  2. +1
    -1
      .github/workflows/pip-release.yml
  3. +325
    -219
      Cargo.lock
  4. +1
    -16
      Cargo.toml
  5. +85
    -22
      apis/python/node/src/lib.rs
  6. +1
    -1
      apis/python/operator/src/lib.rs
  7. +6
    -0
      apis/rust/node/src/event_stream/mod.rs
  8. +2
    -0
      binaries/cli/Cargo.toml
  9. +84
    -0
      binaries/cli/src/command/completion.rs
  10. +19
    -0
      binaries/cli/src/command/inspect/mod.rs
  11. +442
    -0
      binaries/cli/src/command/inspect/top.rs
  12. +14
    -0
      binaries/cli/src/command/mod.rs
  13. +186
    -0
      binaries/cli/src/command/node/list.rs
  14. +19
    -0
      binaries/cli/src/command/node/mod.rs
  15. +308
    -53
      binaries/cli/src/command/topic/hz.rs
  16. +60
    -0
      binaries/coordinator/src/lib.rs
  17. +25
    -8
      binaries/coordinator/src/listener.rs
  18. +10
    -1
      binaries/coordinator/src/run/mod.rs
  19. +1
    -0
      binaries/daemon/Cargo.toml
  20. +120
    -2
      binaries/daemon/src/lib.rs
  21. +17
    -7
      binaries/daemon/src/spawn/prepared.rs
  22. +14
    -0
      examples/python-concurrent-rw/dataflow.yml
  23. +11
    -0
      examples/python-concurrent-rw/pyproject.toml
  24. +47
    -0
      examples/python-concurrent-rw/receive_data.py
  25. +4
    -3
      examples/python-drain/receive_data.py
  26. +1
    -1
      examples/python-operator-dataflow/README.md
  27. +0
    -15
      examples/python-ros2-dataflow/run.rs
  28. +0
    -0
      examples/ros2-bridge/c++/turtle/.gitignore
  29. +0
    -0
      examples/ros2-bridge/c++/turtle/README.md
  30. +0
    -0
      examples/ros2-bridge/c++/turtle/dataflow.yml
  31. +5
    -0
      examples/ros2-bridge/c++/turtle/node-rust-api/main.cc
  32. +32
    -5
      examples/ros2-bridge/c++/turtle/run.rs
  33. +2
    -4
      examples/ros2-bridge/python/turtle/README.md
  34. +0
    -0
      examples/ros2-bridge/python/turtle/control_node.py
  35. +0
    -0
      examples/ros2-bridge/python/turtle/dataflow.yml
  36. +0
    -0
      examples/ros2-bridge/python/turtle/random_turtle.py
  37. +41
    -0
      examples/ros2-bridge/python/turtle/run.rs
  38. +23
    -2
      examples/ros2-bridge/rust/rust-ros2-example-node/Cargo.toml
  39. +125
    -0
      examples/ros2-bridge/rust/rust-ros2-example-node/src/service_client.rs
  40. +106
    -0
      examples/ros2-bridge/rust/rust-ros2-example-node/src/service_server.rs
  41. +104
    -0
      examples/ros2-bridge/rust/rust-ros2-example-node/src/topic_pub.rs
  42. +91
    -0
      examples/ros2-bridge/rust/rust-ros2-example-node/src/topic_sub.rs
  43. +5
    -2
      examples/ros2-bridge/rust/rust-ros2-example-node/src/turtle.rs
  44. +7
    -0
      examples/ros2-bridge/rust/service-client/dataflow.yml
  45. +39
    -0
      examples/ros2-bridge/rust/service-client/run.rs
  46. +7
    -0
      examples/ros2-bridge/rust/service-server/dataflow.yml
  47. +63
    -0
      examples/ros2-bridge/rust/service-server/run.rs
  48. +7
    -0
      examples/ros2-bridge/rust/topic-pub/dataflow.yml
  49. +39
    -0
      examples/ros2-bridge/rust/topic-pub/run.rs
  50. +7
    -0
      examples/ros2-bridge/rust/topic-sub/dataflow.yml
  51. +39
    -0
      examples/ros2-bridge/rust/topic-sub/run.rs
  52. +0
    -0
      examples/ros2-bridge/rust/turtle/README.md
  53. +9
    -0
      examples/ros2-bridge/rust/turtle/dataflow.yml
  54. +41
    -0
      examples/ros2-bridge/rust/turtle/run.rs
  55. +0
    -9
      examples/rust-ros2-dataflow/dataflow.yml
  56. +0
    -15
      examples/rust-ros2-dataflow/run.rs
  57. +38
    -0
      libraries/extensions/ros2-bridge/Cargo.toml
  58. +1
    -0
      libraries/message/src/cli_to_coordinator.rs
  59. +5
    -24
      libraries/message/src/common.rs
  60. +25
    -0
      libraries/message/src/coordinator_to_cli.rs
  61. +19
    -0
      libraries/message/src/daemon_to_coordinator.rs

+ 9
- 12
.github/workflows/ci.yml View File

@@ -223,9 +223,8 @@ jobs:
env:
QT_QPA_PLATFORM: offscreen
run: |
source /opt/ros/humble/setup.bash && ros2 run turtlesim turtlesim_node &
source /opt/ros/humble/setup.bash && ros2 run examples_rclcpp_minimal_service service_main &
cargo run --example rust-ros2-dataflow --features="ros2-examples"
source /opt/ros/humble/setup.bash &&
cargo run -p dora-ros2-bridge --example rust-ros2-dataflow --features="ros2-examples"
- uses: actions/setup-python@v5
if: runner.os != 'Windows'
with:
@@ -239,9 +238,8 @@ jobs:
env:
QT_QPA_PLATFORM: offscreen
run: |
# Reset only the turtlesim instance as it is not destroyed at the end of the previous job
source /opt/ros/humble/setup.bash && ros2 service call /reset std_srvs/srv/Empty &
uv venv --seed -p 3.12
source /opt/ros/humble/setup.bash &&
uv venv --seed -p 3.12
## If not Windows
if [ "$RUNNER_OS" != "Windows" ]; then
source .venv/bin/activate
@@ -250,15 +248,14 @@ jobs:
fi
uv pip install -e apis/python/node
uv pip install pyarrow
cargo run --example python-ros2-dataflow --features="ros2-examples"
cargo run -p dora-ros2-bridge --example python-ros2-dataflow --features="ros2-examples"
- name: "c++-ros2-dataflow"
timeout-minutes: 30
env:
QT_QPA_PLATFORM: offscreen
run: |
# Reset only the turtlesim instance as it is not destroyed at the end of the previous job
source /opt/ros/humble/setup.bash && ros2 service call /reset std_srvs/srv/Empty &
cargo run --example cxx-ros2-dataflow --features="ros2-examples"
source /opt/ros/humble/setup.bash &&
cargo run -p dora-ros2-bridge --example cxx-ros2-dataflow --features="ros2-examples"

bench:
name: "Bench"
@@ -567,9 +564,9 @@ jobs:
target: armv7-unknown-linux-musleabihf
- runner: ubuntu-22.04
target: x86_64-pc-windows-gnu
- runner: macos-13
- runner: macos-15
target: aarch64-apple-darwin
- runner: macos-13
- runner: macos-15
target: x86_64-apple-darwin
fail-fast: false
steps:


+ 1
- 1
.github/workflows/pip-release.yml View File

@@ -203,7 +203,7 @@ jobs:
fail-fast: false
matrix:
platform:
- runner: macos-13
- runner: macos-15
target: aarch64
repository:
- path: apis/python/node


+ 325
- 219
Cargo.lock View File

@@ -476,9 +476,9 @@ checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"

[[package]]
name = "base64ct"
version = "1.8.0"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba"
checksum = "0e050f626429857a27ddccb31e0aca21356bfa709c04041aefddac081a8f068a"

[[package]]
name = "benchmark-example-node"
@@ -596,9 +596,9 @@ dependencies = [

[[package]]
name = "cc"
version = "1.2.47"
version = "1.2.49"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd405d82c84ff7f35739f175f67d8b9fb7687a0e84ccdc78bd3568839827cf07"
checksum = "90583009037521a116abf44494efecd645ba48b6622457080f080b85544e2215"
dependencies = [
"find-msvc-tools",
"jobserver",
@@ -711,6 +711,15 @@ dependencies = [
"strsim",
]

[[package]]
name = "clap_complete"
version = "4.5.61"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39615915e2ece2550c0149addac32fb5bd312c657f43845bb9088cb9c8a7c992"
dependencies = [
"clap",
]

[[package]]
name = "clap_derive"
version = "4.5.49"
@@ -869,9 +878,9 @@ dependencies = [

[[package]]
name = "convert_case"
version = "0.7.1"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb402b8d4c85569410425650ce3eddc7d698ed96d39a73f941b08fb63082f1e7"
checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9"
dependencies = [
"unicode-segmentation",
]
@@ -1020,7 +1029,7 @@ checksum = "829d955a0bb380ef178a640b91779e3987da38c9aea133b20614cfed8cdea9c6"
dependencies = [
"bitflags 2.10.0",
"crossterm_winapi",
"mio 1.1.0",
"mio 1.1.1",
"parking_lot",
"rustix 0.38.44",
"signal-hook",
@@ -1038,7 +1047,7 @@ dependencies = [
"crossterm_winapi",
"derive_more",
"document-features",
"mio 1.1.0",
"mio 1.1.1",
"parking_lot",
"rustix 1.1.2",
"signal-hook",
@@ -1132,9 +1141,9 @@ dependencies = [

[[package]]
name = "cxx"
version = "1.0.189"
version = "1.0.192"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b788601e7e3e6944d9b37efbae0bee7ee44d9aab533838d4854f631534a1a49"
checksum = "bbda285ba6e5866529faf76352bdf73801d9b44a6308d7cd58ca2379f378e994"
dependencies = [
"cc",
"cxx-build",
@@ -1147,9 +1156,9 @@ dependencies = [

[[package]]
name = "cxx-build"
version = "1.0.189"
version = "1.0.192"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e11d62eb0de451f6d3aa83f2cec0986af61c23bd7515f1e2d6572c6c9e53c96"
checksum = "af9efde466c5d532d57efd92f861da3bdb7f61e369128ce8b4c3fe0c9de4fa4d"
dependencies = [
"cc",
"codespan-reporting",
@@ -1162,9 +1171,9 @@ dependencies = [

[[package]]
name = "cxxbridge-cmd"
version = "1.0.189"
version = "1.0.192"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a368ed4a0fd83ebd3f2808613842d942a409c41cc24cd9d83f1696a00d78afe"
checksum = "3efb93799095bccd4f763ca07997dc39a69e5e61ab52d2c407d4988d21ce144d"
dependencies = [
"clap",
"codespan-reporting",
@@ -1176,15 +1185,15 @@ dependencies = [

[[package]]
name = "cxxbridge-flags"
version = "1.0.189"
version = "1.0.192"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9571a7c69f236d7202f517553241496125ed56a86baa1ce346d02aa72357c74"
checksum = "3092010228026e143b32a4463ed9fa8f86dca266af4bf5f3b2a26e113dbe4e45"

[[package]]
name = "cxxbridge-macro"
version = "1.0.189"
version = "1.0.192"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eba2aaae28ca1d721d3f364bb29d51811921e7194c08bb9eaf745c8ab8d81309"
checksum = "31d72ebfcd351ae404fb00ff378dfc9571827a00722c9e735c9181aec320ba0a"
dependencies = [
"indexmap 2.12.1",
"proc-macro2",
@@ -1314,24 +1323,56 @@ dependencies = [
"syn 2.0.111",
]

[[package]]
name = "derive_builder"
version = "0.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "507dfb09ea8b7fa618fcf76e953f4f5e192547945816d5358edffe39f6f94947"
dependencies = [
"derive_builder_macro",
]

[[package]]
name = "derive_builder_core"
version = "0.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d5bcf7b024d6835cfb3d473887cd966994907effbe9227e8c8219824d06c4e8"
dependencies = [
"darling 0.20.11",
"proc-macro2",
"quote",
"syn 2.0.111",
]

[[package]]
name = "derive_builder_macro"
version = "0.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c"
dependencies = [
"derive_builder_core",
"syn 2.0.111",
]

[[package]]
name = "derive_more"
version = "2.0.1"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678"
checksum = "10b768e943bed7bf2cab53df09f4bc34bfd217cdb57d971e769874c9a6710618"
dependencies = [
"derive_more-impl",
]

[[package]]
name = "derive_more-impl"
version = "2.0.1"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3"
checksum = "6d286bfdaf75e988b4a78e013ecd79c581e06399ab53fbacd2d916c2f904f30b"
dependencies = [
"convert_case",
"proc-macro2",
"quote",
"rustc_version",
"syn 2.0.111",
]

@@ -1451,6 +1492,7 @@ dependencies = [
"arrow-json",
"chrono",
"clap",
"clap_complete",
"colored 2.2.0",
"communication-layer-request-reply",
"crossterm 0.29.0",
@@ -1482,6 +1524,7 @@ dependencies = [
"serde",
"serde_json",
"serde_yaml 0.9.34+deprecated",
"sysinfo 0.36.1",
"tabwriter",
"termcolor",
"tokio",
@@ -1571,6 +1614,7 @@ dependencies = [
"serde_yaml 0.9.34+deprecated",
"shared-memory-server",
"shellexpand 3.1.1",
"sysinfo 0.36.1",
"tokio",
"tokio-stream",
"tracing",
@@ -1794,12 +1838,15 @@ name = "dora-ros2-bridge"
version = "0.3.13"
dependencies = [
"array-init",
"dora-cli",
"dora-daemon",
"dora-ros2-bridge-msg-gen",
"dunce",
"eyre",
"flume 0.11.1",
"futures",
"futures-timer",
"process-wrap",
"rand 0.8.5",
"ros2-client",
"rust-format",
@@ -2423,6 +2470,18 @@ dependencies = [
"wasm-bindgen",
]

[[package]]
name = "getset"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9cf0fc11e47561d47397154977bc219f4cf809b2974facc3ccb3b89e2436f912"
dependencies = [
"proc-macro-error2",
"proc-macro2",
"quote",
"syn 2.0.111",
]

[[package]]
name = "git-version"
version = "0.3.9"
@@ -2659,7 +2718,7 @@ dependencies = [
"tokio",
"tokio-rustls",
"tower-service",
"webpki-roots 1.0.4",
"webpki-roots",
]

[[package]]
@@ -2677,9 +2736,9 @@ dependencies = [

[[package]]
name = "hyper-util"
version = "0.1.18"
version = "0.1.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52e9a2a24dc5c6821e71a7030e1e14b7b632acac55c40e9d2e082c621261bb56"
checksum = "727805d60e7938b76b826a6ef209eb70eaa1812794f9424d4a4e2d740662df5f"
dependencies = [
"base64",
"bytes",
@@ -2771,9 +2830,9 @@ checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a"

[[package]]
name = "icu_properties"
version = "2.1.1"
version = "2.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e93fcd3157766c0c8da2f8cff6ce651a31f0810eaa1c51ec363ef790bbb5fb99"
checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec"
dependencies = [
"icu_collections",
"icu_locale_core",
@@ -2785,9 +2844,9 @@ dependencies = [

[[package]]
name = "icu_properties_data"
version = "2.1.1"
version = "2.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02845b3647bb045f1100ecd6480ff52f34c35f82d9880e029d329c21d1054899"
checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af"

[[package]]
name = "icu_provider"
@@ -2955,9 +3014,9 @@ dependencies = [

[[package]]
name = "instability"
version = "0.3.9"
version = "0.3.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "435d80800b936787d62688c927b6490e887c7ef5ff9ce922c6c6050fca75eb9a"
checksum = "6778b0196eefee7df739db78758e5cf9b37412268bfa5650bfeed028aed20d9c"
dependencies = [
"darling 0.20.11",
"indoc",
@@ -3130,9 +3189,9 @@ dependencies = [

[[package]]
name = "js-sys"
version = "0.3.82"
version = "0.3.83"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b011eec8cc36da2aab2d5cff675ec18454fad408585853910a202391cf9f8e65"
checksum = "464a3709c7f55f1f721e5389aa6ea4e3bc6aba669353300af094b29ffbdde1d8"
dependencies = [
"once_cell",
"wasm-bindgen",
@@ -3271,9 +3330,9 @@ dependencies = [

[[package]]
name = "libc"
version = "0.2.177"
version = "0.2.178"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976"
checksum = "37c93d8daa9d8a012fd8ab92f088405fb202ea0b6ab73ee2482ae66af4f42091"

[[package]]
name = "libgit2-sys"
@@ -3393,14 +3452,14 @@ checksum = "11d3d7f243d5c5a8b9bb5d6dd2b1602c0cb0b9db1621bafc7ed66e35ff9fe092"

[[package]]
name = "local-ip-address"
version = "0.6.5"
version = "0.6.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "656b3b27f8893f7bbf9485148ff9a65f019e3f33bd5cdc87c83cab16b3fd9ec8"
checksum = "0a60bf300a990b2d1ebdde4228e873e8e4da40d834adbf5265f3da1457ede652"
dependencies = [
"libc",
"neli",
"thiserror 2.0.17",
"windows-sys 0.59.0",
"windows-sys 0.61.2",
]

[[package]]
@@ -3414,11 +3473,11 @@ dependencies = [

[[package]]
name = "log"
version = "0.4.28"
version = "0.4.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432"
checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897"
dependencies = [
"serde",
"serde_core",
]

[[package]]
@@ -3571,9 +3630,9 @@ dependencies = [

[[package]]
name = "mio"
version = "1.1.0"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69d83b0086dc8ecf3ce9ae2874b2d1290252e2a30720bea58a5c6639b0092873"
checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc"
dependencies = [
"libc",
"log",
@@ -3648,27 +3707,31 @@ checksum = "27b02d87554356db9e9a873add8782d4ea6e3e58ea071a9adb9a2e8ddb884a8b"

[[package]]
name = "neli"
version = "0.6.5"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93062a0dce6da2517ea35f301dfc88184ce18d3601ec786a727a87bf535deca9"
checksum = "e23bebbf3e157c402c4d5ee113233e5e0610cc27453b2f07eefce649c7365dcc"
dependencies = [
"bitflags 2.10.0",
"byteorder",
"derive_builder",
"getset",
"libc",
"log",
"neli-proc-macros",
"parking_lot",
]

[[package]]
name = "neli-proc-macros"
version = "0.1.4"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c8034b7fbb6f9455b2a96c19e6edf8dc9fc34c70449938d8ee3b4df363f61fe"
checksum = "05d8d08c6e98f20a62417478ebf7be8e1425ec9acecc6f63e22da633f6b71609"
dependencies = [
"either",
"proc-macro2",
"quote",
"serde",
"syn 1.0.109",
"syn 2.0.111",
]

[[package]]
@@ -3974,6 +4037,16 @@ version = "4.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef25abbcd74fb2609453eb695bd2f860d389e457f67dc17cafc8b8cbc89d0c33"

[[package]]
name = "objc2-io-kit"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33fafba39597d6dc1fb709123dfa8289d39406734be322956a69f0931c73bb15"
dependencies = [
"libc",
"objc2-core-foundation",
]

[[package]]
name = "oid-registry"
version = "0.8.1"
@@ -4127,7 +4200,7 @@ checksum = "3ff095ac36df870a11380877fb7e9b1e7529abfe994fd06a6d6d17ca1c77d30b"
dependencies = [
"eyre",
"opentelemetry 0.29.1",
"sysinfo",
"sysinfo 0.34.2",
"tokio",
"tracing",
]
@@ -4509,6 +4582,28 @@ dependencies = [
"toml_edit",
]

[[package]]
name = "proc-macro-error-attr2"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5"
dependencies = [
"proc-macro2",
"quote",
]

[[package]]
name = "proc-macro-error2"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802"
dependencies = [
"proc-macro-error-attr2",
"proc-macro2",
"quote",
"syn 2.0.111",
]

[[package]]
name = "proc-macro2"
version = "1.0.103"
@@ -4956,9 +5051,9 @@ checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58"

[[package]]
name = "reqwest"
version = "0.12.24"
version = "0.12.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f"
checksum = "3b4c14b2d9afca6a60277086b0cc6a6ae0b568f6f7916c943a8cdc79f8be240f"
dependencies = [
"base64",
"bytes",
@@ -4992,7 +5087,7 @@ dependencies = [
"wasm-bindgen",
"wasm-bindgen-futures",
"web-sys",
"webpki-roots 1.0.4",
"webpki-roots",
]

[[package]]
@@ -5021,14 +5116,15 @@ dependencies = [

[[package]]
name = "ron"
version = "0.11.0"
version = "0.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db09040cc89e461f1a265139777a2bde7f8d8c67c4936f700c63ce3e2904d468"
checksum = "fd490c5b18261893f14449cbd28cb9c0b637aebf161cd77900bfdedaff21ec32"
dependencies = [
"base64",
"bitflags 2.10.0",
"once_cell",
"serde",
"serde_derive",
"typeid",
"unicode-ident",
]

@@ -5129,7 +5225,7 @@ dependencies = [
]

[[package]]
name = "rust-ros2-dataflow-example-node"
name = "rust-ros2-example-node"
version = "0.3.13"
dependencies = [
"dora-node-api",
@@ -5247,7 +5343,7 @@ dependencies = [
"once_cell",
"ring",
"rustls-pki-types",
"rustls-webpki 0.103.8",
"rustls-webpki",
"subtle",
"zeroize",
]
@@ -5275,9 +5371,9 @@ dependencies = [

[[package]]
name = "rustls-pki-types"
version = "1.13.0"
version = "1.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94182ad936a0c91c324cd46c6511b9510ed16af436d7b5bab34beab0afd55f7a"
checksum = "708c0f9d5f54ba0272468c1d306a52c495b31fa155e91bc25371e6df7996908c"
dependencies = [
"web-time",
"zeroize",
@@ -5297,7 +5393,7 @@ dependencies = [
"rustls",
"rustls-native-certs",
"rustls-platform-verifier-android",
"rustls-webpki 0.103.8",
"rustls-webpki",
"security-framework",
"security-framework-sys",
"webpki-root-certs",
@@ -5310,17 +5406,6 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f"

[[package]]
name = "rustls-webpki"
version = "0.102.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9"
dependencies = [
"ring",
"rustls-pki-types",
"untrusted",
]

[[package]]
name = "rustls-webpki"
version = "0.103.8"
@@ -5357,7 +5442,7 @@ dependencies = [
"paste",
"safer_ffi-proc_macros",
"scopeguard",
"stabby",
"stabby 36.2.2",
"uninit",
"unwind_safe",
"with_builtin_macros",
@@ -5394,19 +5479,6 @@ dependencies = [
"windows-sys 0.61.2",
]

[[package]]
name = "schemars"
version = "0.8.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fbf2ae1b8bc8e02df939598064d22402220cd5bbcca1c76f7d6a310974d5615"
dependencies = [
"dyn-clone",
"either",
"schemars_derive 0.8.22",
"serde",
"serde_json",
]

[[package]]
name = "schemars"
version = "0.9.0"
@@ -5426,24 +5498,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9558e172d4e8533736ba97870c4b2cd63f84b382a3d6eb063da41b91cce17289"
dependencies = [
"dyn-clone",
"either",
"ref-cast",
"schemars_derive 1.1.0",
"schemars_derive",
"serde",
"serde_json",
]

[[package]]
name = "schemars_derive"
version = "0.8.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32e265784ad618884abaea0600a9adf15393368d840e0222d101a072f3f7534d"
dependencies = [
"proc-macro2",
"quote",
"serde_derive_internals",
"syn 2.0.111",
]

[[package]]
name = "schemars_derive"
version = "1.1.0"
@@ -5660,9 +5721,9 @@ dependencies = [

[[package]]
name = "serde_with"
version = "3.16.0"
version = "3.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "10574371d41b0d9b2cff89418eda27da52bcaff2cc8741db26382a77c29131f1"
checksum = "4fa237f2807440d238e0364a218270b98f767a00d3dada77b1c53ae88940e2e7"
dependencies = [
"base64",
"chrono",
@@ -5679,9 +5740,9 @@ dependencies = [

[[package]]
name = "serde_with_macros"
version = "3.16.0"
version = "3.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08a72d8216842fdd57820dc78d840bef99248e35fb2554ff923319e60f2d686b"
checksum = "52a8e3ca0ca629121f70ab50f95249e5a6f925cc0f6ffe8256c45b728875706c"
dependencies = [
"darling 0.21.3",
"proc-macro2",
@@ -5828,7 +5889,7 @@ checksum = "b75a19a7a740b25bc7944bdee6172368f988763b744e3d4dfe753f6b4ece40cc"
dependencies = [
"libc",
"mio 0.8.11",
"mio 1.1.0",
"mio 1.1.1",
"signal-hook",
]

@@ -5853,9 +5914,9 @@ dependencies = [

[[package]]
name = "simd-adler32"
version = "0.3.7"
version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe"
checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2"

[[package]]
name = "simdutf8"
@@ -5973,7 +6034,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89b7e94eaf470c2e76b5f15fb2fb49714471a36cc512df5ee231e62e82ec79f8"
dependencies = [
"rustversion",
"stabby-abi",
"stabby-abi 36.2.2",
]

[[package]]
name = "stabby"
version = "72.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "976399a0c48ea769ef7f5dc303bb88240ab8d84008647a6b2303eced3dab3945"
dependencies = [
"rustversion",
"stabby-abi 72.1.1",
]

[[package]]
@@ -5985,7 +6056,19 @@ dependencies = [
"rustc_version",
"rustversion",
"sha2-const-stable",
"stabby-macros",
"stabby-macros 36.2.2",
]

[[package]]
name = "stabby-abi"
version = "72.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7b54832a9a1f92a0e55e74a5c0332744426edc515bb3fbad82f10b874a87f0d"
dependencies = [
"rustc_version",
"rustversion",
"sha2-const-stable",
"stabby-macros 72.1.1",
]

[[package]]
@@ -6001,6 +6084,19 @@ dependencies = [
"syn 1.0.109",
]

[[package]]
name = "stabby-macros"
version = "72.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a768b1e51e4dbfa4fa52ae5c01241c0a41e2938fdffbb84add0c8238092f9091"
dependencies = [
"proc-macro-crate",
"proc-macro2",
"quote",
"rand 0.8.5",
"syn 1.0.109",
]

[[package]]
name = "stable_deref_trait"
version = "1.2.1"
@@ -6102,6 +6198,20 @@ dependencies = [
"windows 0.57.0",
]

[[package]]
name = "sysinfo"
version = "0.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "252800745060e7b9ffb7b2badbd8b31cfa4aa2e61af879d0a3bf2a317c20217d"
dependencies = [
"libc",
"memchr",
"ntapi",
"objc2-core-foundation",
"objc2-io-kit",
"windows 0.61.3",
]

[[package]]
name = "tabwriter"
version = "1.4.1"
@@ -6318,7 +6428,7 @@ checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408"
dependencies = [
"bytes",
"libc",
"mio 1.1.0",
"mio 1.1.1",
"parking_lot",
"pin-project-lite",
"signal-hook-registry",
@@ -6396,9 +6506,9 @@ dependencies = [

[[package]]
name = "toml_edit"
version = "0.23.7"
version = "0.23.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6485ef6d0d9b5d0ec17244ff7eb05310113c3f316f2d14200d4de56b3cb98f8d"
checksum = "5d7cbc3b4b49633d57a0509303158ca50de80ae32c265093b24c414705807832"
dependencies = [
"indexmap 2.12.1",
"toml_datetime",
@@ -6478,9 +6588,9 @@ dependencies = [

[[package]]
name = "tower-http"
version = "0.6.7"
version = "0.6.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9cf146f99d442e8e68e585f5d798ccd3cad9a7835b917e09728880a862706456"
checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8"
dependencies = [
"bitflags 2.10.0",
"bytes",
@@ -6508,9 +6618,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"

[[package]]
name = "tracing"
version = "0.1.41"
version = "0.1.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
checksum = "2d15d90a0b5c19378952d479dc858407149d7bb45a14de0142f6c534b16fc647"
dependencies = [
"log",
"pin-project-lite",
@@ -6520,9 +6630,9 @@ dependencies = [

[[package]]
name = "tracing-attributes"
version = "0.1.30"
version = "0.1.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903"
checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da"
dependencies = [
"proc-macro2",
"quote",
@@ -6531,9 +6641,9 @@ dependencies = [

[[package]]
name = "tracing-core"
version = "0.1.34"
version = "0.1.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678"
checksum = "7a04e24fab5c89c6a36eb8558c9656f30d81de51dfa4d3b45f26b21d61fa0a6c"
dependencies = [
"once_cell",
"valuable",
@@ -6580,9 +6690,9 @@ dependencies = [

[[package]]
name = "tracing-subscriber"
version = "0.3.20"
version = "0.3.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5"
checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e"
dependencies = [
"matchers",
"nu-ansi-term",
@@ -6629,6 +6739,12 @@ version = "2.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ea3136b675547379c4bd395ca6b938e5ad3c3d20fad76e7fe85f9e0d011419c"

[[package]]
name = "typeid"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc7d623258602320d5c55d1bc22793b57daff0ec7efc270ea7d55ce1d5f5471c"

[[package]]
name = "typenum"
version = "1.19.0"
@@ -6746,13 +6862,13 @@ checksum = "0976c77def3f1f75c4ef892a292c31c0bbe9e3d0702c63044d7c76db298171a3"

[[package]]
name = "unzip-n"
version = "0.1.2"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2e7e85a0596447f0f2ac090e16bc4c516c6fe91771fb0c0ccf7fa3dae896b9c"
checksum = "3b5bb2756c16fb66f80cfbf5fb0e0c09a7001e739f453c9ec241b9c8b1556fda"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
"syn 2.0.111",
]

[[package]]
@@ -6793,23 +6909,23 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"

[[package]]
name = "uuid"
version = "1.18.1"
version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2f87b8aa10b915a06587d0dec516c282ff295b475d94abf425d62b57710070a2"
checksum = "e2e054861b4bd027cd373e18e8d8d8e6548085000e41290d95ce0c373a654b4a"
dependencies = [
"getrandom 0.3.4",
"js-sys",
"rand 0.9.2",
"serde",
"serde_core",
"uuid-macro-internal",
"wasm-bindgen",
]

[[package]]
name = "uuid-macro-internal"
version = "1.18.1"
version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9384a660318abfbd7f8932c34d67e4d1ec511095f95972ddc01e19d7ba8413f"
checksum = "39d11901c36b3650df7acb0f9ebe624f35b5ac4e1922ecd3c57f444648429594"
dependencies = [
"proc-macro2",
"quote",
@@ -6900,9 +7016,9 @@ dependencies = [

[[package]]
name = "wasm-bindgen"
version = "0.2.105"
version = "0.2.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da95793dfc411fbbd93f5be7715b0578ec61fe87cb1a42b12eb625caa5c5ea60"
checksum = "0d759f433fa64a2d763d1340820e46e111a7a5ab75f993d1852d70b03dbb80fd"
dependencies = [
"cfg-if 1.0.4",
"once_cell",
@@ -6913,9 +7029,9 @@ dependencies = [

[[package]]
name = "wasm-bindgen-futures"
version = "0.4.55"
version = "0.4.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "551f88106c6d5e7ccc7cd9a16f312dd3b5d36ea8b4954304657d5dfba115d4a0"
checksum = "836d9622d604feee9e5de25ac10e3ea5f2d65b41eac0d9ce72eb5deae707ce7c"
dependencies = [
"cfg-if 1.0.4",
"js-sys",
@@ -6926,9 +7042,9 @@ dependencies = [

[[package]]
name = "wasm-bindgen-macro"
version = "0.2.105"
version = "0.2.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04264334509e04a7bf8690f2384ef5265f05143a4bff3889ab7a3269adab59c2"
checksum = "48cb0d2638f8baedbc542ed444afc0644a29166f1595371af4fecf8ce1e7eeb3"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
@@ -6936,9 +7052,9 @@ dependencies = [

[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.105"
version = "0.2.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "420bc339d9f322e562942d52e115d57e950d12d88983a14c79b86859ee6c7ebc"
checksum = "cefb59d5cd5f92d9dcf80e4683949f15ca4b511f4ac0a6e14d4e1ac60c6ecd40"
dependencies = [
"bumpalo",
"proc-macro2",
@@ -6949,18 +7065,18 @@ dependencies = [

[[package]]
name = "wasm-bindgen-shared"
version = "0.2.105"
version = "0.2.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76f218a38c84bcb33c25ec7059b07847d465ce0e0a76b995e134a45adcb6af76"
checksum = "cbc538057e648b67f72a982e708d485b2efa771e1ac05fec311f9f63e5800db4"
dependencies = [
"unicode-ident",
]

[[package]]
name = "web-sys"
version = "0.3.82"
version = "0.3.83"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a1f95c0d03a47f4ae1f7a64643a6bb97465d9b740f0fa8f90ea33915c99a9a1"
checksum = "9b32828d774c412041098d182a8b38b16ea816958e07cf40eec2bc080ae137ac"
dependencies = [
"js-sys",
"wasm-bindgen",
@@ -7002,15 +7118,6 @@ dependencies = [
"rustls-pki-types",
]

[[package]]
name = "webpki-roots"
version = "0.26.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9"
dependencies = [
"webpki-roots 1.0.4",
]

[[package]]
name = "webpki-roots"
version = "1.0.4"
@@ -7634,9 +7741,9 @@ checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650"

[[package]]
name = "winnow"
version = "0.7.13"
version = "0.7.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf"
checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829"
dependencies = [
"memchr",
]
@@ -7750,15 +7857,16 @@ dependencies = [

[[package]]
name = "zenoh"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c5df040795169d4b6e4fc8a5f9dce81b5210d1d606399634ec380deda30c347f"
checksum = "4e2df46d36ba8d173f18e9cce5011117fed81a90a22bc3cfe8bbcfaf66b54114"
dependencies = [
"ahash",
"arc-swap",
"async-trait",
"bytes",
"const_format",
"flate2",
"flume 0.11.1",
"futures",
"git-version",
@@ -7767,7 +7875,6 @@ dependencies = [
"lazy_static",
"nonempty-collections",
"once_cell",
"paste",
"petgraph",
"phf",
"rand 0.8.5",
@@ -7802,18 +7909,18 @@ dependencies = [

[[package]]
name = "zenoh-buffers"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "510921897b03793f399b7eaec176355a12293afa1e791c3f844ef12fa08aecc4"
checksum = "82d610eaa2099c16844b092c2b16605d115f354e332eb1d37e9af46e5430ecac"
dependencies = [
"zenoh-collections",
]

[[package]]
name = "zenoh-codec"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "585a77eff7a781726aeeafab52ad175be0dcfc92376a756c23a51f6701a73ba0"
checksum = "7f1cff28c30bca3d07e7b84f71f1c9b81d505449f6fec36a023cc8b601c01150"
dependencies = [
"tracing",
"uhlc 0.8.2",
@@ -7823,18 +7930,18 @@ dependencies = [

[[package]]
name = "zenoh-collections"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa17a7ecf321aba18eabf99a37390b7a6524e826b6f8cb38f9361479ece28887"
checksum = "a72cac7d4bbe99e41ef46c175e0dd2933e883b17cef3e60151e4461b5a088ad3"
dependencies = [
"ahash",
]

[[package]]
name = "zenoh-config"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c02801ef8fd8d0b10437055dac10fd961fdc3a6356439dd52ee627e7f31b9295"
checksum = "df65e4ab337079d897aaaa6d01474b4f8683daaa05a8671b18db34e71a872af7"
dependencies = [
"json5",
"nonempty-collections",
@@ -7857,9 +7964,9 @@ dependencies = [

[[package]]
name = "zenoh-core"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "325645d09d0f74f48051b51c2ab73e270c16d9d7d408c41eb47ed8f067451cd9"
checksum = "869deb22305e8f7a6aa324142ab6717f0cc979be8e4a2de646e8f31313224a1c"
dependencies = [
"lazy_static",
"tokio",
@@ -7869,9 +7976,9 @@ dependencies = [

[[package]]
name = "zenoh-crypto"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5244d615594f1c2149cba6177f15e76cc825e2839230e26693f2b723367b6c2f"
checksum = "7f5b09559108c9e11128730d5a6f551dbcc226adfd82423439ad7eeed024c736"
dependencies = [
"aes",
"hmac",
@@ -7883,15 +7990,15 @@ dependencies = [

[[package]]
name = "zenoh-keyexpr"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c9495c0ca85331f87442b465760a7fb7334370354742a4201b719cc2533afee7"
checksum = "b8b754ea6460c4c3c5dd67e6a8c62bfc076c16f8b794898fb3ea6f0039a5939e"
dependencies = [
"getrandom 0.2.16",
"hashbrown 0.16.1",
"keyed-set",
"rand 0.8.5",
"schemars 0.8.22",
"schemars 1.1.0",
"serde",
"token-cell",
"zenoh-result",
@@ -7899,9 +8006,9 @@ dependencies = [

[[package]]
name = "zenoh-link"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "001a7f07522b06ae6364ae787ab83eddf4e43f56f7f138fe3e54ea123ee3523b"
checksum = "0d0c04d554b67600967cb520061ef9f635caf7704978357334e66cd7179a204a"
dependencies = [
"zenoh-config",
"zenoh-link-commons",
@@ -7918,9 +8025,9 @@ dependencies = [

[[package]]
name = "zenoh-link-commons"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5be05b499b789afb2024de4253e12ccaa3e296f66a0db6147deb9acec491f622"
checksum = "52d1c652a706da1da46eae54a65c4b658153cdebfa213f061beea3bef71088f2"
dependencies = [
"async-trait",
"base64",
@@ -7930,7 +8037,7 @@ dependencies = [
"rustls",
"rustls-pemfile",
"rustls-pki-types",
"rustls-webpki 0.102.8",
"rustls-webpki",
"secrecy",
"serde",
"socket2 0.5.10",
@@ -7938,7 +8045,7 @@ dependencies = [
"tokio",
"tokio-util",
"tracing",
"webpki-roots 0.26.11",
"webpki-roots",
"x509-parser",
"zenoh-buffers",
"zenoh-codec",
@@ -7952,22 +8059,22 @@ dependencies = [

[[package]]
name = "zenoh-link-quic"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4168034bb7005d313ba834d105ede7c7f9fac48684f80a140fccb8302e9ca4aa"
checksum = "0069d889388d06e56eb86139021a1af12b9846222847aeabaa05eb92fc2ee108"
dependencies = [
"async-trait",
"base64",
"quinn",
"rustls",
"rustls-pemfile",
"rustls-webpki 0.102.8",
"rustls-webpki",
"secrecy",
"time",
"tokio",
"tokio-util",
"tracing",
"webpki-roots 0.26.11",
"webpki-roots",
"zenoh-config",
"zenoh-core",
"zenoh-link-commons",
@@ -7978,14 +8085,14 @@ dependencies = [

[[package]]
name = "zenoh-link-quic_datagram"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b954594b5283afcbfc65a5090829b7a09701a29c99ad528fc09cdcef607a4e51"
checksum = "dc81e512b51cf9b6ab479ba11be70e7a4ce072c0e985271e09c88e4bbff591b3"
dependencies = [
"async-trait",
"quinn",
"rustls",
"rustls-webpki 0.102.8",
"rustls-webpki",
"time",
"tokio",
"tokio-util",
@@ -7999,9 +8106,9 @@ dependencies = [

[[package]]
name = "zenoh-link-tcp"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b968d3506682350760fb69c07b3c9aea07f351ac39993c7d948bdc5a19d5645"
checksum = "279ff42ab4a65508069596045ab8a3c6fb03f94343c290a9b75f9568c49094e4"
dependencies = [
"async-trait",
"socket2 0.5.10",
@@ -8017,16 +8124,16 @@ dependencies = [

[[package]]
name = "zenoh-link-tls"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a810a4ba44011cc4c75713eadcf1a03fb2e22f0907d62f9b12c04d42ba1d596f"
checksum = "d3172615126b64456beb0c537351eb9c715f20f641e37ae8f84cb209816d205a"
dependencies = [
"async-trait",
"base64",
"rustls",
"rustls-pemfile",
"rustls-pki-types",
"rustls-webpki 0.102.8",
"rustls-webpki",
"secrecy",
"socket2 0.5.10",
"time",
@@ -8035,7 +8142,7 @@ dependencies = [
"tokio-rustls",
"tokio-util",
"tracing",
"webpki-roots 0.26.11",
"webpki-roots",
"x509-parser",
"zenoh-config",
"zenoh-core",
@@ -8047,9 +8154,9 @@ dependencies = [

[[package]]
name = "zenoh-link-udp"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4178b940cc613b4d0db3f06ceb71377199cacb22087f589bc60d1b4df6a2365"
checksum = "11006545a887167d431e8c3f2867ad386e7d678ee8babd3df4a7537b168e47de"
dependencies = [
"async-trait",
"libc",
@@ -8069,9 +8176,9 @@ dependencies = [

[[package]]
name = "zenoh-link-unixsock_stream"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "486de15a9f4ecc6d5839ec0153450bafee202355ea20810e81c65d70045d210b"
checksum = "ab57f1a899f869dea28168d6ab46a820b0d94e025e1db8cf8e686fc08b4f5de3"
dependencies = [
"async-trait",
"nix 0.29.0",
@@ -8088,9 +8195,9 @@ dependencies = [

[[package]]
name = "zenoh-link-ws"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1cb73dab1b8b8406ac288586727590e6830cc35a25f3c31ffb3d88518446f7e9"
checksum = "41c398aaee46ce19ca01c709ea38985da7da6b4ffb803c6b5c07a56e8a31f8b4"
dependencies = [
"async-trait",
"futures-util",
@@ -8109,9 +8216,9 @@ dependencies = [

[[package]]
name = "zenoh-macros"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2bbff17ef687e10ed4f6a956fb78fc97205a4a6242c657638e8e5da726840aca"
checksum = "21df1681ca014a4bd69db91d356bcf699d346f854ff8a6b73123cee75f5073cd"
dependencies = [
"proc-macro2",
"quote",
@@ -8121,14 +8228,14 @@ dependencies = [

[[package]]
name = "zenoh-plugin-trait"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "968571b0b585c5be1f19c553b368994424a869af46a4785bf5a3248194a97172"
checksum = "4651bc51951eb004e71bba71583f9ea60240bdd11351b61dbd6956e906088567"
dependencies = [
"git-version",
"libloading 0.8.9",
"serde",
"stabby",
"stabby 72.1.1",
"tracing",
"zenoh-config",
"zenoh-keyexpr",
@@ -8139,9 +8246,9 @@ dependencies = [

[[package]]
name = "zenoh-protocol"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "879a66ef51831f54f5f824bf9005bf73129bc2485053311ffb7a245f907f721c"
checksum = "ab17d39b24f6fbd0a24e03a5ab96c236d3d5ebb5071978338c9bc3fe1ccff626"
dependencies = [
"const_format",
"rand 0.8.5",
@@ -8154,18 +8261,18 @@ dependencies = [

[[package]]
name = "zenoh-result"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0f564d3324e443d0d48a4656d89bab1d0b263660fb7e17ba149c464e62f94bd"
checksum = "1b7d23a2f69c4b96ae95ac2f80808cce5f2ed9697ba37e769a75428fb6806a57"
dependencies = [
"anyhow",
]

[[package]]
name = "zenoh-runtime"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3a5cef13fd469aeb146518532751dbecd2b0f99ac56b6df667f954f11348dcf"
checksum = "a71f356bb63c1c9d37cb4bc89112e83a9fd6079458d1440232d67ca41a52c687"
dependencies = [
"lazy_static",
"ron",
@@ -8178,9 +8285,9 @@ dependencies = [

[[package]]
name = "zenoh-sync"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04a23123511fb7ce5990a778dcdcf41b5129ad8c5ef8d1e7ff719a538c3270e3"
checksum = "e3b1113111e247d6b1ebd5d406b43e23e78faca2c7525dca22a2a075c64787bd"
dependencies = [
"arc-swap",
"event-listener",
@@ -8193,9 +8300,9 @@ dependencies = [

[[package]]
name = "zenoh-task"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ad997618409140b23a2ef0957f7995afec2667ff3951f580de8a506c7814e41"
checksum = "7a1f690f81704b1e934b9fb7a63ce544af73521ca9d11b5a439cdfc74108dd28"
dependencies = [
"futures",
"tokio",
@@ -8207,16 +8314,15 @@ dependencies = [

[[package]]
name = "zenoh-transport"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2abd4521cd556093161156fc0799a506743304e1510fc7ed83adff99ca601f34"
checksum = "41cdc4dcb4a68161afe25e44871a66a5c3c347d86a77628ab101d859a9518db9"
dependencies = [
"async-trait",
"crossbeam-utils",
"flume 0.11.1",
"lazy_static",
"lz4_flex",
"paste",
"rand 0.8.5",
"ringbuffer-spsc",
"rsa",
@@ -8242,9 +8348,9 @@ dependencies = [

[[package]]
name = "zenoh-util"
version = "1.6.2"
version = "1.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42a736159ed0935c9ef258c151067ccce40f95f08d7f9360993e858e268b0a9d"
checksum = "ea9b52e8beceffc0deca01e10ca726c604286a1ac475f95acdd87ee55f18b120"
dependencies = [
"async-trait",
"const_format",
@@ -8255,7 +8361,7 @@ dependencies = [
"libc",
"libloading 0.8.9",
"pnet_datalink",
"schemars 0.8.22",
"schemars 1.1.0",
"serde",
"serde_json",
"shellexpand 3.1.1",
@@ -8269,18 +8375,18 @@ dependencies = [

[[package]]
name = "zerocopy"
version = "0.8.30"
version = "0.8.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ea879c944afe8a2b25fef16bb4ba234f47c694565e97383b36f3a878219065c"
checksum = "fd74ec98b9250adb3ca554bdde269adf631549f51d8a8f8f0a10b50f1cb298c3"
dependencies = [
"zerocopy-derive",
]

[[package]]
name = "zerocopy-derive"
version = "0.8.30"
version = "0.8.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf955aa904d6040f70dc8e9384444cb1030aed272ba3cb09bbc4ab9e7c1f34f5"
checksum = "d8a8d209fdf45cf5138cbb5a506f6b52522a25afccc534d1475dad8e31105c6a"
dependencies = [
"proc-macro2",
"quote",


+ 1
- 16
Cargo.toml View File

@@ -17,7 +17,7 @@ members = [
"examples/rust-dataflow/status-node",
"examples/rust-dataflow/sink",
"examples/rust-dataflow/sink-dynamic",
"examples/rust-ros2-dataflow/node",
"examples/ros2-bridge/rust/rust-ros2-example-node",
"examples/benchmark/node",
"examples/benchmark/sink",
"examples/multiple-daemons/node",
@@ -132,11 +132,6 @@ path = "examples/rust-dataflow/run.rs"
name = "rust-dataflow-git"
path = "examples/rust-dataflow-git/run.rs"

[[example]]
name = "rust-ros2-dataflow"
path = "examples/rust-ros2-dataflow/run.rs"
required-features = ["ros2-examples"]

# TODO: Fix example #192
[[example]]
name = "rust-dataflow-url"
@@ -154,11 +149,6 @@ path = "examples/c++-arrow-dataflow/run.rs"
name = "python-dataflow"
path = "examples/python-dataflow/run.rs"

[[example]]
name = "python-ros2-dataflow"
path = "examples/python-ros2-dataflow/run.rs"
required-features = ["ros2-examples"]

[[example]]
name = "python-operator-dataflow"
path = "examples/python-operator-dataflow/run.rs"
@@ -175,11 +165,6 @@ path = "examples/multiple-daemons/run.rs"
name = "cmake-dataflow"
path = "examples/cmake-dataflow/run.rs"

[[example]]
name = "cxx-ros2-dataflow"
path = "examples/c++-ros2-dataflow/run.rs"
required-features = ["ros2-examples"]

# The profile that 'dist' will build with
[profile.dist]
inherits = "release"


+ 85
- 22
apis/python/node/src/lib.rs View File

@@ -4,16 +4,18 @@ use std::env::current_dir;
use std::path::PathBuf;
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::Mutex;

use arrow::pyarrow::{FromPyArrow, ToPyArrow};
use dora_download::download_file;
use dora_node_api::dora_core::config::NodeId;
use dora_node_api::dora_core::descriptor::source_is_url;
use dora_node_api::merged::{MergeExternalSend, MergedEvent};
use dora_node_api::{DataflowId, DoraNode, EventStream};
use dora_node_api::{DataflowId, DoraNode, EventStream, TryRecvError};
use dora_operator_api_python::{DelayedCleanup, NodeCleanupHandle, PyEvent, pydict_to_metadata};
use dora_ros2_bridge_python::Ros2Subscription;
use eyre::{Context, ContextCompat};

use futures::{Stream, StreamExt};
use pyo3::prelude::*;
use pyo3::types::{PyBytes, PyDict};
@@ -134,7 +136,7 @@ impl Node {

Ok(Node {
events: Events {
inner: EventsInner::Dora(events),
inner: Arc::new(Mutex::new(EventsInner::Dora(events))),
_cleanup_handle: cleanup_handle,
},
dataflow_id,
@@ -166,7 +168,7 @@ impl Node {
/// :rtype: dict
#[pyo3(signature = (timeout=None))]
#[allow(clippy::should_implement_trait)]
pub fn next(&mut self, py: Python, timeout: Option<f32>) -> PyResult<Option<Py<PyDict>>> {
pub fn next(&self, py: Python, timeout: Option<f32>) -> PyResult<Option<Py<PyDict>>> {
let event = py.allow_threads(|| self.events.recv(timeout.map(Duration::from_secs_f32)));
if let Some(event) = event {
let dict = event
@@ -178,8 +180,18 @@ impl Node {
}
}

/// `.drain()` gives you all available inputs that the node has received.
/// It does not block until the next event becomes available.
///
/// ```python
/// events = node.drain()
/// for event in events:
/// print(event)
/// ```
///
/// :rtype: list[dict]
#[allow(clippy::should_implement_trait)]
pub fn drain(&mut self, py: Python) -> PyResult<Vec<Py<PyDict>>> {
pub fn drain(&self, py: Python) -> PyResult<Vec<Py<PyDict>>> {
let events = self
.events
.drain()
@@ -195,6 +207,34 @@ impl Node {
Ok(events)
}

/// `.try_recv()` gives you the next input in the queue that the node has received.
/// It does not block until the next event becomes available.
///
/// ```python
/// event = events.try_recv()
/// print(event)
/// ```
///
/// :rtype: dict
#[allow(clippy::should_implement_trait)]
pub fn try_recv(&mut self, py: Python) -> Option<Py<PyDict>> {
match self.events.try_recv() {
Ok(event) => match event.to_py_dict(py) {
Ok(dict) => Some(dict),
Err(_) => None,
},
Err(_) => None,
}
}

/// Check if there are any buffered events in the event stream.
///
/// :rtype: bool
#[allow(clippy::should_implement_trait)]
pub fn is_empty(&self) -> bool {
self.events.is_empty()
}

/// `.recv_async()` gives you the next input that the node has received asynchronously.
/// It does not block until the next event becomes available.
/// You can use timeout in seconds to return if no input is available.
@@ -214,7 +254,7 @@ impl Node {
/// :rtype: dict
#[pyo3(signature = (timeout=None))]
#[allow(clippy::should_implement_trait)]
pub async fn recv_async(&mut self, timeout: Option<f32>) -> PyResult<Option<Py<PyDict>>> {
pub async fn recv_async(&self, timeout: Option<f32>) -> PyResult<Option<Py<PyDict>>> {
let event = self
.events
.recv_async_timeout(timeout.map(Duration::from_secs_f32))
@@ -225,6 +265,7 @@ impl Node {
let dict = event
.to_py_dict(py)
.context("Could not convert event into a dict")?;

Ok(Some(dict))
})
} else {
@@ -245,7 +286,7 @@ impl Node {
/// Default behaviour is to timeout after 2 seconds.
///
/// :rtype: dict
pub fn __next__(&mut self, py: Python) -> PyResult<Option<Py<PyDict>>> {
pub fn __next__(&self, py: Python) -> PyResult<Option<Py<PyDict>>> {
self.next(py, None)
}

@@ -285,7 +326,7 @@ impl Node {
/// :rtype: None
#[pyo3(signature = (output_id, data, metadata=None))]
pub fn send_output(
&mut self,
&self,
output_id: String,
data: PyObject,
metadata: Option<Bound<'_, PyDict>>,
@@ -317,7 +358,7 @@ impl Node {
/// This method returns the parsed dataflow YAML file.
///
/// :rtype: dict
pub fn dataflow_descriptor(&mut self, py: Python) -> eyre::Result<PyObject> {
pub fn dataflow_descriptor(&self, py: Python) -> eyre::Result<PyObject> {
Ok(
pythonize::pythonize(py, &self.node.get_mut().dataflow_descriptor()?)
.map(|x| x.unbind())?,
@@ -327,7 +368,7 @@ impl Node {
/// Returns the node configuration.
///
/// :rtype: dict
pub fn node_config(&mut self, py: Python) -> eyre::Result<PyObject> {
pub fn node_config(&self, py: Python) -> eyre::Result<PyObject> {
Ok(pythonize::pythonize(py, &self.node.get_mut().node_config()).map(|x| x.unbind())?)
}

@@ -343,10 +384,7 @@ impl Node {
///
/// :type subscription: dora.Ros2Subscription
/// :rtype: None
pub fn merge_external_events(
&mut self,
subscription: &mut Ros2Subscription,
) -> eyre::Result<()> {
pub fn merge_external_events(&self, subscription: &mut Ros2Subscription) -> eyre::Result<()> {
let subscription = subscription.into_stream()?;
let stream = futures::stream::poll_fn(move |cx| {
let s = subscription.as_stream().map(|item| {
@@ -365,12 +403,13 @@ impl Node {
});

// take out the event stream and temporarily replace it with a dummy
let mut inner = self.events.inner.blocking_lock();
let events = std::mem::replace(
&mut self.events.inner,
&mut *inner,
EventsInner::Merged(Box::new(futures::stream::empty())),
);
// update self.events with the merged stream
self.events.inner = EventsInner::Merged(events.merge_external_send(Box::pin(stream)));
*inner = EventsInner::Merged(events.merge_external_send(Box::pin(stream)));

Ok(())
}
@@ -385,13 +424,14 @@ fn err_to_pyany(err: eyre::Report, gil: Python<'_>) -> Py<PyAny> {
}

struct Events {
inner: EventsInner,
inner: Arc<Mutex<EventsInner>>,
_cleanup_handle: NodeCleanupHandle,
}

impl Events {
fn recv(&mut self, timeout: Option<Duration>) -> Option<PyEvent> {
let event = match &mut self.inner {
fn recv(&self, timeout: Option<Duration>) -> Option<PyEvent> {
let mut inner = self.inner.blocking_lock();
let event = match &mut *inner {
EventsInner::Dora(events) => match timeout {
Some(timeout) => events.recv_timeout(timeout).map(MergedEvent::Dora),
None => events.recv().map(MergedEvent::Dora),
@@ -401,8 +441,20 @@ impl Events {
event.map(|event| PyEvent { event })
}

async fn recv_async_timeout(&mut self, timeout: Option<Duration>) -> Option<PyEvent> {
let event = match &mut self.inner {
fn try_recv(&self) -> Result<PyEvent, TryRecvError> {
let mut inner = self.inner.blocking_lock();
let event = match &mut *inner {
EventsInner::Dora(events) => events.try_recv().map(MergedEvent::Dora),
EventsInner::Merged(_events) => {
todo!("try_recv on external event stream is not yet implemented!")
}
};
event.map(|event| PyEvent { event })
}

async fn recv_async_timeout(&self, timeout: Option<Duration>) -> Option<PyEvent> {
let mut inner = self.inner.lock().await;
let event = match &mut *inner {
EventsInner::Dora(events) => match timeout {
Some(timeout) => events
.recv_async_timeout(timeout)
@@ -415,8 +467,9 @@ impl Events {
event.map(|event| PyEvent { event })
}

fn drain(&mut self) -> Option<Vec<PyEvent>> {
match &mut self.inner {
fn drain(&self) -> Option<Vec<PyEvent>> {
let mut inner = self.inner.blocking_lock();
match &mut *inner {
EventsInner::Dora(events) => match events.drain() {
Some(items) => {
return Some(
@@ -434,6 +487,16 @@ impl Events {
}
};
}

fn is_empty(&self) -> bool {
let inner = self.inner.blocking_lock();
match &*inner {
EventsInner::Dora(events) => events.is_empty(),
EventsInner::Merged(_events) => {
todo!("is_empty on external event stream is not yet implemented!")
}
}
}
}

#[allow(clippy::large_enum_variant)]


+ 1
- 1
apis/python/operator/src/lib.rs View File

@@ -40,7 +40,7 @@ impl<T> DelayedCleanup<T> {
CleanupHandle(self.0.clone())
}

pub fn get_mut(&mut self) -> std::sync::MutexGuard<T> {
pub fn get_mut(&self) -> std::sync::MutexGuard<T> {
self.0.try_lock().expect("failed to lock DelayedCleanup")
}
}


+ 6
- 0
apis/rust/node/src/event_stream/mod.rs View File

@@ -334,6 +334,11 @@ impl EventStream {
event.map(Self::convert_event_item)
}

/// Check if there are any buffered events in the scheduler or the receiver.
pub fn is_empty(&self) -> bool {
    // Use logical `&&` instead of bitwise `&` on the two bools: the result is
    // identical for `bool` operands, but `&&` short-circuits (no need to query
    // the receiver when the scheduler still holds buffered events) and states
    // the intent clearly.
    self.scheduler.is_empty() && self.receiver.is_empty()
}

fn add_event(&mut self, event: EventItem) {
self.record_event(&event).unwrap();
self.scheduler.add_event(event);
@@ -506,6 +511,7 @@ impl EventStream {
}

/// No event is available right now or the event stream has been closed.
#[derive(Debug)]
pub enum TryRecvError {
/// No new event is available right now.
Empty,


+ 2
- 0
binaries/cli/Cargo.toml View File

@@ -23,6 +23,7 @@ python = ["pyo3"]
[dependencies]
arrow = { workspace = true }
clap = { version = "4.0.3", features = ["derive"] }
clap_complete = "4.5.61"
eyre = "0.6.8"
dora-core = { workspace = true, features = ["zenoh"] }
dora-message = { workspace = true }
@@ -55,6 +56,7 @@ colored = "2.1.0"
crossterm = "0.29.0"
ratatui = "0.29.0"
itertools = "0.14"
sysinfo = "0.36.1"

env_logger = "0.11.3"
self_update = { version = "0.42.0", features = [


+ 84
- 0
binaries/cli/src/command/completion.rs View File

@@ -0,0 +1,84 @@
use clap::{CommandFactory, ValueEnum};
use clap_complete::Shell;

use crate::command::Executable;
use sysinfo;

// CLI arguments for `dora completion`, which prints a shell completion script.
// NOTE(review): the `after_help` raw string below is emitted verbatim by clap
// as part of `--help` output — its exact contents (including whitespace) are
// user-visible behavior and must not be reformatted.
#[derive(Debug, clap::Args)]
#[command(after_help = r#"
USAGE:
eval $(dora completion) # Auto-detect shell
eval $(dora completion <SHELL>) # Specify shell explicitly

PERSIST COMPLETION:

Bash:
echo 'eval "$(dora completion bash)"' >> ~/.bashrc
# Then restart shell

Zsh:
echo 'eval "$(dora completion zsh)"' >> ~/.zshrc
# Then restart shell

# If you get 'command not found: compdef', add this before the eval line:
autoload -Uz compinit
compinit

Fish:
# Add to ~/.config/fish/config.fish
if status is-interactive
eval "$(dora completion fish)"
end
# Then restart shell

"#)]
pub struct Completion {
    /// The shell to generate the script for
    // `None` triggers auto-detection via the parent process (see `get_shell`).
    #[arg(value_enum)]
    shell: Option<Shell>,
}
impl Executable for Completion {
    /// Writes a completion script for the requested (or auto-detected) shell
    /// to stdout, suitable for `eval`-ing in the user's shell config.
    fn execute(self) -> eyre::Result<()> {
        // Fall back to detecting the invoking shell from the parent process
        // when no shell was given on the command line.
        let shell = match self.shell {
            Some(shell) => shell,
            None => get_shell().map_err(|e| {
                eyre::eyre!(
                    "Please specify the shell via parameter. Unable to get the current shell. ({})",
                    e
                )
            })?,
        };
        let mut command = crate::Args::command();
        clap_complete::generate(shell, &mut command, "dora", &mut std::io::stdout());
        Ok(())
    }
}

/// Tries to detect the shell that invoked us by looking up the name of the
/// parent process of the current process and matching it against the shells
/// known to `clap_complete`.
///
/// Returns an error if any step of the PID → process → parent → name chain
/// fails, or if the parent's name is not a recognized shell.
fn get_shell() -> eyre::Result<Shell> {
    let pid =
        sysinfo::get_current_pid().map_err(|_| eyre::eyre!("Unable to get the current PID"))?;
    // NOTE(review): `System::new_all()` refreshes all system information;
    // refreshing only the process list would likely suffice here — confirm
    // against the sysinfo API before narrowing.
    let system = sysinfo::System::new_all();
    // Use `ok_or_else` so the eyre error is only constructed on the failure
    // path (the original `ok_or(eyre!(..))` built it eagerly every call).
    let process = system
        .process(pid)
        .ok_or_else(|| eyre::eyre!("Unable to get the current process"))?;
    let parent_pid = process
        .parent()
        .ok_or_else(|| eyre::eyre!("Unable to get the parent process PID"))?;
    let parent_process = system
        .process(parent_pid)
        .ok_or_else(|| eyre::eyre!("Unable to get the parent process"))?;
    let shell_name = parent_process
        .name()
        .to_str()
        .ok_or_else(|| eyre::eyre!("Unable to get the shell name"))?;
    // `ignore_case = true` so e.g. "Bash" still matches the `bash` variant.
    Shell::from_str(shell_name, true).map_err(|_| eyre::eyre!("Unable to get the shell type"))
}

+ 19
- 0
binaries/cli/src/command/inspect/mod.rs View File

@@ -0,0 +1,19 @@
mod top;

use clap::Subcommand;

use super::Executable;

// Subcommands of `dora inspect`. The variant doc comments double as clap help
// text, so they are written in user-facing style.
#[derive(Debug, Subcommand)]
pub enum Inspect {
    /// Real-time monitor node resource usage (similar to Linux top)
    Top(top::Top),
}

impl Executable for Inspect {
    /// Dispatches to the selected `dora inspect` subcommand.
    fn execute(self) -> eyre::Result<()> {
        // Single-variant enum, so an irrefutable `let` destructuring is
        // equivalent to the exhaustive match (and will fail to compile if a
        // new variant is added, forcing this dispatch to be revisited).
        let Inspect::Top(args) = self;
        args.execute()
    }
}

+ 442
- 0
binaries/cli/src/command/inspect/top.rs View File

@@ -0,0 +1,442 @@
use std::{
io,
time::{Duration, Instant},
};

use clap::Args;
use crossterm::{
event::{self, DisableMouseCapture, EnableMouseCapture, Event, KeyCode, KeyEventKind},
execute,
terminal::{EnterAlternateScreen, LeaveAlternateScreen, disable_raw_mode, enable_raw_mode},
};
use dora_core::topics::DORA_COORDINATOR_PORT_CONTROL_DEFAULT;
use dora_message::{
cli_to_coordinator::ControlRequest,
coordinator_to_cli::{ControlRequestReply, NodeInfo},
id::NodeId,
};
use eyre::{Context, eyre};
use ratatui::{
Frame, Terminal,
backend::{Backend, CrosstermBackend},
layout::{Constraint, Layout},
style::{Color, Modifier, Style},
widgets::{Block, Borders, Cell, Row, Table, TableState},
};
use uuid::Uuid;

use crate::{LOCALHOST, common::connect_to_coordinator};

use super::super::{Executable, default_tracing};

/// Real-time monitor node resource usage (similar to Linux top)
///
/// Metrics are collected by daemons and reported to the coordinator,
/// so this works for distributed dataflows across multiple machines.
///
/// Note:
/// - Values are averaged over the last refresh period
/// - CPU percentage is of a single core (values can add to more than 100% if multiple cores are used)
/// - Nodes can run on different machines with potentially different CPUs, so percentages are not comparable across machines
// NOTE(review): the doc comment above and the field doc comments below are
// surfaced by clap as `--help` output; edits to them change user-visible text.
#[derive(Debug, Args)]
pub struct Top {
    /// Address of the dora coordinator
    #[clap(long, value_name = "IP", default_value_t = LOCALHOST)]
    pub coordinator_addr: std::net::IpAddr,
    /// Port number of the coordinator control server
    #[clap(long, value_name = "PORT", default_value_t = DORA_COORDINATOR_PORT_CONTROL_DEFAULT)]
    pub coordinator_port: u16,
    /// Refresh interval in seconds
    // Defaults to redrawing/re-querying every 2 seconds.
    #[clap(long, value_name = "SECONDS", default_value_t = 2)]
    pub refresh_interval: u64,
}

impl Executable for Top {
    /// Runs the `dora inspect top` TUI until the user quits or an error occurs.
    ///
    /// Puts the terminal into raw mode + alternate screen, runs the event
    /// loop, then restores the terminal before reporting any error.
    fn execute(self) -> eyre::Result<()> {
        default_tracing()?;

        // Setup terminal: raw mode + alternate screen so the TUI can draw
        // without disturbing the user's scrollback.
        enable_raw_mode()?;
        let mut stdout = io::stdout();
        execute!(stdout, EnterAlternateScreen, EnableMouseCapture)?;
        let backend = CrosstermBackend::new(stdout);
        let mut terminal = Terminal::new(backend)?;

        // Run the event loop; capture the result instead of `?` so the
        // terminal is restored even when the loop fails.
        let refresh_duration = Duration::from_secs(self.refresh_interval);
        let res = run_app(
            &mut terminal,
            self.coordinator_addr,
            self.coordinator_port,
            refresh_duration,
        );

        // Restore the terminal BEFORE reporting any error, otherwise the
        // message would be garbled by raw mode / the alternate screen.
        disable_raw_mode()?;
        execute!(
            terminal.backend_mut(),
            LeaveAlternateScreen,
            DisableMouseCapture
        )?;
        terminal.show_cursor()?;

        // Propagate the app error. The previous version only printed it via
        // `eprintln!` and returned `Ok(())`, so the process exited with
        // status 0 on failure, hiding errors from scripts and CI.
        res
    }
}

/// Table column the node list is currently sorted by.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum SortColumn {
    /// Sort alphabetically by node id.
    Node,
    /// Sort by CPU usage.
    Cpu,
    /// Sort by memory usage (`memory_mb`).
    Memory,
}

/// UI state for the `dora inspect top` table view.
struct App {
    // Rows shown in the table, kept in the current sort order.
    node_stats: Vec<NodeStats>,
    // ratatui selection state for the table widget.
    table_state: TableState,
    // Column used for sorting.
    sort_column: SortColumn,
    // Sort direction; `false` means descending.
    sort_ascending: bool,
}

/// One table row: resource metrics for a single node, as reported by the
/// daemon running it. Metric fields are zero/`None` when the daemon provided
/// no metrics for the node (see `App::update_stats`).
#[derive(Debug, Clone)]
struct NodeStats {
    // Kept for future use (e.g. filtering by dataflow); not displayed yet.
    #[allow(dead_code)]
    dataflow_id: Uuid,
    dataflow_name: String,
    node_id: NodeId,
    // Process id of the node, if metrics were reported.
    pid: Option<u32>,
    // Percentage of a single core (can exceed 100 across multiple cores).
    cpu_usage: f32,
    memory_mb: f64,
    disk_read_mb_s: Option<f64>,
    disk_write_mb_s: Option<f64>,
}

impl App {
    /// Creates the initial UI state: empty table, first row selected,
    /// sorted by CPU usage descending.
    fn new() -> Self {
        let mut table_state = TableState::default();
        table_state.select(Some(0));
        Self {
            node_stats: Vec::new(),
            table_state,
            sort_column: SortColumn::Cpu,
            sort_ascending: false,
        }
    }

    /// Moves the selection one row down, wrapping from the last row to the first.
    fn next(&mut self) {
        if self.node_stats.is_empty() {
            return;
        }
        let i = match self.table_state.selected() {
            Some(i) if i >= self.node_stats.len() - 1 => 0,
            Some(i) => i + 1,
            None => 0,
        };
        self.table_state.select(Some(i));
    }

    /// Moves the selection one row up, wrapping from the first row to the last.
    fn previous(&mut self) {
        if self.node_stats.is_empty() {
            return;
        }
        let i = match self.table_state.selected() {
            Some(0) => self.node_stats.len() - 1,
            Some(i) => i - 1,
            None => 0,
        };
        self.table_state.select(Some(i));
    }

    /// Selects `column` for sorting. Re-selecting the current column flips
    /// the direction; switching to a new column resets to descending.
    fn toggle_sort(&mut self, column: SortColumn) {
        if self.sort_column == column {
            self.sort_ascending = !self.sort_ascending;
        } else {
            self.sort_column = column;
            self.sort_ascending = false;
        }
        self.sort();
    }

    /// Re-sorts `node_stats` by the current column and direction.
    ///
    /// The comparator is built once from the selected column instead of
    /// duplicating the `sort_by` call and the ascending/descending handling
    /// in three near-identical match arms, as the original did.
    fn sort(&mut self) {
        let column = self.sort_column;
        let ascending = self.sort_ascending;
        self.node_stats.sort_by(|a, b| {
            let cmp = match column {
                SortColumn::Node => a.node_id.as_ref().cmp(b.node_id.as_ref()),
                // Float comparisons treat NaN as equal rather than panicking.
                SortColumn::Cpu => a
                    .cpu_usage
                    .partial_cmp(&b.cpu_usage)
                    .unwrap_or(std::cmp::Ordering::Equal),
                SortColumn::Memory => a
                    .memory_mb
                    .partial_cmp(&b.memory_mb)
                    .unwrap_or(std::cmp::Ordering::Equal),
            };
            if ascending { cmp } else { cmp.reverse() }
        });
    }

    /// Replaces the table contents with fresh per-node info from the
    /// coordinator and re-applies the current sort.
    fn update_stats(&mut self, node_infos: Vec<NodeInfo>) {
        self.node_stats.clear();

        // Use daemon-provided metrics (works for distributed nodes!). Nodes
        // without metrics still get a row, with zeroed/unknown values.
        for node_info in node_infos {
            let (pid, cpu_usage, memory_mb, disk_read_mb_s, disk_write_mb_s) =
                if let Some(metrics) = &node_info.metrics {
                    (
                        Some(metrics.pid),
                        metrics.cpu_usage,
                        metrics.memory_mb,
                        metrics.disk_read_mb_s,
                        metrics.disk_write_mb_s,
                    )
                } else {
                    (None, 0.0, 0.0, None, None)
                };

            self.node_stats.push(NodeStats {
                dataflow_id: node_info.dataflow_id,
                dataflow_name: node_info
                    .dataflow_name
                    .unwrap_or_else(|| "<unnamed>".to_string()),
                node_id: node_info.node_id,
                pid,
                cpu_usage,
                memory_mb,
                disk_read_mb_s,
                disk_write_mb_s,
            });
        }

        self.sort();
    }
}

fn run_app<B: Backend>(
terminal: &mut Terminal<B>,
coordinator_addr: std::net::IpAddr,
coordinator_port: u16,
refresh_duration: Duration,
) -> eyre::Result<()> {
let mut app = App::new();
let mut last_update = Instant::now();
let mut node_infos: Vec<NodeInfo> = Vec::new();

// Reuse coordinator connection
let mut session = connect_to_coordinator((coordinator_addr, coordinator_port).into())
.wrap_err("Failed to connect to coordinator")?;

// Query node info once initially
let request = ControlRequest::GetNodeInfo;
let reply_raw = session
.request(&serde_json::to_vec(&request).unwrap())
.wrap_err("failed to send initial request to coordinator")?;

let reply: ControlRequestReply =
serde_json::from_slice(&reply_raw).wrap_err("failed to parse reply")?;

node_infos = match reply {
ControlRequestReply::NodeInfoList(infos) => infos,
ControlRequestReply::Error(err) => {
return Err(eyre!("coordinator error: {err}"));
}
_ => {
return Err(eyre!("unexpected reply from coordinator"));
}
};

loop {
terminal.draw(|f| ui(f, &mut app, refresh_duration))?;

let timeout = refresh_duration
.checked_sub(last_update.elapsed())
.unwrap_or(Duration::from_millis(100));

if event::poll(timeout)? {
if let Event::Key(key) = event::read()? {
if key.kind == KeyEventKind::Press {
match key.code {
KeyCode::Char('q') | KeyCode::Esc => {
return Ok(());
}
KeyCode::Down | KeyCode::Char('j') => {
app.next();
}
KeyCode::Up | KeyCode::Char('k') => {
app.previous();
}
KeyCode::Char('n') => {
app.toggle_sort(SortColumn::Node);
}
KeyCode::Char('c') => {
app.toggle_sort(SortColumn::Cpu);
}
KeyCode::Char('m') => {
app.toggle_sort(SortColumn::Memory);
}
KeyCode::Char('r') => {
// Force refresh by resetting last_update
last_update = Instant::now()
.checked_sub(refresh_duration)
.unwrap_or(Instant::now());
}
_ => {}
}
}
}
}

// Update data if refresh interval has passed
if last_update.elapsed() >= refresh_duration {
// Query node info every refresh interval to get updated metrics
let request = ControlRequest::GetNodeInfo;
let reply_raw = session
.request(&serde_json::to_vec(&request).unwrap())
.wrap_err("failed to send request to coordinator")?;

let reply: ControlRequestReply =
serde_json::from_slice(&reply_raw).wrap_err("failed to parse reply")?;

match reply {
ControlRequestReply::NodeInfoList(infos) => {
node_infos = infos;
}
ControlRequestReply::Error(err) => {
return Err(eyre!("coordinator error: {err}"));
}
_ => {
return Err(eyre!("unexpected reply from coordinator"));
}
}

// Update stats with current node info
app.update_stats(node_infos.clone());
last_update = Instant::now();
}
}
}

fn ui(f: &mut Frame, app: &mut App, refresh_duration: Duration) {
let chunks = Layout::default()
.constraints([Constraint::Min(0)])
.split(f.area());

let sort_indicator = |col: SortColumn| {
if app.sort_column == col {
if app.sort_ascending { " ▲" } else { " ▼" }
} else {
""
}
};

let header_strings = vec![
format!("NODE{}", sort_indicator(SortColumn::Node)),
"DATAFLOW".to_string(),
"PID".to_string(),
format!("CPU%{}", sort_indicator(SortColumn::Cpu)),
format!("MEMORY (MB){}", sort_indicator(SortColumn::Memory)),
"I/O READ (MB/s)".to_string(),
"I/O WRITE (MB/s)".to_string(),
];

let header_cells = header_strings.iter().map(|h| {
Cell::from(h.as_str()).style(
Style::default()
.fg(Color::Yellow)
.add_modifier(Modifier::BOLD),
)
});

let header = Row::new(header_cells).height(1).bottom_margin(1);

let rows = app.node_stats.iter().map(|stats| {
let cells = vec![
Cell::from(stats.node_id.as_ref()),
Cell::from(stats.dataflow_name.as_str()),
Cell::from(
stats
.pid
.map(|p| p.to_string())
.unwrap_or_else(|| "N/A".to_string()),
),
Cell::from(format!("{:.1}%", stats.cpu_usage)),
Cell::from(format!("{:.1}", stats.memory_mb)),
Cell::from(
stats
.disk_read_mb_s
.map(|v| format!("{:.1}", v))
.unwrap_or_else(|| "N/A".to_string()),
),
Cell::from(
stats
.disk_write_mb_s
.map(|v| format!("{:.1}", v))
.unwrap_or_else(|| "N/A".to_string()),
),
];
Row::new(cells).height(1)
});

let widths = [
Constraint::Percentage(20),
Constraint::Percentage(20),
Constraint::Percentage(8),
Constraint::Percentage(12),
Constraint::Percentage(12),
Constraint::Percentage(14),
Constraint::Percentage(14),
];

let title = format!(
" Dora Inspect Top - Refreshing every {}s (q: quit, n/c/m: sort, r: refresh nodes) ",
refresh_duration.as_secs()
);

let table = Table::new(rows, widths)
.header(header)
.block(Block::default().borders(Borders::ALL).title(title))
.row_highlight_style(
Style::default()
.bg(Color::DarkGray)
.add_modifier(Modifier::BOLD),
)
.highlight_symbol(">> ");

f.render_stateful_widget(table, chunks[0], &mut app.table_state);
}

+ 14
- 0
binaries/cli/src/command/mod.rs View File

@@ -1,12 +1,15 @@
mod build;
mod check;
mod completion;
mod coordinator;
mod daemon;
mod destroy;
mod graph;
mod inspect;
mod list;
mod logs;
mod new;
mod node;
mod run;
mod runtime;
mod self_;
@@ -20,14 +23,17 @@ pub use run::{run, run_func};

use build::Build;
use check::Check;
use completion::Completion;
use coordinator::Coordinator;
use daemon::Daemon;
use destroy::Destroy;
use eyre::Context;
use graph::Graph;
use inspect::Inspect;
use list::ListArgs;
use logs::LogsArgs;
use new::NewArgs;
use node::Node;
use run::Run;
use runtime::Runtime;
use self_::SelfSubCommand;
@@ -58,12 +64,17 @@ pub enum Command {
// Stats,
// Get,
// Upgrade,
#[clap(subcommand)]
Inspect(Inspect),
Daemon(Daemon),
Runtime(Runtime),
Coordinator(Coordinator),
#[clap(subcommand)]
Topic(Topic),
#[clap(subcommand)]
Node(Node),

Completion(Completion),
Self_ {
#[clap(subcommand)]
command: SelfSubCommand,
@@ -102,10 +113,13 @@ impl Executable for Command {
Command::Stop(args) => args.execute(),
Command::List(args) => args.execute(),
Command::Logs(args) => args.execute(),
Command::Inspect(args) => args.execute(),
Command::Daemon(args) => args.execute(),
Command::Self_ { command } => command.execute(),
Command::Runtime(args) => args.execute(),
Command::Topic(args) => args.execute(),
Command::Node(args) => args.execute(),
Command::Completion(args) => args.execute(),
}
}
}

+ 186
- 0
binaries/cli/src/command/node/list.rs View File

@@ -0,0 +1,186 @@
use std::io::Write;

use clap::Args;
use serde::Serialize;
use tabwriter::TabWriter;
use uuid::Uuid;

use crate::{
command::{Executable, default_tracing},
common::CoordinatorOptions,
formatting::OutputFormat,
};
use communication_layer_request_reply::TcpRequestReplyConnection;
use dora_message::{
cli_to_coordinator::ControlRequest,
coordinator_to_cli::{ControlRequestReply, NodeInfo},
};
use eyre::{Context, bail};

/// List all currently running nodes and their status.
///
/// Examples:
///
/// List all nodes:
/// dora node list
///
/// List nodes in a specific dataflow:
/// dora node list --dataflow my-dataflow
///
/// List nodes as JSON:
/// dora node list --format json
#[derive(Debug, Args)]
#[clap(verbatim_doc_comment)]
pub struct List {
/// Filter by dataflow name or UUID
#[clap(long, short = 'd', value_name = "NAME_OR_UUID")]
pub dataflow: Option<String>,

/// Output format
#[clap(long, value_name = "FORMAT", default_value_t = OutputFormat::Table)]
pub format: OutputFormat,

#[clap(flatten)]
coordinator: CoordinatorOptions,
}

impl Executable for List {
fn execute(self) -> eyre::Result<()> {
default_tracing()?;

let mut session = self.coordinator.connect()?;
list(session.as_mut(), self.dataflow, self.format)
}
}

#[derive(Serialize)]
struct OutputEntry {
node: String,
status: String,
pid: String,
cpu: String,
memory: String,
#[serde(skip_serializing_if = "Option::is_none")]
dataflow: Option<String>,
}

fn list(
session: &mut TcpRequestReplyConnection,
dataflow_filter: Option<String>,
format: OutputFormat,
) -> eyre::Result<()> {
// Request node information from coordinator
let reply_raw = session
.request(&serde_json::to_vec(&ControlRequest::GetNodeInfo).unwrap())
.wrap_err("failed to send GetNodeInfo request")?;

let reply: ControlRequestReply =
serde_json::from_slice(&reply_raw).wrap_err("failed to parse reply")?;

let node_infos = match reply {
ControlRequestReply::NodeInfoList(infos) => infos,
ControlRequestReply::Error(err) => bail!("{err}"),
other => bail!("unexpected reply: {other:?}"),
};

// Filter by dataflow if specified
let filtered_nodes: Vec<NodeInfo> = if let Some(ref filter) = dataflow_filter {
// Try to parse as UUID first
let filter_uuid = Uuid::parse_str(filter).ok();

node_infos
.into_iter()
.filter(|node| {
// Match by UUID or name
if let Some(uuid) = filter_uuid {
node.dataflow_id == uuid
} else {
node.dataflow_name.as_deref() == Some(filter)
}
})
.collect()
} else {
node_infos
};

// Convert to output entries
let entries: Vec<OutputEntry> = filtered_nodes
.into_iter()
.map(|node| {
let (status, pid, cpu, memory) = if let Some(metrics) = node.metrics {
(
"Running".to_string(),
metrics.pid.to_string(),
format!("{:.1}%", metrics.cpu_usage),
format!("{:.0} MB", metrics.memory_mb),
)
} else {
// Node exists but no metrics available (might be starting or error state)
(
"Unknown".to_string(),
"-".to_string(),
"-".to_string(),
"-".to_string(),
)
};

OutputEntry {
node: node.node_id.to_string(),
status,
pid,
cpu,
memory,
dataflow: if dataflow_filter.is_none() {
Some(
node.dataflow_name
.unwrap_or_else(|| node.dataflow_id.to_string()),
)
} else {
None
},
}
})
.collect();

match format {
OutputFormat::Table => {
let mut tw = TabWriter::new(std::io::stdout().lock());

// Write header
if dataflow_filter.is_none() {
tw.write_all(b"NODE\tSTATUS\tPID\tCPU\tMEMORY\tDATAFLOW\n")?;
} else {
tw.write_all(b"NODE\tSTATUS\tPID\tCPU\tMEMORY\n")?;
}

// Write entries
for entry in entries {
if let Some(ref dataflow) = entry.dataflow {
tw.write_all(
format!(
"{}\t{}\t{}\t{}\t{}\t{}\n",
entry.node, entry.status, entry.pid, entry.cpu, entry.memory, dataflow
)
.as_bytes(),
)?;
} else {
tw.write_all(
format!(
"{}\t{}\t{}\t{}\t{}\n",
entry.node, entry.status, entry.pid, entry.cpu, entry.memory
)
.as_bytes(),
)?;
}
}
tw.flush()?;
}
OutputFormat::Json => {
for entry in entries {
println!("{}", serde_json::to_string(&entry)?);
}
}
}

Ok(())
}

+ 19
- 0
binaries/cli/src/command/node/mod.rs View File

@@ -0,0 +1,19 @@
use crate::command::Executable;

mod list;

pub use list::List;

/// Manage and inspect dataflow nodes.
#[derive(Debug, clap::Subcommand)]
pub enum Node {
List(List),
}

impl Executable for Node {
fn execute(self) -> eyre::Result<()> {
match self {
Node::List(cmd) => cmd.execute(),
}
}
}

+ 308
- 53
binaries/cli/src/command/topic/hz.rs View File

@@ -22,10 +22,11 @@ use crate::{
common::CoordinatorOptions,
};

/// Measure topic publish frequency (Hz).
/// Measure topic publish intervals.
///
/// Subscribe to one or more outputs and display per-topic statistics (average,
/// min, max, stddev) over a sliding window.
/// Subscribe to one or more outputs and display per-topic interval statistics
/// (average, min, max, stddev) over a sliding window. Average frequency (Hz)
/// is derived from the average interval.
///
/// If no `DATA` is provided, all outputs from the selected dataflow will be
/// echoed.
@@ -117,46 +118,55 @@ impl HzStats {
}
}

fn calculate(&self) -> Option<Stats> {
let intervals = self
.timestamps
fn intervals_ms(&self) -> Vec<f64> {
// Return inter-arrival times in milliseconds for the current window
self.timestamps
.lock()
.unwrap()
.iter()
.tuple_windows()
.filter_map(|(a, b)| {
let interval = b.duration_since(*a).as_secs_f64();
if interval > 0.0 {
Some(1.0 / interval)
} else {
None
}
let dt = b.duration_since(*a).as_secs_f64() * 1000.0;
if dt > 0.0 { Some(dt) } else { None }
})
.collect::<Vec<_>>();
.collect()
}

fn calculate(&self) -> Option<Stats> {
let intervals = self.intervals_ms();
if intervals.is_empty() {
return None;
}

let sum: f64 = intervals.iter().sum();
let avg = sum / intervals.len() as f64;
let avg_ms = sum / intervals.len() as f64;

let min = intervals.iter().cloned().fold(f64::INFINITY, f64::min);
let max = intervals.iter().cloned().fold(f64::NEG_INFINITY, f64::max);
let min_ms = intervals.iter().cloned().fold(f64::INFINITY, f64::min);
let max_ms = intervals.iter().cloned().fold(f64::NEG_INFINITY, f64::max);

let variance =
intervals.iter().map(|hz| (hz - avg).powi(2)).sum::<f64>() / intervals.len() as f64;
let std = variance.sqrt();
intervals.iter().map(|x| (x - avg_ms).powi(2)).sum::<f64>() / intervals.len() as f64;
let std_ms = variance.sqrt();

let avg_hz = if avg_ms > 0.0 { 1000.0 / avg_ms } else { 0.0 };

Some(Stats { avg, min, max, std })
Some(Stats {
avg_ms,
avg_hz,
min_ms,
max_ms,
std_ms,
})
}
}

#[derive(Debug)]
struct Stats {
avg: f64,
min: f64,
max: f64,
std: f64,
avg_ms: f64,
avg_hz: f64,
min_ms: f64,
max_ms: f64,
std_ms: f64,
}

async fn run_hz(
@@ -166,31 +176,101 @@ async fn run_hz(
outputs: BTreeSet<TopicIdentifier>,
coordinator_addr: IpAddr,
) -> eyre::Result<()> {
let stats = outputs
// Add a synthetic aggregate entry ("<ALL>") that merges all outputs
let mut topics_with_all = Vec::with_capacity(outputs.len() + 1);
topics_with_all.push(TopicIdentifier {
node_id: "<ALL>".to_string().into(),
data_id: "*".to_string().into(),
});
topics_with_all.extend(outputs.into_iter());

let stats = topics_with_all
.iter()
.map(|topic| (topic, Arc::new(HzStats::new(window))))
.collect::<Vec<_>>();

terminal.draw(|f| ui(f, &stats))?;
let mut selected: usize = 0;
// Sub-window for instantaneous rate (Hz)
let sub_window = Duration::from_millis(1000);
// Keep a flowing sparkline per topic for recent rates
let mut rate_series: Vec<VecDeque<u64>> = vec![VecDeque::with_capacity(240); stats.len()];
// Start time to decide whether full window elapsed
let start = Instant::now();

terminal.draw(|f| {
ui(
f,
&stats,
selected,
&rate_series,
start,
Duration::from_secs(window as u64),
)
})?;

let zenoh_session = open_zenoh_session(Some(coordinator_addr))
.await
.context("failed to open zenoh session")?;

// Spawn subscribers for each output
for (topic, hz_stats) in &stats {
// Aggregator is at index 0
let all_stats = stats[0].1.clone();
for (i, (topic, hz_stats)) in stats.iter().enumerate() {
if i == 0 {
continue;
}
let zenoh_session = zenoh_session.clone();
let topic = (*topic).clone();
let hz_stats = hz_stats.clone();
let all_stats_cloned = all_stats.clone();
tokio::spawn(async move {
if let Err(e) = subscribe_output(zenoh_session, dataflow_id, &topic, hz_stats).await {
if let Err(e) = subscribe_output(
zenoh_session,
dataflow_id,
&topic,
hz_stats,
Some(all_stats_cloned),
)
.await
{
eprintln!("Error subscribing to {topic}: {e}");
}
});
}

loop {
terminal.draw(|f| ui(f, &stats))?;
// Update per-topic instantaneous rate and append to series
let now = Instant::now();
for (i, (_topic, s)) in stats.iter().enumerate() {
// count messages within sub_window
let cutoff = now - sub_window;
let mut count = 0usize;
let ts = s.timestamps.lock().unwrap();
for &t in ts.iter().rev() {
if t < cutoff {
break;
}
count += 1;
}
let hz = (count as f64) / sub_window.as_secs_f64();
let v = hz.max(0.0).round() as u64;
let buf = &mut rate_series[i];
if buf.len() >= 240 {
buf.pop_front();
}
buf.push_back(v);
}

terminal.draw(|f| {
ui(
f,
&stats,
selected,
&rate_series,
start,
Duration::from_secs(window as u64),
)
})?;

if crossterm::event::poll(Duration::from_millis(50))? {
if let Event::Key(key) = crossterm::event::read()? {
@@ -200,6 +280,24 @@ async fn run_hz(
{
break;
}

match key.code {
KeyCode::Up => {
if selected == 0 {
selected = stats.len().saturating_sub(1);
} else {
selected -= 1;
}
}
KeyCode::Down => {
if stats.is_empty() {
selected = 0;
} else {
selected = (selected + 1) % stats.len();
}
}
_ => {}
}
}
}
}
@@ -212,6 +310,7 @@ async fn subscribe_output(
dataflow_id: Uuid,
topic: &TopicIdentifier,
hz_stats: Arc<HzStats>,
aggregate: Option<Arc<HzStats>>,
) -> eyre::Result<()> {
let subscribe_topic = zenoh_output_publish_topic(dataflow_id, &topic.node_id, &topic.data_id);
let subscriber = zenoh_session
@@ -229,7 +328,11 @@ async fn subscribe_output(

match event.inner {
InterDaemonEvent::Output { .. } => {
hz_stats.record(Instant::now());
let now = Instant::now();
hz_stats.record(now);
if let Some(all) = &aggregate {
all.record(now);
}
}
InterDaemonEvent::OutputClosed { .. } => {
break;
@@ -240,34 +343,69 @@ async fn subscribe_output(
Ok(())
}

fn ui(f: &mut Frame<'_>, stats: &[(&TopicIdentifier, Arc<HzStats>)]) {
let header = Row::new(["Output", "Avg (Hz)", "Min (Hz)", "Max (Hz)", "Std (Hz)"])
.style(Style::default().fg(Color::White).bg(Color::Blue).bold())
.height(1);

let rows = stats.iter().map(|(output_name, hz_stats)| {
if let Some(stats) = hz_stats.calculate() {
Row::new([
output_name.to_string(),
format!("{:.2}", stats.avg),
format!("{:.2}", stats.min),
format!("{:.2}", stats.max),
format!("{:.2}", stats.std),
])
} else {
Row::new(
iter::once(Cow::Owned(output_name.to_string()))
.chain(iter::repeat_n(Cow::Borrowed("-"), 4)),
)
}
.height(1)
});
fn ui(
f: &mut Frame<'_>,
stats: &[(&TopicIdentifier, Arc<HzStats>)],
selected: usize,
rate_series: &[VecDeque<u64>],
start: Instant,
window_dur: Duration,
) {
// Layout: table | charts | footer
let chunks = Layout::default()
.direction(Direction::Vertical)
.constraints([
Constraint::Percentage(55),
Constraint::Percentage(44),
Constraint::Length(1),
])
.split(f.area());

// Table header: interval stats in ms + derived avg Hz
let header = Row::new([
"Output", "Avg (ms)", "Avg (Hz)", "Min (ms)", "Max (ms)", "Std (ms)",
])
.style(Style::default().fg(Color::White).bg(Color::Blue).bold())
.height(1);

let rows = stats
.iter()
.enumerate()
.map(|(i, (output_name, hz_stats))| {
if let Some(s) = hz_stats.calculate() {
Row::new([
output_name.to_string(),
format!("{:.2}", s.avg_ms),
format!("{:.2}", s.avg_hz),
format!("{:.2}", s.min_ms),
format!("{:.2}", s.max_ms),
format!("{:.2}", s.std_ms),
])
.style(if i == selected {
Style::default().fg(Color::Yellow)
} else {
Style::default()
})
} else {
Row::new(
iter::once(Cow::Owned(output_name.to_string()))
.chain(iter::repeat_n(Cow::Borrowed("-"), 5)),
)
.style(if i == selected {
Style::default().fg(Color::Yellow)
} else {
Style::default()
})
}
.height(1)
});

let table = Table::new(
rows,
[
Constraint::Fill(1),
Constraint::Length(12),
Constraint::Length(10),
Constraint::Length(12),
Constraint::Length(12),
Constraint::Length(12),
@@ -283,9 +421,126 @@ fn ui(f: &mut Frame<'_>, stats: &[(&TopicIdentifier, Arc<HzStats>)]) {

f.render_widget(table, chunks[0]);

// Charts area split horizontally
let chart_chunks = Layout::default()
.direction(Direction::Horizontal)
.constraints([Constraint::Percentage(50), Constraint::Percentage(50)])
.split(chunks[1]);

// Draw charts for the selected topic if available
if let Some((name, selected_stats)) = stats.get(selected) {
// Prepare data
let intervals = selected_stats.intervals_ms();
let now = Instant::now();

// Left: Rolling rate sparkline (Hz within sub-window)
let mut series: Vec<u64> = rate_series
.get(selected)
.map(|d| d.iter().copied().collect())
.unwrap_or_default();
if series.is_empty() {
// Show hint while not enough samples
let info = Paragraph::new("Waiting for data...")
.style(Style::default().fg(Color::Gray).italic())
.block(
Block::default()
.title("Recent Rate (Hz)")
.borders(Borders::ALL),
);
f.render_widget(info, chart_chunks[0]);
} else {
// Fit series to available width
let w = chart_chunks[0].width.saturating_sub(2) as usize; // borders
if series.len() > w {
series = series[series.len() - w..].to_vec();
}
let spark = Sparkline::default()
.data(&series)
.style(Style::default().fg(Color::Cyan))
.block(
Block::default()
.title(format!("Recent Rate (Hz) — {}", name))
.borders(Borders::ALL),
);
f.render_widget(spark, chart_chunks[0]);
}

// Right: Histogram (ms) using BarChart
if intervals.is_empty() {
let info = Paragraph::new("No samples for histogram")
.style(Style::default().fg(Color::Gray).italic())
.block(
Block::default()
.title("Histogram (ms)")
.borders(Borders::ALL),
);
f.render_widget(info, chart_chunks[1]);
} else {
let min = intervals.iter().cloned().fold(f64::INFINITY, f64::min);
let max = intervals.iter().cloned().fold(f64::NEG_INFINITY, f64::max);
let bins = 10usize
.max((chart_chunks[1].width as usize).saturating_sub(8) / 4)
.min(40);
let span = (max - min).max(1e-9);
let step = span / bins as f64;
let mut counts = vec![0u64; bins];
for &v in &intervals {
let mut idx = ((v - min) / step).floor() as usize;
if idx >= bins {
idx = bins - 1;
}
counts[idx] += 1;
}

let bars: Vec<Bar<'_>> = counts
.iter()
.enumerate()
.map(|(i, &c)| {
let lo = min + i as f64 * step;
let hi = lo + step;
Bar::default()
.value(c)
.label(format!("{:.3}-{:.3}", lo, hi).into())
.style(Style::default().fg(Color::Green))
})
.collect();

let group = BarGroup::default().bars(&bars);
let barchart = BarChart::default()
.block(
Block::default()
.title(format!("Histogram (ms) — min={:.2}, max={:.2}", min, max))
.borders(Borders::ALL),
)
.data(group)
.bar_width(3)
.bar_gap(1);
f.render_widget(barchart, chart_chunks[1]);
}

// If full window time not elapsed since start, render hint
if now.duration_since(start) + Duration::from_millis(1) < window_dur {
let warn = Paragraph::new(format!(
"Filling window: {:.0}/{:.0} ms",
now.duration_since(start).as_secs_f64() * 1000.0,
window_dur.as_secs_f64() * 1000.0
))
.style(Style::default().fg(Color::Yellow))
.alignment(Alignment::Center);
f.render_widget(warn, chunks[1]);
}
} else {
// Nothing selected or empty stats
let info = Paragraph::new("No topics selected")
.style(Style::default().fg(Color::Gray))
.alignment(Alignment::Center)
.block(Block::default().borders(Borders::ALL));
f.render_widget(info, chunks[1]);
}

// Footer with key hints
let footer = Paragraph::new("Exit: q | Ctrl-C | Esc")
let footer = Paragraph::new("Up/Down: Select | Exit: q / Ctrl-C / Esc")
.style(Style::default().fg(Color::Yellow))
.alignment(Alignment::Center);
f.render_widget(footer, chunks[1]);
f.render_widget(footer, chunks[2]);
}

+ 60
- 0
binaries/coordinator/src/lib.rs View File

@@ -773,6 +773,43 @@ async fn start_inner(
},
));
}
ControlRequest::GetNodeInfo => {
use dora_message::coordinator_to_cli::{NodeInfo, NodeMetricsInfo};

let mut node_infos = Vec::new();
for dataflow in running_dataflows.values() {
for (node_id, _node) in &dataflow.nodes {
// Get the specific daemon this node is running on
if let Some(daemon_id) = dataflow.node_to_daemon.get(node_id) {
// Get metrics if available
let metrics = dataflow.node_metrics.get(node_id).map(|m| {
NodeMetricsInfo {
pid: m.pid,
cpu_usage: m.cpu_usage,
// Use 1000 for MB (megabytes) instead of 1024 (mebibytes)
memory_mb: m.memory_bytes as f64 / 1000.0 / 1000.0,
disk_read_mb_s: m
.disk_read_bytes
.map(|b| b as f64 / 1000.0 / 1000.0),
disk_write_mb_s: m
.disk_write_bytes
.map(|b| b as f64 / 1000.0 / 1000.0),
}
});

node_infos.push(NodeInfo {
dataflow_id: dataflow.uuid,
dataflow_name: dataflow.name.clone(),
node_id: node_id.clone(),
daemon_id: daemon_id.clone(),
metrics,
});
}
}
}
let _ = reply_sender
.send(Ok(ControlRequestReply::NodeInfoList(node_infos)));
}
}
}
ControlEvent::Error(err) => tracing::error!("{err:?}"),
@@ -885,6 +922,17 @@ async fn start_inner(
tracing::info!("Daemon `{daemon_id}` exited");
daemon_connections.remove(&daemon_id);
}
Event::NodeMetrics {
dataflow_id,
metrics,
} => {
// Store metrics for this dataflow
if let Some(dataflow) = running_dataflows.get_mut(&dataflow_id) {
for (node_id, node_metrics) in metrics {
dataflow.node_metrics.insert(node_id, node_metrics);
}
}
}
Event::DataflowBuildResult {
build_id,
daemon_id,
@@ -1063,6 +1111,10 @@ struct RunningDataflow {
pending_daemons: BTreeSet<DaemonId>,
exited_before_subscribe: Vec<NodeId>,
nodes: BTreeMap<NodeId, ResolvedNode>,
/// Maps each node to the daemon it's running on
node_to_daemon: BTreeMap<NodeId, DaemonId>,
/// Latest metrics for each node (from daemons)
node_metrics: BTreeMap<NodeId, dora_message::daemon_to_coordinator::NodeMetrics>,

spawn_result: CachedResult,
stop_reply_senders: Vec<tokio::sync::oneshot::Sender<eyre::Result<ControlRequestReply>>>,
@@ -1454,6 +1506,7 @@ async fn start_dataflow(
uuid,
daemons,
nodes,
node_to_daemon,
} = spawn_dataflow(
build_id,
session_id,
@@ -1477,6 +1530,8 @@ async fn start_dataflow(
exited_before_subscribe: Default::default(),
daemons: daemons.clone(),
nodes,
node_to_daemon,
node_metrics: BTreeMap::new(),
spawn_result: CachedResult::default(),
stop_reply_senders: Vec::new(),
buffered_log_messages: Vec::new(),
@@ -1566,6 +1621,10 @@ pub enum Event {
daemon_id: DaemonId,
result: eyre::Result<()>,
},
NodeMetrics {
dataflow_id: uuid::Uuid,
metrics: BTreeMap<NodeId, dora_message::daemon_to_coordinator::NodeMetrics>,
},
}

impl Event {
@@ -1592,6 +1651,7 @@ impl Event {
Event::DaemonExit { .. } => "DaemonExit",
Event::DataflowBuildResult { .. } => "DataflowBuildResult",
Event::DataflowSpawnResult { .. } => "DataflowSpawnResult",
Event::NodeMetrics { .. } => "NodeMetrics",
}
}
}


+ 25
- 8
binaries/coordinator/src/listener.rs View File

@@ -42,14 +42,19 @@ pub async fn handle_connection(
break;
}
};
let message: Timestamped<CoordinatorRequest> =
match serde_json::from_slice(&raw).wrap_err("failed to deserialize message") {
Ok(e) => e,
Err(err) => {
tracing::warn!("{err:?}");
continue;
}
};
let message: Timestamped<CoordinatorRequest> = match serde_json::from_slice(&raw)
.wrap_err_with(|| {
format!(
"failed to deserialize message: {}",
String::from_utf8_lossy(&raw)
)
}) {
Ok(e) => e,
Err(err) => {
tracing::warn!("{err:?}");
continue;
}
};

if let Err(err) = clock.update_with_timestamp(&message.timestamp) {
tracing::warn!("failed to update coordinator clock: {err}");
@@ -112,6 +117,18 @@ pub async fn handle_connection(
break;
}
}
DaemonEvent::NodeMetrics {
dataflow_id,
metrics,
} => {
let event = Event::NodeMetrics {
dataflow_id,
metrics,
};
if events_tx.send(event).await.is_err() {
break;
}
}
DaemonEvent::BuildResult { build_id, result } => {
let event = Event::DataflowBuildResult {
build_id,


+ 10
- 1
binaries/coordinator/src/run/mod.rs View File

@@ -39,6 +39,8 @@ pub(super) async fn spawn_dataflow(
.into_group_map_by(|n| n.deploy.as_ref().and_then(|d| d.machine.as_ref()));

let mut daemons = BTreeSet::new();
let mut node_to_daemon = BTreeMap::new();

for (machine, nodes_on_machine) in &nodes_by_daemon {
let spawn_nodes = nodes_on_machine.iter().map(|n| n.id.clone()).collect();
tracing::debug!(
@@ -65,7 +67,12 @@ pub(super) async fn spawn_dataflow(
spawn_dataflow_on_machine(daemon_connections, machine.map(|m| m.as_str()), &message)
.await
.wrap_err_with(|| format!("failed to spawn dataflow on machine `{machine:?}`"))?;
daemons.insert(daemon_id);
daemons.insert(daemon_id.clone());

// Map each node on this machine to its daemon
for node in nodes_on_machine {
node_to_daemon.insert(node.id.clone(), daemon_id.clone());
}
}

tracing::info!("successfully triggered dataflow spawn `{uuid}`",);
@@ -74,6 +81,7 @@ pub(super) async fn spawn_dataflow(
uuid,
daemons,
nodes,
node_to_daemon,
})
}

@@ -119,4 +127,5 @@ pub struct SpawnedDataflow {
pub uuid: Uuid,
pub daemons: BTreeSet<DaemonId>,
pub nodes: BTreeMap<NodeId, ResolvedNode>,
pub node_to_daemon: BTreeMap<NodeId, DaemonId>,
}

+ 1
- 0
binaries/daemon/Cargo.toml View File

@@ -53,6 +53,7 @@ process-wrap = { version = "8.2.1", features = ["tokio1"] }
memchr = "2.7.5"
chrono = { version = "0.4", features = ["serde"] }
shellexpand = "3.1.1"
sysinfo = "0.36.1"
clonable-command = "0.2.0"

[target.'cfg(windows)'.dependencies]


+ 120
- 2
binaries/daemon/src/lib.rs View File

@@ -52,7 +52,7 @@ use std::{
pin::pin,
sync::{
Arc,
atomic::{self, AtomicBool},
atomic::{self, AtomicBool, AtomicU32},
},
time::{Duration, Instant},
};
@@ -117,6 +117,8 @@ pub struct Daemon {
sessions: BTreeMap<SessionId, BuildId>,
builds: BTreeMap<BuildId, BuildInfo>,
git_manager: GitManager,
/// System instance for metrics collection (reused across calls)
metrics_system: sysinfo::System,
}

type DaemonRunResult = BTreeMap<Uuid, BTreeMap<NodeId, Result<(), NodeError>>>;
@@ -357,6 +359,7 @@ impl Daemon {
git_manager: Default::default(),
builds,
sessions: Default::default(),
metrics_system: sysinfo::System::new(),
};

let dora_events = ReceiverStream::new(dora_events_rx);
@@ -368,7 +371,23 @@ impl Daemon {
inner: Event::HeartbeatInterval,
timestamp: watchdog_clock.new_timestamp(),
});
let events = (external_events, dora_events, watchdog_interval).merge();

let metrics_clock = daemon.clock.clone();
let metrics_interval = tokio_stream::wrappers::IntervalStream::new(tokio::time::interval(
Duration::from_secs(2), // Collect metrics every 2 seconds
))
.map(|_| Timestamped {
inner: Event::MetricsInterval,
timestamp: metrics_clock.new_timestamp(),
});

let events = (
external_events,
dora_events,
watchdog_interval,
metrics_interval,
)
.merge();
daemon.run_inner(events).await
}

@@ -426,6 +445,9 @@ impl Daemon {
}
}
}
Event::MetricsInterval => {
self.collect_and_send_metrics().await?;
}
Event::CtrlC => {
tracing::info!("received ctrlc signal -> stopping all dataflows");
for dataflow in self.running.values_mut() {
@@ -864,6 +886,99 @@ impl Daemon {
Ok(status)
}

async fn collect_and_send_metrics(&mut self) -> eyre::Result<()> {
use dora_message::daemon_to_coordinator::NodeMetrics;
use sysinfo::{Pid, ProcessRefreshKind, ProcessesToUpdate};

if self.coordinator_connection.is_none() {
return Ok(());
}

// Reuse system instance for metrics collection
let system = &mut self.metrics_system;

// Metrics are collected every 2 seconds (metrics_interval)
const METRICS_INTERVAL_SECS: f64 = 2.0;

// Collect metrics for all running dataflows
for (dataflow_id, dataflow) in &self.running {
let mut metrics = BTreeMap::new();

// Collect all PIDs for this dataflow
let pids: Vec<Pid> = dataflow
.running_nodes
.values()
.filter_map(|node| {
node.pid
.as_ref()
.map(|pid| Pid::from_u32(pid.load(atomic::Ordering::Acquire)))
})
.collect();

if !pids.is_empty() {
// Refresh process metrics (cpu, memory, disk)
let refresh_kind = ProcessRefreshKind::nothing()
.with_cpu()
.with_memory()
.with_disk_usage();
system.refresh_processes_specifics(
ProcessesToUpdate::Some(&pids),
true,
refresh_kind,
);

// Collect metrics for each node
for (node_id, running_node) in &dataflow.running_nodes {
if let Some(pid) = running_node.pid.as_ref() {
let pid = pid.load(atomic::Ordering::Acquire);
let sys_pid = Pid::from_u32(pid);
if let Some(process) = system.process(sys_pid) {
let disk_usage = process.disk_usage();
// Divide by metrics_interval to get per-second averages
metrics.insert(
node_id.clone(),
NodeMetrics {
pid,
cpu_usage: process.cpu_usage(),
memory_bytes: process.memory(),
disk_read_bytes: Some(
(disk_usage.read_bytes as f64 / METRICS_INTERVAL_SECS)
as u64,
),
disk_write_bytes: Some(
(disk_usage.written_bytes as f64 / METRICS_INTERVAL_SECS)
as u64,
),
},
);
}
}
}
}

// Send metrics to coordinator if we have any
if !metrics.is_empty() {
if let Some(connection) = &mut self.coordinator_connection {
let msg = serde_json::to_vec(&Timestamped {
inner: CoordinatorRequest::Event {
daemon_id: self.daemon_id.clone(),
event: DaemonEvent::NodeMetrics {
dataflow_id: *dataflow_id,
metrics,
},
},
timestamp: self.clock.new_timestamp(),
})?;
socket_stream_send(connection, &msg)
.await
.wrap_err("failed to send metrics to coordinator")?;
}
}
}

Ok(())
}

async fn handle_inter_daemon_event(&mut self, event: InterDaemonEvent) -> eyre::Result<()> {
match event {
InterDaemonEvent::Output {
@@ -2418,6 +2533,7 @@ fn close_input(
pub struct RunningNode {
process: Option<ProcessHandle>,
node_config: NodeConfig,
pid: Option<Arc<AtomicU32>>,
restart_policy: RestartPolicy,
/// Don't restart the node even if the restart policy says so.
///
@@ -2787,6 +2903,7 @@ pub enum Event {
Dora(DoraEvent),
DynamicNode(DynamicNodeEventWrapper),
HeartbeatInterval,
MetricsInterval,
CtrlC,
SecondCtrlC,
DaemonError(eyre::Report),
@@ -2826,6 +2943,7 @@ impl Event {
Event::Dora(_) => "Dora",
Event::DynamicNode(_) => "DynamicNode",
Event::HeartbeatInterval => "HeartbeatInterval",
Event::MetricsInterval => "MetricsInterval",
Event::CtrlC => "CtrlC",
Event::SecondCtrlC => "SecondCtrlC",
Event::DaemonError(_) => "DaemonError",


+ 17
- 7
binaries/daemon/src/spawn/prepared.rs View File

@@ -29,7 +29,7 @@ use std::{
path::{Path, PathBuf},
sync::{
Arc,
atomic::{self, AtomicBool},
atomic::{self, AtomicBool, AtomicU32},
},
};
use tokio::{
@@ -69,17 +69,25 @@ impl PreparedNode {
.await?;

let disable_restart = Arc::new(AtomicBool::new(false));
let pid = Arc::new(AtomicU32::new(0));
let running_node = RunningNode {
process: match kind {
process: match &kind {
NodeKind::Dynamic => None,
NodeKind::Spawned => Some(crate::ProcessHandle::new(op_tx)),
NodeKind::Spawned { .. } => Some(crate::ProcessHandle::new(op_tx)),
},
node_config: self.node_config.clone(),
restart_policy: self.restart_policy(),
disable_restart: disable_restart.clone(),
pid: match kind {
NodeKind::Dynamic => None,
NodeKind::Spawned { pid: new_pid } => {
pid.store(new_pid, atomic::Ordering::Release);
Some(pid.clone())
}
},
};

tokio::spawn(self.restart_loop(logger, finished_rx, disable_restart));
tokio::spawn(self.restart_loop(logger, finished_rx, disable_restart, pid));

Ok(running_node)
}
@@ -96,6 +104,7 @@ impl PreparedNode {
mut logger: NodeLogger<'static>,
mut finished_rx: oneshot::Receiver<NodeProcessFinished>,
disable_restart: Arc<AtomicBool>,
pid: Arc<AtomicU32>,
) {
loop {
let Ok(NodeProcessFinished { exit_status, op_rx }) = finished_rx.await else {
@@ -151,8 +160,9 @@ impl PreparedNode {
.spawn_inner(&mut logger, op_rx, finished_tx)
.await;
match result {
Ok(NodeKind::Spawned) => {
Ok(NodeKind::Spawned { pid: new_pid }) => {
finished_rx = finished_rx_new;
pid.store(new_pid, atomic::Ordering::Release);
}
Ok(NodeKind::Dynamic) => {
logger
@@ -512,14 +522,14 @@ impl PreparedNode {
)
});
});
Ok(NodeKind::Spawned)
Ok(NodeKind::Spawned { pid })
}
}

#[must_use]
enum NodeKind {
Dynamic,
Spawned,
Spawned { pid: u32 },
}

struct NodeProcessFinished {


+ 14
- 0
examples/python-concurrent-rw/dataflow.yml View File

@@ -0,0 +1,14 @@
nodes:
- id: node1
path: ./receive_data.py
inputs:
data: node2/data
outputs:
- data

- id: node2
path: ./receive_data.py
inputs:
data: node1/data
outputs:
- data

+ 11
- 0
examples/python-concurrent-rw/pyproject.toml View File

@@ -0,0 +1,11 @@
[project]
name = "dora-test"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
"asyncio>=4.0.0",
"dora-rs-cli>=0.3.13",
"numpy>=2.3.5",
]

+ 47
- 0
examples/python-concurrent-rw/receive_data.py View File

@@ -0,0 +1,47 @@
from dora import Node

import logging
import threading
import time

import numpy as np
import pyarrow as pa


def read_data_task(node, log):
    """Drain the node's event stream, printing each INPUT payload, until it ends."""
    event = node.next()
    while event is not None:
        if event["type"] == "INPUT":
            print(f"info {event['value'].to_numpy()}")
        # Drop the event explicitly so its buffer is released promptly.
        del event
        event = node.next()
    log.log(logging.INFO, "read_data_task done!")


def publish_task(node, log):
    """Publish a monotonic timestamp on the "data" output once per second, forever."""
    while True:
        # One-second cadence between publishes.
        time.sleep(1)
        timestamp = time.perf_counter_ns()
        node.send_output("data", pa.array([np.uint64(timestamp)]))


def main():
    """Run a reader thread and a publisher thread concurrently on one node."""
    node = Node()
    log = logging.getLogger(__name__)

    # Reader thread: joined below, so main() waits for the event stream to end.
    reader = threading.Thread(target=read_data_task, args=(node, log))
    reader.start()

    # Publisher runs as a daemon thread so it never keeps the process alive.
    publisher = threading.Thread(target=publish_task, args=(node, log), daemon=True)
    publisher.start()

    reader.join()
    log.log(logging.INFO, "done!")


# Entry point: only run when executed as a script, not when imported.
if __name__ == "__main__":
    main()

+ 4
- 3
examples/python-drain/receive_data.py View File

@@ -9,9 +9,10 @@ def main():
drained_data = node.drain()
print("drained: ", drained_data)
for _ in range(100):
_event = node.next()
# print(event)
# del event
try:
_event = node.try_recv()
except Exception as e:
print("Error receiving event:", e)
print("done!")




+ 1
- 1
examples/python-operator-dataflow/README.md View File

@@ -21,7 +21,7 @@ cargo run --example python-operator-dataflow

```bash
conda create -n example_env python=3.11
conda activate test_env
conda activate example_env
pip install -r requirements.txt
pip install -r requirements_llm.txt
```


+ 0
- 15
examples/python-ros2-dataflow/run.rs View File

@@ -1,15 +0,0 @@
use dora_cli::{build, run as dora_run};
use eyre::WrapErr;
use std::path::Path;

fn main() -> eyre::Result<()> {
let root = Path::new(env!("CARGO_MANIFEST_DIR"));
std::env::set_current_dir(root.join(file!()).parent().unwrap())
.wrap_err("failed to set working dir")?;

build("dataflow.yml".to_string(), None, None, true, true)?;

dora_run("dataflow.yml".to_string(), true)?;

Ok(())
}

examples/c++-ros2-dataflow/.gitignore → examples/ros2-bridge/c++/turtle/.gitignore View File


examples/c++-ros2-dataflow/README.md → examples/ros2-bridge/c++/turtle/README.md View File


examples/c++-ros2-dataflow/dataflow.yml → examples/ros2-bridge/c++/turtle/dataflow.yml View File


examples/c++-ros2-dataflow/node-rust-api/main.cc → examples/ros2-bridge/c++/turtle/node-rust-api/main.cc View File

@@ -72,6 +72,11 @@ int main()
example_interfaces::AddTwoInts_Request request = {.a = 4, .b = 5};
add_two_ints->send_request(request);
}
else if (ty == DoraEventType::Stop)
{
std::cout << "Received stop event" << std::endl;
break;
}
else
{
std::cerr << "Unknown event type " << static_cast<int>(ty) << std::endl;

examples/c++-ros2-dataflow/run.rs → examples/ros2-bridge/c++/turtle/run.rs View File

@@ -1,6 +1,10 @@
use eyre::{Context, bail};
use std::{env::consts::EXE_SUFFIX, path::Path};

use process_wrap::std::{
ProcessGroup, StdChildWrapper as ChildWrapper, StdCommandWrap as CommandWrap,
};

fn main() -> eyre::Result<()> {
if cfg!(windows) {
tracing::error!(
@@ -9,7 +13,7 @@ fn main() -> eyre::Result<()> {
return Ok(());
}

let root = Path::new(env!("CARGO_MANIFEST_DIR"));
let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../../");
let target = root.join("target");
std::env::set_current_dir(root.join(file!()).parent().unwrap())
.wrap_err("failed to set working dir")?;
@@ -17,14 +21,14 @@ fn main() -> eyre::Result<()> {
std::fs::create_dir_all("build")?;
let build_dir = Path::new("build");

build_package("dora-node-api-cxx", &["ros2-bridge"]);
build_package("dora-node-api-cxx", &["ros2-bridge"])?;
let node_cxxbridge = target
.join("cxxbridge")
.join("dora-node-api-cxx")
.join("install");

build_cxx_node(
root,
&root,
&[
&dunce::canonicalize(Path::new("node-rust-api").join("main.cc"))?,
&dunce::canonicalize(node_cxxbridge.join("dora-node-api.cc"))?,
@@ -44,13 +48,36 @@ fn main() -> eyre::Result<()> {
"-l",
"dora_node_api_cxx",
],
);
)?;

let dataflow_task = std::thread::spawn(|| {
dora_cli::run("dataflow.yml".to_string(), false).unwrap();
});

let mut add_service_task = run_ros_node("examples_rclcpp_minimal_service", "service_main")?;
let mut turtle_task = run_ros_node("turtlesim", "turtlesim_node")?;

dora_cli::run("dataflow.yml".to_string(), false)?;
dataflow_task
.join()
.map_err(|_| eyre::eyre!("Failed to run dataflow"))?;

add_service_task.kill()?;
turtle_task.kill()?;

Ok(())
}

fn run_ros_node(package: &str, node: &str) -> eyre::Result<Box<dyn ChildWrapper>> {
let mut command = CommandWrap::with_new("ros2", |cmd| {
cmd.arg("run");
cmd.arg(package).arg(node);
});
command.wrap(ProcessGroup::leader());
command
.spawn()
.map_err(|e| eyre::eyre!("failed to spawn ros node: {}", e))
}

fn build_package(package: &str, features: &[&str]) -> eyre::Result<()> {
let cargo = std::env::var("CARGO").unwrap();
let mut cmd = std::process::Command::new(&cargo);

examples/python-ros2-dataflow/README.md → examples/ros2-bridge/python/turtle/README.md View File

@@ -3,16 +3,14 @@
To get started:

```bash
source /opt/ros/humble/setup.bash && ros2 run turtlesim turtlesim_node &
source /opt/ros/humble/setup.bash && ros2 run examples_rclcpp_minimal_service service_main &
source /opt/ros/humble/setup.bash && ros2 run turtlesim turtlesim_node
cargo run --example python-ros2-dataflow --features="ros2-examples"
```

- alternatively:

```bash
source /opt/ros/humble/setup.bash && ros2 run turtlesim turtlesim_node &
source /opt/ros/humble/setup.bash && ros2 run examples_rclcpp_minimal_service service_main &
source /opt/ros/humble/setup.bash && ros2 run turtlesim turtlesim_node

# cd examples/python-ros2-dataflow
uv pip install -e ../../../../apis/python/node --reinstall

examples/python-ros2-dataflow/control_node.py → examples/ros2-bridge/python/turtle/control_node.py View File


examples/python-ros2-dataflow/dataflow.yml → examples/ros2-bridge/python/turtle/dataflow.yml View File


examples/python-ros2-dataflow/random_turtle.py → examples/ros2-bridge/python/turtle/random_turtle.py View File


+ 41
- 0
examples/ros2-bridge/python/turtle/run.rs View File

@@ -0,0 +1,41 @@
use dora_cli::{build, run as dora_run};
use eyre::WrapErr;
use std::path::Path;

use process_wrap::std::{
ProcessGroup, StdChildWrapper as ChildWrapper, StdCommandWrap as CommandWrap,
};

/// Build and run the python turtle dataflow next to a `turtlesim` ROS 2 node.
///
/// The dataflow runs on its own thread; the turtlesim child process is always
/// killed afterwards, even when the dataflow thread panicked (the original
/// code propagated the join error with `?` before `kill()`, leaking the child).
fn main() -> eyre::Result<()> {
    let root = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../../");
    std::env::set_current_dir(root.join(file!()).parent().unwrap())
        .wrap_err("failed to set working dir")?;

    build("dataflow.yml".to_string(), None, None, true, true)?;

    // Run the dataflow on a separate thread so the ROS node can be spawned below.
    let dataflow_task = std::thread::spawn(|| {
        dora_run("dataflow.yml".to_string(), true).unwrap();
    });

    let mut turtle_task = run_ros_node("turtlesim", "turtlesim_node")?;

    // Capture the join result first, then clean up the child unconditionally.
    let dataflow_result = dataflow_task
        .join()
        .map_err(|_| eyre::eyre!("Failed to run dataflow"));
    turtle_task.kill()?;
    dataflow_result?;
    Ok(())
}

/// Spawn `ros2 run <package> <node>` as a child process.
///
/// The child is made a process-group leader — presumably so the whole ROS
/// process tree can be terminated together via `kill()`; confirm against the
/// `process-wrap` crate documentation.
fn run_ros_node(package: &str, node: &str) -> eyre::Result<Box<dyn ChildWrapper>> {
    let mut command = CommandWrap::with_new("ros2", |cmd| {
        cmd.arg("run");
        cmd.arg(package).arg(node);
    });
    command.wrap(ProcessGroup::leader());
    command
        .spawn()
        .map_err(|e| eyre::eyre!("failed to spawn ros node: {}", e))
}

examples/rust-ros2-dataflow/node/Cargo.toml → examples/ros2-bridge/rust/rust-ros2-example-node/Cargo.toml View File

@@ -1,5 +1,5 @@
[package]
name = "rust-ros2-dataflow-example-node"
name = "rust-ros2-example-node"
version.workspace = true
edition.workspace = true
rust-version.workspace = true
@@ -12,7 +12,28 @@ publish = false
ros2 = []

[[bin]]
name = "rust-ros2-dataflow-example-node"
name = "rust-ros2-example-node-turtle"
path = "src/turtle.rs"
required-features = ["ros2"]

[[bin]]
name = "rust-ros2-example-node-topic-sub"
path = "src/topic_sub.rs"
required-features = ["ros2"]

[[bin]]
name = "rust-ros2-example-node-topic-pub"
path = "src/topic_pub.rs"
required-features = ["ros2"]

[[bin]]
name = "rust-ros2-example-node-service-client"
path = "src/service_client.rs"
required-features = ["ros2"]

[[bin]]
name = "rust-ros2-example-node-service-server"
path = "src/service_server.rs"
required-features = ["ros2"]

[dependencies]

+ 125
- 0
examples/ros2-bridge/rust/rust-ros2-example-node/src/service_client.rs View File

@@ -0,0 +1,125 @@
use std::time::Duration;

use dora_node_api::{self, DoraNode, Event};
use dora_ros2_bridge::{
messages::example_interfaces::service::{AddTwoInts, AddTwoIntsRequest},
ros2_client::{self, NodeOptions},
rustdds::{self, policy},
};
use eyre::{Context, eyre};
use futures::task::SpawnExt;

/// Dora node acting as a ROS 2 service client: on every `tick` input it sends
/// an `AddTwoInts` request with random operands and blocks on the response.
/// Processes at most 20 dora events, then exits.
fn main() -> eyre::Result<()> {
    let mut ros_node = init_ros_node()?;

    // Spawn a background spinner task that handles service discovery (and other things).
    let pool = futures::executor::ThreadPool::new()?;
    let spinner = ros_node
        .spinner()
        .map_err(|e| eyre::eyre!("failed to create spinner: {e:?}"))?;
    pool.spawn(async {
        if let Err(err) = spinner.spin().await {
            eprintln!("ros2 spinner failed: {err:?}");
        }
    })
    .context("failed to spawn ros2 spinner")?;

    // Must be created after the spinner has started, so discovery is running.
    let client = create_service_client(&mut ros_node)?;
    let (_node, mut dora_events) = DoraNode::init_from_env()?;

    // Handle at most 20 dora events, then exit.
    for i in 0..20 {
        let event = match dora_events.recv() {
            Some(input) => input,
            None => break,
        };

        match event {
            Event::Input {
                id,
                metadata: _,
                data: _,
            } => match id.as_str() {
                "tick" => {
                    // Operand types are inferred from the AddTwoIntsRequest fields.
                    let a = rand::random();
                    let b = rand::random();
                    let request = AddTwoIntsRequest { a, b };

                    println!("tick {i}, sending {request:?}");
                    let req_id = client.send_request(request)?;

                    // Block until the matching response arrives before handling
                    // the next dora event.
                    futures::executor::block_on(async {
                        let response = client
                            .async_receive_response(req_id)
                            .await
                            .expect("failed to receive response");
                        println!("tick {i}, received {response:?}");
                    });
                }
                other => eprintln!("Ignoring unexpected input `{other}`"),
            },
            Event::Stop(_) => {
                println!("Received stop");
                break;
            }
            other => eprintln!("Received unexpected input: {other:?}"),
        }
    }

    Ok(())
}

/// Create the ROS 2 node used by this example's service client.
///
/// Returns an error if the ROS 2 context, node name, or node cannot be
/// created (the original code `unwrap()`ed the fallible `Context::new` even
/// though this function already returns `Result`).
fn init_ros_node() -> eyre::Result<ros2_client::Node> {
    let ros_context = ros2_client::Context::new()
        .map_err(|e| eyre::eyre!("failed to create ros2 context: {e:?}"))?;

    let node_name = ros2_client::NodeName::new("/", "ros2_dora_service_client")
        .map_err(|e| eyre!("failed to create ROS2 node name: {e}"))?;
    ros_context
        .new_node(node_name, NodeOptions::new().enable_rosout(true))
        .map_err(|e| eyre::eyre!("failed to create ros2 node: {e:?}"))
}

/// Create an `AddTwoInts` service client on `/add_two_ints` and block until
/// the service is reachable (up to 10 attempts of 2 s each).
fn create_service_client(
    ros_node: &mut ros2_client::Node,
) -> eyre::Result<ros2_client::Client<AddTwoInts>> {
    // QoS for the example service: reliable with a short blocking window,
    // keeping only the latest sample.
    let service_qos = {
        rustdds::QosPolicyBuilder::new()
            .reliability(policy::Reliability::Reliable {
                max_blocking_time: rustdds::Duration::from_millis(100),
            })
            .history(policy::History::KeepLast { depth: 1 })
            .build()
    };
    let add_client = ros_node
        .create_client::<AddTwoInts>(
            ros2_client::ServiceMapping::Enhanced,
            &ros2_client::Name::new("/", "add_two_ints").unwrap(),
            &ros2_client::ServiceTypeName::new("example_interfaces", "AddTwoInts"),
            service_qos.clone(),
            service_qos.clone(),
        )
        .map_err(|e| eyre::eyre!("failed to create service client: {e:?}"))?;

    println!("wait for add_two_ints service");
    // Poll for readiness: each attempt races wait_for_service against a 2 s timer.
    let service_ready = async {
        for _ in 0..10 {
            let ready = add_client.wait_for_service(&ros_node);
            futures::pin_mut!(ready);
            let timeout = futures_timer::Delay::new(Duration::from_secs(2));
            match futures::future::select(ready, timeout).await {
                futures::future::Either::Left(((), _)) => {
                    println!("add_two_ints service is ready");
                    return Ok(());
                }
                futures::future::Either::Right(_) => {
                    println!("timeout while waiting for add_two_ints service, retrying");
                }
            }
        }
        eyre::bail!("add_two_ints service not available");
    };
    futures::executor::block_on(service_ready)?;
    Ok(add_client)
}

+ 106
- 0
examples/ros2-bridge/rust/rust-ros2-example-node/src/service_server.rs View File

@@ -0,0 +1,106 @@
use dora_node_api::{
self, DoraNode, Event,
merged::{MergeExternal, MergedEvent},
};
use dora_ros2_bridge::{
messages::example_interfaces::service::{AddTwoInts, AddTwoIntsResponse},
ros2_client::{self, NodeOptions},
rustdds::{self, policy},
};
use eyre::{Context, eyre};
use futures::task::SpawnExt;

/// Dora node acting as a ROS 2 `AddTwoInts` service server.
///
/// Merges incoming service requests into the dora event stream, answers each
/// one, and exits after a few responses or on a dora stop event.
fn main() -> eyre::Result<()> {
    let mut ros_node = init_ros_node()?;

    // Spawn a background spinner task that handles service discovery (and other things).
    let pool = futures::executor::ThreadPool::new()?;
    let spinner = ros_node
        .spinner()
        .map_err(|e| eyre::eyre!("failed to create spinner: {e:?}"))?;
    pool.spawn(async {
        if let Err(err) = spinner.spin().await {
            eprintln!("ros2 spinner failed: {err:?}");
        }
    })
    .context("failed to spawn ros2 spinner")?;

    // Must be created after the spinner has started, so discovery is running.
    let server = create_service_server(&mut ros_node)?;
    let (_node, dora_events) = DoraNode::init_from_env()?;
    let merged_events = dora_events.merge_external(server.receive_request_stream());

    let mut events = futures::executor::block_on_stream(merged_events);

    let mut counter = 0;
    loop {
        let event = match events.next() {
            Some(input) => input,
            None => break,
        };

        match event {
            MergedEvent::Dora(Event::Input {
                id,
                metadata: _,
                data: _,
            }) => match id.as_str() {
                // `tick` inputs only keep the event loop alive; nothing to do.
                "tick" => {}
                other => eprintln!("Ignoring unexpected input `{other}`"),
            },
            MergedEvent::Dora(Event::Stop(_)) => {
                println!("Received stop");
                break;
            }
            MergedEvent::External(Ok((req_id, req))) => {
                println!("request: {} + {}", req.a, req.b);
                let resp = AddTwoIntsResponse { sum: req.a + req.b };
                futures::executor::block_on(server.async_send_response(req_id, resp))?;
                counter += 1;
                if counter > 3 {
                    // Stop after the 4th response — service discovery is slow,
                    // so keep the example run short.
                    break;
                }
            }
            other => eprintln!("Received unexpected input: {other:?}"),
        }
    }

    Ok(())
}

/// Create the ROS 2 node hosting this example's service server.
///
/// The node name was a copy-paste from the client example
/// (`ros2_dora_service_client`); a server and client with the same node name
/// would collide on the ROS graph, so it is renamed here.
fn init_ros_node() -> eyre::Result<ros2_client::Node> {
    let ros_context = ros2_client::Context::new().unwrap();

    ros_context
        .new_node(
            ros2_client::NodeName::new("/", "ros2_dora_service_server")
                .map_err(|e| eyre!("failed to create ROS2 node name: {e}"))?,
            NodeOptions::new().enable_rosout(true),
        )
        .map_err(|e| eyre::eyre!("failed to create ros2 node: {e:?}"))
}

/// Create the `AddTwoInts` service server on `/add_two_ints`.
///
/// Fixes a copy-paste from the client example: the comment and the error
/// message both said "service client" even though this creates the server.
fn create_service_server(
    ros_node: &mut ros2_client::Node,
) -> eyre::Result<ros2_client::Server<AddTwoInts>> {
    // QoS for the example service server: reliable with a short blocking
    // window, keeping only the latest sample.
    let service_qos = {
        rustdds::QosPolicyBuilder::new()
            .reliability(policy::Reliability::Reliable {
                max_blocking_time: rustdds::Duration::from_millis(100),
            })
            .history(policy::History::KeepLast { depth: 1 })
            .build()
    };
    let add_server = ros_node
        .create_server::<AddTwoInts>(
            ros2_client::ServiceMapping::Enhanced,
            &ros2_client::Name::new("/", "add_two_ints").unwrap(),
            &ros2_client::ServiceTypeName::new("example_interfaces", "AddTwoInts"),
            service_qos.clone(),
            service_qos.clone(),
        )
        .map_err(|e| eyre::eyre!("failed to create service server: {e:?}"))?;

    Ok(add_server)
}

+ 104
- 0
examples/ros2-bridge/rust/rust-ros2-example-node/src/topic_pub.rs View File

@@ -0,0 +1,104 @@
use dora_node_api::{self, DoraNode, Event};
use dora_ros2_bridge::{
messages::std_msgs::msg::String as Ros2String,
ros2_client::{self, NodeOptions, ros2},
rustdds::{self, policy},
};
use eyre::{Context, eyre};
use futures::task::SpawnExt;

/// Dora node that publishes a `std_msgs/String` to the ROS 2 `/topic` topic on
/// every `tick` input, processing at most 20 dora events before exiting.
fn main() -> eyre::Result<()> {
    let mut ros_node = init_ros_node()?;
    let publisher = create_publisher(&mut ros_node)?;

    // Spawn a background spinner task that handles service discovery (and other things).
    let pool = futures::executor::ThreadPool::new()?;
    let spinner = ros_node
        .spinner()
        .map_err(|e| eyre::eyre!("failed to create spinner: {e:?}"))?;
    pool.spawn(async {
        if let Err(err) = spinner.spin().await {
            eprintln!("ros2 spinner failed: {err:?}");
        }
    })
    .context("failed to spawn ros2 spinner")?;

    let (_node, dora_events) = DoraNode::init_from_env()?;

    let mut events = futures::executor::block_on_stream(dora_events);

    // Handle at most 20 dora events, then exit.
    for i in 0..20 {
        let event = match events.next() {
            Some(input) => input,
            None => break,
        };

        match event {
            Event::Input {
                id,
                metadata: _,
                data: _,
            } => match id.as_str() {
                "tick" => {
                    let msg = Ros2String {
                        data: format!("The {i} hello from Dora"),
                    };
                    println!("tick {i}, sending {msg:?}");
                    // NOTE(review): a publish failure panics here; consider
                    // propagating with `?` instead.
                    publisher.publish(msg).unwrap();
                }
                other => eprintln!("Ignoring unexpected input `{other}`"),
            },
            Event::Stop(_) => {
                println!("Received stop");
                break;
            }
            other => eprintln!("Received unexpected input: {other:?}"),
        }
    }

    Ok(())
}

/// Build the ROS 2 node that owns this example's publisher.
fn init_ros_node() -> eyre::Result<ros2_client::Node> {
    let context = ros2_client::Context::new().unwrap();
    let node_name = ros2_client::NodeName::new("/", "ros2_dora_pub")
        .map_err(|e| eyre!("failed to create ROS2 node name: {e}"))?;
    let options = NodeOptions::new().enable_rosout(true);
    context
        .new_node(node_name, options)
        .map_err(|e| eyre::eyre!("failed to create ros2 node: {e:?}"))
}

/// Create a publisher for `std_msgs/String` messages on the `/topic` topic.
fn create_publisher(
    ros_node: &mut ros2_client::Node,
) -> eyre::Result<ros2_client::Publisher<Ros2String>> {
    // QoS: volatile durability, automatic liveliness, reliable delivery with a
    // short blocking window, keeping only the latest sample.
    let topic_qos: rustdds::QosPolicies = {
        rustdds::QosPolicyBuilder::new()
            .durability(policy::Durability::Volatile)
            .liveliness(policy::Liveliness::Automatic {
                lease_duration: ros2::Duration::INFINITE,
            })
            .reliability(policy::Reliability::Reliable {
                max_blocking_time: ros2::Duration::from_millis(100),
            })
            .history(policy::History::KeepLast { depth: 1 })
            .build()
    };

    let topic = ros_node
        .create_topic(
            &ros2_client::Name::new("/", "topic")
                .map_err(|e| eyre!("failed to create ROS2 name: {e}"))?,
            ros2_client::MessageTypeName::new("std_msgs", "String"),
            &topic_qos,
        )
        .context("failed to create topic")?;

    // Publishes plain String messages. (The previous "publish Twist for the
    // turtle" comment was stale, copied from the turtle example.)
    let publisher = ros_node
        .create_publisher::<Ros2String>(&topic, None)
        .context("failed to create publisher")?;
    Ok(publisher)
}

+ 91
- 0
examples/ros2-bridge/rust/rust-ros2-example-node/src/topic_sub.rs View File

@@ -0,0 +1,91 @@
use dora_node_api::{
self, DoraNode, Event,
merged::{MergeExternal, MergedEvent},
};
use dora_ros2_bridge::{
messages::std_msgs::msg::String as Ros2String,
ros2_client::{self, NodeOptions},
};
use eyre::{Context, eyre};
use futures::task::SpawnExt;

/// Dora node that subscribes to the ROS 2 `/topic` topic and prints every
/// received `std_msgs/String`, exiting after 21 external messages or on a
/// dora stop event.
fn main() -> eyre::Result<()> {
    let mut ros_node = init_ros_node()?;
    let subscriber = create_subscriber(&mut ros_node)?;

    // Spawn a background spinner task that handles service discovery (and other things).
    let pool = futures::executor::ThreadPool::new()?;
    let spinner = ros_node
        .spinner()
        .map_err(|e| eyre::eyre!("failed to create spinner: {e:?}"))?;
    pool.spawn(async {
        if let Err(err) = spinner.spin().await {
            eprintln!("ros2 spinner failed: {err:?}");
        }
    })
    .context("failed to spawn ros2 spinner")?;

    let (_node, dora_events) = DoraNode::init_from_env()?;

    // Merge the ROS subscription stream into the dora event stream.
    let merged_events = dora_events.merge_external(Box::pin(subscriber.async_stream()));
    let mut events = futures::executor::block_on_stream(merged_events);

    let mut count = 0usize;
    while let Some(event) = events.next() {
        match event {
            MergedEvent::Dora(event) => match event {
                Event::Input {
                    id,
                    metadata: _,
                    data: _,
                } => match id.as_str() {
                    // `tick` inputs only keep the event loop alive; nothing to do.
                    "tick" => {}
                    other => eprintln!("Ignoring unexpected input `{other}`"),
                },
                Event::Stop(_) => {
                    println!("Received stop");
                    break;
                }
                other => eprintln!("Received unexpected input: {other:?}"),
            },
            MergedEvent::External(recv) => {
                println!("received external event: {:?}", recv);
                count += 1;
                // Stop after 21 external messages so the example terminates.
                if count > 20 {
                    break;
                }
            }
        }
    }

    Ok(())
}

/// Build the ROS 2 node that owns this example's subscription.
fn init_ros_node() -> eyre::Result<ros2_client::Node> {
    let context = ros2_client::Context::new().unwrap();
    let node_name = ros2_client::NodeName::new("/", "ros2_dora_sub")
        .map_err(|e| eyre!("failed to create ROS2 node name: {e}"))?;
    let options = NodeOptions::new().enable_rosout(true);
    context
        .new_node(node_name, options)
        .map_err(|e| eyre::eyre!("failed to create ros2 node: {e:?}"))
}

/// Subscribe to the `/topic` topic carrying `std_msgs/String` messages.
fn create_subscriber(
    ros_node: &mut ros2_client::Node,
) -> eyre::Result<ros2_client::Subscription<Ros2String>> {
    let topic_name = ros2_client::Name::new("/", "topic")
        .map_err(|e| eyre!("failed to create ROS2 name: {e}"))?;
    let message_type = ros2_client::MessageTypeName::new("std_msgs", "String");
    // Default QoS is sufficient for this example subscription.
    let topic = ros_node
        .create_topic(&topic_name, message_type, &Default::default())
        .context("failed to create topic")?;
    ros_node
        .create_subscription::<Ros2String>(&topic, None)
        .context("failed to create subscription")
}

examples/rust-ros2-dataflow/node/src/main.rs → examples/ros2-bridge/rust/rust-ros2-example-node/src/turtle.rs View File

@@ -79,7 +79,7 @@ fn main() -> eyre::Result<()> {
let merged = dora_events.merge_external(Box::pin(turtle_pose_reader.async_stream()));
let mut events = futures::executor::block_on_stream(merged);

for i in 0..1000 {
for i in 0..600 {
let event = match events.next() {
Some(input) => input,
None => break,
@@ -118,7 +118,10 @@ fn main() -> eyre::Result<()> {
}
other => eprintln!("Ignoring unexpected input `{other}`"),
},
Event::Stop(_) => println!("Received stop"),
Event::Stop(_) => {
println!("Received stop");
break;
}
other => eprintln!("Received unexpected input: {other:?}"),
},
MergedEvent::External(pose) => {

+ 7
- 0
examples/ros2-bridge/rust/service-client/dataflow.yml View File

@@ -0,0 +1,7 @@
nodes:
- id: rust-node
build: cargo build -p rust-ros2-example-node --bin rust-ros2-example-node-service-client --features ros2
path: ../../../../target/debug/rust-ros2-example-node-service-client
inputs:
tick: dora/timer/millis/500
outputs:

+ 39
- 0
examples/ros2-bridge/rust/service-client/run.rs View File

@@ -0,0 +1,39 @@
use dora_cli::{build, run};
use eyre::Context;
use std::path::Path;

use process_wrap::std::{
ProcessGroup, StdChildWrapper as ChildWrapper, StdCommandWrap as CommandWrap,
};

/// Build and run the service-client dataflow next to a minimal ROS 2 service.
///
/// The ROS child process is always killed, even when the dataflow thread
/// panicked (the original code propagated the join error with `?` before
/// `kill()`, leaking the child process).
fn main() -> eyre::Result<()> {
    let root = Path::new(env!("CARGO_MANIFEST_DIR"));
    std::env::set_current_dir(root.join("../../../").join(file!()).parent().unwrap())
        .wrap_err("failed to set working dir")?;

    build("dataflow.yml".to_string(), None, None, false, true)?;

    // Run the dataflow on a separate thread so the ROS node can be spawned below.
    let dataflow_task = std::thread::spawn(|| {
        run("dataflow.yml".to_string(), false).unwrap();
    });

    let mut service_task = run_ros_node("examples_rclcpp_minimal_service", "service_main")?;

    // Capture the join result first, then clean up the child unconditionally.
    let dataflow_result = dataflow_task
        .join()
        .map_err(|_| eyre::eyre!("Failed to run dataflow"));
    service_task.kill()?;
    dataflow_result?;
    Ok(())
}

/// Spawn `ros2 run <package> <node>` as a child process.
///
/// The child is made a process-group leader — presumably so the whole ROS
/// process tree can be terminated together via `kill()`; confirm against the
/// `process-wrap` crate documentation.
fn run_ros_node(package: &str, node: &str) -> eyre::Result<Box<dyn ChildWrapper>> {
    let mut command = CommandWrap::with_new("ros2", |cmd| {
        cmd.arg("run");
        cmd.arg(package).arg(node);
    });
    command.wrap(ProcessGroup::leader());
    command
        .spawn()
        .map_err(|e| eyre::eyre!("failed to spawn ros node: {}", e))
}

+ 7
- 0
examples/ros2-bridge/rust/service-server/dataflow.yml View File

@@ -0,0 +1,7 @@
nodes:
- id: rust-node
build: cargo build -p rust-ros2-example-node --bin rust-ros2-example-node-service-server --features ros2
path: ../../../../target/debug/rust-ros2-example-node-service-server
inputs:
tick: dora/timer/millis/500
outputs:

+ 63
- 0
examples/ros2-bridge/rust/service-server/run.rs View File

@@ -0,0 +1,63 @@
use dora_cli::{build, run};
use eyre::Context;
use std::pin::Pin;
use std::{path::Path, sync::mpsc};
use tokio;

use process_wrap::tokio::{
ProcessGroup, TokioChildWrapper as ChildWrapper, TokioCommandWrap as CommandWrap,
};

/// Build and run the service-server dataflow while repeatedly launching a
/// minimal ROS 2 client that exercises the server.
///
/// Fixes: the std-channel bridge used a blocking `mpsc::recv()` directly
/// inside an async task (it now runs via `spawn_blocking`), and several
/// `Result`s from channel sends were silently ignored (now explicit `let _`).
fn main() -> eyre::Result<()> {
    let root = Path::new(env!("CARGO_MANIFEST_DIR"));
    std::env::set_current_dir(root.join("../../../").join(file!()).parent().unwrap())
        .wrap_err("failed to set working dir")?;

    build("dataflow.yml".to_string(), None, None, false, true)?;

    let (finish_tx, finish_rx) = mpsc::channel();
    let dataflow_task = std::thread::spawn(move || {
        run("dataflow.yml".to_string(), false).unwrap();
        // The receiver may already be gone if the runtime shut down first.
        let _ = finish_tx.send(());
    });

    let rt = tokio::runtime::Runtime::new()?;
    let (finish_async_tx, mut finish_async_rx) = tokio::sync::mpsc::channel(1);
    // Bridge the std channel into a tokio channel. The blocking recv() must not
    // run on an async worker thread, so run it on the blocking pool.
    rt.spawn_blocking(move || {
        // An Err from recv() (sender dropped, e.g. dataflow thread panicked)
        // also counts as "finished".
        let _ = finish_rx.recv();
        let _ = finish_async_tx.blocking_send(());
    });

    // Each client_main sends a single request; the server node terminates
    // after handling a few requests, which releases the finish signal above.
    rt.block_on(async move {
        let mut client_task = run_ros_node("examples_rclcpp_minimal_client", "client_main")?;
        loop {
            tokio::select! {
                _ = finish_async_rx.recv() => {
                    break;
                },
                _ = Box::into_pin(client_task.wait()) => {
                    // The client exited after its single request: start another.
                    client_task = run_ros_node("examples_rclcpp_minimal_client", "client_main")?;
                }
            }
        }
        Box::into_pin(client_task.kill()).await?;
        Ok::<(), eyre::Report>(())
    })?;

    // No need to join dataflow_task: the finish signal was already received,
    // so the dataflow thread has completed its work.

    Ok(())
}

/// Spawn `ros2 run <package> <node>` as a tokio-managed child process.
///
/// The child is made a process-group leader — presumably so the whole ROS
/// process tree can be terminated together via `kill()`; confirm against the
/// `process-wrap` crate documentation.
fn run_ros_node(package: &str, node: &str) -> eyre::Result<Box<dyn ChildWrapper>> {
    let mut command = CommandWrap::with_new("ros2", |cmd| {
        cmd.arg("run");
        cmd.arg(package).arg(node);
    });
    command.wrap(ProcessGroup::leader());
    command
        .spawn()
        .map_err(|e| eyre::eyre!("failed to spawn ros node: {}", e))
}

+ 7
- 0
examples/ros2-bridge/rust/topic-pub/dataflow.yml View File

@@ -0,0 +1,7 @@
nodes:
- id: rust-node
build: cargo build -p rust-ros2-example-node --bin rust-ros2-example-node-topic-pub --features ros2
path: ../../../../target/debug/rust-ros2-example-node-topic-pub
inputs:
tick: dora/timer/millis/500
outputs:

+ 39
- 0
examples/ros2-bridge/rust/topic-pub/run.rs View File

@@ -0,0 +1,39 @@
use dora_cli::{build, run};
use eyre::Context;
use std::path::Path;

use process_wrap::std::{
ProcessGroup, StdChildWrapper as ChildWrapper, StdCommandWrap as CommandWrap,
};

/// Build and run the topic-pub dataflow next to a minimal ROS 2 subscriber.
///
/// The ROS child process is always killed, even when the dataflow thread
/// panicked (the original code propagated the join error with `?` before
/// `kill()`, leaking the child process).
fn main() -> eyre::Result<()> {
    let root = Path::new(env!("CARGO_MANIFEST_DIR"));
    std::env::set_current_dir(root.join("../../../").join(file!()).parent().unwrap())
        .wrap_err("failed to set working dir")?;

    build("dataflow.yml".to_string(), None, None, false, true)?;

    // Run the dataflow on a separate thread so the ROS node can be spawned below.
    let dataflow_task = std::thread::spawn(|| {
        run("dataflow.yml".to_string(), false).unwrap();
    });

    let mut sub_task = run_ros_node("examples_rclcpp_minimal_subscriber", "subscriber_lambda")?;

    // Capture the join result first, then clean up the child unconditionally.
    let dataflow_result = dataflow_task
        .join()
        .map_err(|_| eyre::eyre!("Failed to run dataflow"));
    sub_task.kill()?;
    dataflow_result?;
    Ok(())
}

/// Spawn `ros2 run <package> <node>` as a child process.
///
/// The child is made a process-group leader — presumably so the whole ROS
/// process tree can be terminated together via `kill()`; confirm against the
/// `process-wrap` crate documentation.
fn run_ros_node(package: &str, node: &str) -> eyre::Result<Box<dyn ChildWrapper>> {
    let mut command = CommandWrap::with_new("ros2", |cmd| {
        cmd.arg("run");
        cmd.arg(package).arg(node);
    });
    command.wrap(ProcessGroup::leader());
    command
        .spawn()
        .map_err(|e| eyre::eyre!("failed to spawn ros node: {}", e))
}

+ 7
- 0
examples/ros2-bridge/rust/topic-sub/dataflow.yml View File

@@ -0,0 +1,7 @@
nodes:
- id: rust-node
build: cargo build -p rust-ros2-example-node --bin rust-ros2-example-node-topic-sub --features ros2
path: ../../../../target/debug/rust-ros2-example-node-topic-sub
inputs:
tick: dora/timer/millis/500
outputs:

+ 39
- 0
examples/ros2-bridge/rust/topic-sub/run.rs View File

@@ -0,0 +1,39 @@
use dora_cli::{build, run};
use eyre::Context;
use std::path::Path;

use process_wrap::std::{
ProcessGroup, StdChildWrapper as ChildWrapper, StdCommandWrap as CommandWrap,
};

/// Build and run the topic-sub dataflow next to a minimal ROS 2 publisher.
///
/// The ROS child process is always killed, even when the dataflow thread
/// panicked (the original code propagated the join error with `?` before
/// `kill()`, leaking the child process).
fn main() -> eyre::Result<()> {
    let root = Path::new(env!("CARGO_MANIFEST_DIR"));
    std::env::set_current_dir(root.join("../../../").join(file!()).parent().unwrap())
        .wrap_err("failed to set working dir")?;

    build("dataflow.yml".to_string(), None, None, false, true)?;

    // Run the dataflow on a separate thread so the ROS node can be spawned below.
    let dataflow_task = std::thread::spawn(|| {
        run("dataflow.yml".to_string(), false).unwrap();
    });

    let mut pub_task = run_ros_node("examples_rclcpp_minimal_publisher", "publisher_lambda")?;

    // Capture the join result first, then clean up the child unconditionally.
    let dataflow_result = dataflow_task
        .join()
        .map_err(|_| eyre::eyre!("Failed to run dataflow"));
    pub_task.kill()?;
    dataflow_result?;
    Ok(())
}

/// Spawn `ros2 run <package> <node>` as a child process.
///
/// The child is made a process-group leader — presumably so the whole ROS
/// process tree can be terminated together via `kill()`; confirm against the
/// `process-wrap` crate documentation.
fn run_ros_node(package: &str, node: &str) -> eyre::Result<Box<dyn ChildWrapper>> {
    let mut command = CommandWrap::with_new("ros2", |cmd| {
        cmd.arg("run");
        cmd.arg(package).arg(node);
    });
    command.wrap(ProcessGroup::leader());
    command
        .spawn()
        .map_err(|e| eyre::eyre!("failed to spawn ros node: {}", e))
}

examples/rust-ros2-dataflow/README.md → examples/ros2-bridge/rust/turtle/README.md View File


+ 9
- 0
examples/ros2-bridge/rust/turtle/dataflow.yml View File

@@ -0,0 +1,9 @@
nodes:
- id: rust-node
build: cargo build -p rust-ros2-example-node --bin rust-ros2-example-node-turtle --features ros2
path: ../../../../target/debug/rust-ros2-example-node-turtle
inputs:
tick: dora/timer/millis/500
service_timer: dora/timer/secs/1
outputs:
- pose

+ 41
- 0
examples/ros2-bridge/rust/turtle/run.rs View File

@@ -0,0 +1,41 @@
use dora_cli::{build, run};
use eyre::Context;
use std::path::Path;

use process_wrap::std::{
ProcessGroup, StdChildWrapper as ChildWrapper, StdCommandWrap as CommandWrap,
};

/// Build and run the rust turtle dataflow next to a turtlesim node and a
/// minimal ROS 2 `AddTwoInts` service.
///
/// Both ROS child processes are always killed, even when the dataflow thread
/// panicked (the original code propagated the join error with `?` before
/// `kill()`, leaking both children).
fn main() -> eyre::Result<()> {
    let root = Path::new(env!("CARGO_MANIFEST_DIR"));
    std::env::set_current_dir(root.join("../../../").join(file!()).parent().unwrap())
        .wrap_err("failed to set working dir")?;

    build("dataflow.yml".to_string(), None, None, false, true)?;

    // Run the dataflow on a separate thread so the ROS nodes can be spawned below.
    let dataflow_task = std::thread::spawn(|| {
        run("dataflow.yml".to_string(), false).unwrap();
    });

    let mut add_service_task = run_ros_node("examples_rclcpp_minimal_service", "service_main")?;
    let mut turtle_task = run_ros_node("turtlesim", "turtlesim_node")?;

    // Capture every result first so each cleanup step runs unconditionally.
    let dataflow_result = dataflow_task
        .join()
        .map_err(|_| eyre::eyre!("Failed to run dataflow"));
    let service_kill = add_service_task.kill();
    let turtle_kill = turtle_task.kill();

    dataflow_result?;
    service_kill?;
    turtle_kill?;
    Ok(())
}

/// Spawn `ros2 run <package> <node>` as a child process.
///
/// The child is made a process-group leader — presumably so the whole ROS
/// process tree can be terminated together via `kill()`; confirm against the
/// `process-wrap` crate documentation.
fn run_ros_node(package: &str, node: &str) -> eyre::Result<Box<dyn ChildWrapper>> {
    let mut command = CommandWrap::with_new("ros2", |cmd| {
        cmd.arg("run");
        cmd.arg(package).arg(node);
    });
    command.wrap(ProcessGroup::leader());
    command
        .spawn()
        .map_err(|e| eyre::eyre!("failed to spawn ros node: {}", e))
}

+ 0
- 9
examples/rust-ros2-dataflow/dataflow.yml View File

@@ -1,9 +0,0 @@
nodes:
- id: rust-node
build: cargo build -p rust-ros2-dataflow-example-node --features ros2
path: ../../target/debug/rust-ros2-dataflow-example-node
inputs:
tick: dora/timer/millis/500
service_timer: dora/timer/secs/1
outputs:
- pose

+ 0
- 15
examples/rust-ros2-dataflow/run.rs View File

@@ -1,15 +0,0 @@
use dora_cli::{build, run};
use eyre::Context;
use std::path::Path;

/// Builds the example dataflow and then runs it to completion.
fn main() -> eyre::Result<()> {
    // Switch into the directory containing this source file so the relative
    // `dataflow.yml` path resolves.
    let root = Path::new(env!("CARGO_MANIFEST_DIR"));
    let source_path = root.join(file!());
    let example_dir = source_path.parent().unwrap();
    std::env::set_current_dir(example_dir).wrap_err("failed to set working dir")?;

    // Build first so the node binary referenced by the dataflow exists.
    build("dataflow.yml".to_string(), None, None, false, true)?;
    // Then run the dataflow; `?` propagates any failure to the caller.
    run("dataflow.yml".to_string(), false)?;

    Ok(())
}

+ 38
- 0
libraries/extensions/ros2-bridge/Cargo.toml View File

@@ -34,11 +34,49 @@ futures = { version = "0.3.21", features = ["thread-pool"] }
futures-timer = "3.0.3"

[dev-dependencies]
dora-cli = { workspace = true }
dunce = "1.0.2"
rand = "0.8.5"
futures = { version = "0.3.28", default-features = false }
process-wrap = { version = "8.2.1", features = ["std", "tokio1"] }

[build-dependencies]
dora-ros2-bridge-msg-gen = { workspace = true, optional = true }
rust-format = { version = "0.3.4", features = [
"pretty_please",
], optional = true }

[[example]]
name = "rust-ros2-dataflow"
path = "../../../examples/ros2-bridge/rust/turtle/run.rs"
required-features = ["ros2-examples"]

[[example]]
name = "rust-ros2-dataflow-topic-sub"
path = "../../../examples/ros2-bridge/rust/topic-sub/run.rs"
required-features = ["ros2-examples"]

[[example]]
name = "rust-ros2-dataflow-topic-pub"
path = "../../../examples/ros2-bridge/rust/topic-pub/run.rs"
required-features = ["ros2-examples"]

[[example]]
name = "rust-ros2-dataflow-service-client"
path = "../../../examples/ros2-bridge/rust/service-client/run.rs"
required-features = ["ros2-examples"]

[[example]]
name = "rust-ros2-dataflow-service-server"
path = "../../../examples/ros2-bridge/rust/service-server/run.rs"
required-features = ["ros2-examples"]

[[example]]
name = "python-ros2-dataflow"
path = "../../../examples/ros2-bridge/python/turtle/run.rs"
required-features = ["ros2-examples"]

[[example]]
name = "cxx-ros2-dataflow"
path = "../../../examples/ros2-bridge/c++/turtle/run.rs"
required-features = ["ros2-examples"]

+ 1
- 0
libraries/message/src/cli_to_coordinator.rs View File

@@ -90,4 +90,5 @@ pub enum ControlRequest {
level: log::LevelFilter,
},
CliAndDefaultDaemonOnSameMachine,
GetNodeInfo,
}

+ 5
- 24
libraries/message/src/common.rs View File

@@ -94,32 +94,13 @@ impl From<LogMessageHelper> for LogMessage {
}
}

// Case-insensitive deserialization of `LogLevelOrStdout` from a plain string.
// Accepts "stdout" plus the usual log-level names; "warning" is an accepted
// alias for "warn". Anything else is rejected with a message listing the
// valid inputs.
impl<'de> Deserialize<'de> for LogLevelOrStdout {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Deserialize to an owned String first so we can lowercase it; this
        // makes e.g. "INFO" and "info" both acceptable.
        let s = String::deserialize(deserializer)?;

        match s.to_lowercase().as_str() {
            "stdout" => Ok(LogLevelOrStdout::Stdout),
            "trace" => Ok(LogLevelOrStdout::LogLevel(LogLevel::Trace)),
            "debug" => Ok(LogLevelOrStdout::LogLevel(LogLevel::Debug)),
            "info" => Ok(LogLevelOrStdout::LogLevel(LogLevel::Info)),
            "warn" | "warning" => Ok(LogLevelOrStdout::LogLevel(LogLevel::Warn)),
            "error" => Ok(LogLevelOrStdout::LogLevel(LogLevel::Error)),
            _ => Err(serde::de::Error::custom(format!(
                "invalid log level or stdout: '{}'. Expected one of: trace, debug, info, warn, error, stdout",
                s
            ))),
        }
    }
}

// NOTE(review): this diff hunk superimposes the old and new definitions of
// `LogLevelOrStdout` (two derive lines and a duplicated `LogLevel` variant).
// The surviving version appears to be the one deriving `serde::Deserialize`
// with `#[serde(rename_all = "UPPERCASE")]`, a `#[serde(rename = "stdout")]`
// `Stdout` variant, and an `#[serde(untagged)]` `LogLevel` variant — confirm
// against the applied commit.
#[derive(Debug, Clone, serde::Serialize, PartialEq, Eq, PartialOrd, Ord)]
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq, Eq, PartialOrd, Ord)]
#[serde(rename_all = "UPPERCASE")]
pub enum LogLevelOrStdout {
    LogLevel(LogLevel),
    #[serde(rename = "stdout")]
    Stdout,
    #[serde(untagged)]
    LogLevel(LogLevel),
}

impl From<LogLevel> for LogLevelOrStdout {


+ 25
- 0
libraries/message/src/coordinator_to_cli.rs View File

@@ -46,6 +46,31 @@ pub enum ControlRequestReply {
default_daemon: Option<IpAddr>,
cli: Option<IpAddr>,
},
NodeInfoList(Vec<NodeInfo>),
}

/// Per-node summary sent from the coordinator to the CLI
/// (presumably in reply to the `GetNodeInfo` control request — see
/// `cli_to_coordinator.rs`).
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
pub struct NodeInfo {
    /// Id of the dataflow the node belongs to.
    pub dataflow_id: Uuid,
    /// Human-readable dataflow name, if one was set.
    pub dataflow_name: Option<String>,
    /// Id of the node within its dataflow.
    pub node_id: NodeId,
    /// Daemon hosting the node.
    pub daemon_id: DaemonId,
    /// Resource metrics for the node, if the daemon reported any.
    pub metrics: Option<NodeMetricsInfo>,
}

/// Resource metrics for a node (from daemon)
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
pub struct NodeMetricsInfo {
    /// Process ID
    pub pid: u32,
    /// CPU usage percentage (0-100 per core)
    // NOTE(review): "per core" suggests this can exceed 100 on multi-core
    // hosts if usage is summed across cores — confirm against the daemon's
    // sampling code.
    pub cpu_usage: f32,
    /// Memory usage in megabytes
    pub memory_mb: f64,
    /// Disk read MB/s (if available)
    pub disk_read_mb_s: Option<f64>,
    /// Disk write MB/s (if available)
    pub disk_write_mb_s: Option<f64>,
}

#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]


+ 19
- 0
libraries/message/src/daemon_to_coordinator.rs View File

@@ -67,6 +67,25 @@ pub enum DaemonEvent {
Heartbeat,
Log(LogMessage),
Exit,
NodeMetrics {
dataflow_id: DataflowId,
metrics: BTreeMap<NodeId, NodeMetrics>,
},
}

/// Resource metrics for a node process
// Raw, byte-denominated counterpart of the CLI-facing `NodeMetricsInfo`
// (which reports megabytes / MB-per-second); presumably converted by the
// coordinator before being sent to the CLI — confirm against that code path.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct NodeMetrics {
    /// Process ID
    pub pid: u32,
    /// CPU usage percentage (0-100 per core)
    pub cpu_usage: f32,
    /// Memory usage in bytes
    pub memory_bytes: u64,
    /// Disk read bytes per second (if available)
    pub disk_read_bytes: Option<u64>,
    /// Disk write bytes per second (if available)
    pub disk_write_bytes: Option<u64>,
}

#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]


Loading…
Cancel
Save
Baidu
map