* [PATCH v1 1/2] dts: fabric requirements
@ 2023-04-03 11:46 Juraj Linkeš
2023-04-03 11:46 ` [PATCH v1 2/2] dts: replace pexpect with fabric Juraj Linkeš
2023-04-03 12:33 ` [PATCH v1 1/2] dts: fabric requirements Thomas Monjalon
0 siblings, 2 replies; 21+ messages in thread
From: Juraj Linkeš @ 2023-04-03 11:46 UTC (permalink / raw)
To: thomas, Honnappa.Nagarahalli, lijuan.tu, bruce.richardson,
wathsala.vithanage, jspewock, probb
Cc: dev, Juraj Linkeš
Replace pexpect with Fabric.
Signed-off-by: Juraj Linkeš <juraj.linkes@pantheon.tech>
---
dts/poetry.lock | 553 +++++++++++++++++++++++++++++++++++++++------
dts/pyproject.toml | 2 +-
2 files changed, 490 insertions(+), 65 deletions(-)
diff --git a/dts/poetry.lock b/dts/poetry.lock
index 0b2a007d4d..a800efcba1 100644
--- a/dts/poetry.lock
+++ b/dts/poetry.lock
@@ -1,3 +1,5 @@
+# This file is automatically @generated by Poetry and should not be changed by hand.
+
[[package]]
name = "attrs"
version = "22.1.0"
@@ -5,12 +7,51 @@ description = "Classes Without Boilerplate"
category = "main"
optional = false
python-versions = ">=3.5"
+files = [
+ {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"},
+ {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"},
+]
+
+[package.extras]
+dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"]
+docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
+tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"]
+tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
+
+[[package]]
+name = "bcrypt"
+version = "4.0.1"
+description = "Modern password hashing for your software and your servers"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"},
+ {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"},
+ {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"},
+ {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"},
+ {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"},
+ {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"},
+ {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"},
+ {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"},
+ {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"},
+ {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"},
+ {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"},
+ {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"},
+ {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"},
+ {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"},
+ {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"},
+ {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"},
+ {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"},
+ {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"},
+ {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"},
+ {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"},
+ {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"},
+]
[package.extras]
-dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
-docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
-tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
-tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"]
+tests = ["pytest (>=3.2.1,!=3.3.0)"]
+typecheck = ["mypy"]
[[package]]
name = "black"
@@ -19,6 +60,29 @@ description = "The uncompromising code formatter."
category = "dev"
optional = false
python-versions = ">=3.7"
+files = [
+ {file = "black-22.10.0-1fixedarch-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:5cc42ca67989e9c3cf859e84c2bf014f6633db63d1cbdf8fdb666dcd9e77e3fa"},
+ {file = "black-22.10.0-1fixedarch-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5d8f74030e67087b219b032aa33a919fae8806d49c867846bfacde57f43972ef"},
+ {file = "black-22.10.0-1fixedarch-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:197df8509263b0b8614e1df1756b1dd41be6738eed2ba9e9769f3880c2b9d7b6"},
+ {file = "black-22.10.0-1fixedarch-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:2644b5d63633702bc2c5f3754b1b475378fbbfb481f62319388235d0cd104c2d"},
+ {file = "black-22.10.0-1fixedarch-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:e41a86c6c650bcecc6633ee3180d80a025db041a8e2398dcc059b3afa8382cd4"},
+ {file = "black-22.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2039230db3c6c639bd84efe3292ec7b06e9214a2992cd9beb293d639c6402edb"},
+ {file = "black-22.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14ff67aec0a47c424bc99b71005202045dc09270da44a27848d534600ac64fc7"},
+ {file = "black-22.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:819dc789f4498ecc91438a7de64427c73b45035e2e3680c92e18795a839ebb66"},
+ {file = "black-22.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b9b29da4f564ba8787c119f37d174f2b69cdfdf9015b7d8c5c16121ddc054ae"},
+ {file = "black-22.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b49776299fece66bffaafe357d929ca9451450f5466e997a7285ab0fe28e3b"},
+ {file = "black-22.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:21199526696b8f09c3997e2b4db8d0b108d801a348414264d2eb8eb2532e540d"},
+ {file = "black-22.10.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e464456d24e23d11fced2bc8c47ef66d471f845c7b7a42f3bd77bf3d1789650"},
+ {file = "black-22.10.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9311e99228ae10023300ecac05be5a296f60d2fd10fff31cf5c1fa4ca4b1988d"},
+ {file = "black-22.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fba8a281e570adafb79f7755ac8721b6cf1bbf691186a287e990c7929c7692ff"},
+ {file = "black-22.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:915ace4ff03fdfff953962fa672d44be269deb2eaf88499a0f8805221bc68c87"},
+ {file = "black-22.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:444ebfb4e441254e87bad00c661fe32df9969b2bf224373a448d8aca2132b395"},
+ {file = "black-22.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:974308c58d057a651d182208a484ce80a26dac0caef2895836a92dd6ebd725e0"},
+ {file = "black-22.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ef3925f30e12a184889aac03d77d031056860ccae8a1e519f6cbb742736383"},
+ {file = "black-22.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:432247333090c8c5366e69627ccb363bc58514ae3e63f7fc75c54b1ea80fa7de"},
+ {file = "black-22.10.0-py3-none-any.whl", hash = "sha256:c957b2b4ea88587b46cf49d1dc17681c1e672864fd7af32fc1e9664d572b3458"},
+ {file = "black-22.10.0.tar.gz", hash = "sha256:f513588da599943e0cde4e32cc9879e825d58720d6557062d1098c5ad80080e1"},
+]
[package.dependencies]
click = ">=8.0.0"
@@ -33,6 +97,83 @@ d = ["aiohttp (>=3.7.4)"]
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
uvloop = ["uvloop (>=0.15.2)"]
+[[package]]
+name = "cffi"
+version = "1.15.1"
+description = "Foreign Function Interface for Python calling C code."
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+ {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"},
+ {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"},
+ {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"},
+ {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"},
+ {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"},
+ {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"},
+ {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"},
+ {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"},
+ {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"},
+ {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"},
+ {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"},
+ {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"},
+ {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"},
+ {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"},
+ {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"},
+ {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"},
+ {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"},
+ {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"},
+ {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"},
+ {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"},
+ {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"},
+ {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"},
+ {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"},
+ {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"},
+ {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"},
+ {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"},
+ {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"},
+ {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"},
+ {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"},
+ {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"},
+ {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"},
+ {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"},
+ {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"},
+ {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"},
+ {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"},
+ {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"},
+ {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"},
+ {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"},
+ {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"},
+ {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"},
+ {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"},
+ {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"},
+ {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"},
+ {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"},
+]
+
+[package.dependencies]
+pycparser = "*"
+
[[package]]
name = "click"
version = "8.1.3"
@@ -40,6 +181,10 @@ description = "Composable command line interface toolkit"
category = "dev"
optional = false
python-versions = ">=3.7"
+files = [
+ {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
+ {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"},
+]
[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}
@@ -51,6 +196,87 @@ description = "Cross-platform colored terminal text."
category = "dev"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+files = [
+ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "cryptography"
+version = "39.0.0"
+description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "cryptography-39.0.0-cp36-abi3-macosx_10_12_universal2.whl", hash = "sha256:c52a1a6f81e738d07f43dab57831c29e57d21c81a942f4602fac7ee21b27f288"},
+ {file = "cryptography-39.0.0-cp36-abi3-macosx_10_12_x86_64.whl", hash = "sha256:80ee674c08aaef194bc4627b7f2956e5ba7ef29c3cc3ca488cf15854838a8f72"},
+ {file = "cryptography-39.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:887cbc1ea60786e534b00ba8b04d1095f4272d380ebd5f7a7eb4cc274710fad9"},
+ {file = "cryptography-39.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f97109336df5c178ee7c9c711b264c502b905c2d2a29ace99ed761533a3460f"},
+ {file = "cryptography-39.0.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a6915075c6d3a5e1215eab5d99bcec0da26036ff2102a1038401d6ef5bef25b"},
+ {file = "cryptography-39.0.0-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:76c24dd4fd196a80f9f2f5405a778a8ca132f16b10af113474005635fe7e066c"},
+ {file = "cryptography-39.0.0-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:bae6c7f4a36a25291b619ad064a30a07110a805d08dc89984f4f441f6c1f3f96"},
+ {file = "cryptography-39.0.0-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:875aea1039d78557c7c6b4db2fe0e9d2413439f4676310a5f269dd342ca7a717"},
+ {file = "cryptography-39.0.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:f6c0db08d81ead9576c4d94bbb27aed8d7a430fa27890f39084c2d0e2ec6b0df"},
+ {file = "cryptography-39.0.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f3ed2d864a2fa1666e749fe52fb8e23d8e06b8012e8bd8147c73797c506e86f1"},
+ {file = "cryptography-39.0.0-cp36-abi3-win32.whl", hash = "sha256:f671c1bb0d6088e94d61d80c606d65baacc0d374e67bf895148883461cd848de"},
+ {file = "cryptography-39.0.0-cp36-abi3-win_amd64.whl", hash = "sha256:e324de6972b151f99dc078defe8fb1b0a82c6498e37bff335f5bc6b1e3ab5a1e"},
+ {file = "cryptography-39.0.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:754978da4d0457e7ca176f58c57b1f9de6556591c19b25b8bcce3c77d314f5eb"},
+ {file = "cryptography-39.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ee1fd0de9851ff32dbbb9362a4d833b579b4a6cc96883e8e6d2ff2a6bc7104f"},
+ {file = "cryptography-39.0.0-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:fec8b932f51ae245121c4671b4bbc030880f363354b2f0e0bd1366017d891458"},
+ {file = "cryptography-39.0.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:407cec680e811b4fc829de966f88a7c62a596faa250fc1a4b520a0355b9bc190"},
+ {file = "cryptography-39.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7dacfdeee048814563eaaec7c4743c8aea529fe3dd53127313a792f0dadc1773"},
+ {file = "cryptography-39.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ad04f413436b0781f20c52a661660f1e23bcd89a0e9bb1d6d20822d048cf2856"},
+ {file = "cryptography-39.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50386acb40fbabbceeb2986332f0287f50f29ccf1497bae31cf5c3e7b4f4b34f"},
+ {file = "cryptography-39.0.0-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:e5d71c5d5bd5b5c3eebcf7c5c2bb332d62ec68921a8c593bea8c394911a005ce"},
+ {file = "cryptography-39.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:844ad4d7c3850081dffba91cdd91950038ee4ac525c575509a42d3fc806b83c8"},
+ {file = "cryptography-39.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e0a05aee6a82d944f9b4edd6a001178787d1546ec7c6223ee9a848a7ade92e39"},
+ {file = "cryptography-39.0.0.tar.gz", hash = "sha256:f964c7dcf7802d133e8dbd1565914fa0194f9d683d82411989889ecd701e8adf"},
+]
+
+[package.dependencies]
+cffi = ">=1.12"
+
+[package.extras]
+docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1,!=5.2.0,!=5.2.0.post0)", "sphinx-rtd-theme"]
+docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"]
+pep8test = ["black", "ruff"]
+sdist = ["setuptools-rust (>=0.11.4)"]
+ssh = ["bcrypt (>=3.1.5)"]
+test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"]
+
+[[package]]
+name = "fabric"
+version = "2.7.1"
+description = "High level SSH command execution"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+ {file = "fabric-2.7.1-py2.py3-none-any.whl", hash = "sha256:7610362318ef2d391cc65d4befb684393975d889ed5720f23499394ec0e136fa"},
+ {file = "fabric-2.7.1.tar.gz", hash = "sha256:76f8fef59cf2061dbd849bbce4fe49bdd820884385004b0ca59136ac3db129e4"},
+]
+
+[package.dependencies]
+invoke = ">=1.3,<2.0"
+paramiko = ">=2.4"
+pathlib2 = "*"
+
+[package.extras]
+pytest = ["mock (>=2.0.0,<3.0)", "pytest (>=3.2.5,<4.0)"]
+testing = ["mock (>=2.0.0,<3.0)"]
+
+[[package]]
+name = "invoke"
+version = "1.7.3"
+description = "Pythonic task execution"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+ {file = "invoke-1.7.3-py3-none-any.whl", hash = "sha256:d9694a865764dd3fd91f25f7e9a97fb41666e822bbb00e670091e3f43933574d"},
+ {file = "invoke-1.7.3.tar.gz", hash = "sha256:41b428342d466a82135d5ab37119685a989713742be46e42a3a399d685579314"},
+]
[[package]]
name = "isort"
@@ -59,12 +285,16 @@ description = "A Python utility / library to sort Python imports."
category = "dev"
optional = false
python-versions = ">=3.6.1,<4.0"
+files = [
+ {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"},
+ {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"},
+]
[package.extras]
-pipfile_deprecated_finder = ["pipreqs", "requirementslib"]
-requirements_deprecated_finder = ["pipreqs", "pip-api"]
colors = ["colorama (>=0.4.3,<0.5.0)"]
+pipfile-deprecated-finder = ["pipreqs", "requirementslib"]
plugins = ["setuptools"]
+requirements-deprecated-finder = ["pip-api", "pipreqs"]
[[package]]
name = "jsonpatch"
@@ -73,6 +303,10 @@ description = "Apply JSON-Patches (RFC 6902)"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+ {file = "jsonpatch-1.32-py2.py3-none-any.whl", hash = "sha256:26ac385719ac9f54df8a2f0827bb8253aa3ea8ab7b3368457bcdb8c14595a397"},
+ {file = "jsonpatch-1.32.tar.gz", hash = "sha256:b6ddfe6c3db30d81a96aaeceb6baf916094ffa23d7dd5fa2c13e13f8b6e600c2"},
+]
[package.dependencies]
jsonpointer = ">=1.9"
@@ -84,6 +318,10 @@ description = "Identify specific nodes in a JSON document (RFC 6901)"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "jsonpointer-2.3-py2.py3-none-any.whl", hash = "sha256:51801e558539b4e9cd268638c078c6c5746c9ac96bc38152d443400e4f3793e9"},
+ {file = "jsonpointer-2.3.tar.gz", hash = "sha256:97cba51526c829282218feb99dab1b1e6bdf8efd1c43dc9d57be093c0d69c99a"},
+]
[[package]]
name = "jsonschema"
@@ -92,6 +330,10 @@ description = "An implementation of JSON Schema validation for Python"
category = "main"
optional = false
python-versions = ">=3.7"
+files = [
+ {file = "jsonschema-4.17.0-py3-none-any.whl", hash = "sha256:f660066c3966db7d6daeaea8a75e0b68237a48e51cf49882087757bb59916248"},
+ {file = "jsonschema-4.17.0.tar.gz", hash = "sha256:5bfcf2bca16a087ade17e02b282d34af7ccd749ef76241e7f9bd7c0cb8a9424d"},
+]
[package.dependencies]
attrs = ">=17.4.0"
@@ -108,6 +350,10 @@ description = "McCabe checker, plugin for flake8"
category = "dev"
optional = false
python-versions = ">=3.6"
+files = [
+ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
+ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
+]
[[package]]
name = "mypy"
@@ -116,6 +362,31 @@ description = "Optional static typing for Python"
category = "dev"
optional = false
python-versions = ">=3.6"
+files = [
+ {file = "mypy-0.961-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:697540876638ce349b01b6786bc6094ccdaba88af446a9abb967293ce6eaa2b0"},
+ {file = "mypy-0.961-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b117650592e1782819829605a193360a08aa99f1fc23d1d71e1a75a142dc7e15"},
+ {file = "mypy-0.961-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bdd5ca340beffb8c44cb9dc26697628d1b88c6bddf5c2f6eb308c46f269bb6f3"},
+ {file = "mypy-0.961-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3e09f1f983a71d0672bbc97ae33ee3709d10c779beb613febc36805a6e28bb4e"},
+ {file = "mypy-0.961-cp310-cp310-win_amd64.whl", hash = "sha256:e999229b9f3198c0c880d5e269f9f8129c8862451ce53a011326cad38b9ccd24"},
+ {file = "mypy-0.961-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b24be97351084b11582fef18d79004b3e4db572219deee0212078f7cf6352723"},
+ {file = "mypy-0.961-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f4a21d01fc0ba4e31d82f0fff195682e29f9401a8bdb7173891070eb260aeb3b"},
+ {file = "mypy-0.961-cp36-cp36m-win_amd64.whl", hash = "sha256:439c726a3b3da7ca84a0199a8ab444cd8896d95012c4a6c4a0d808e3147abf5d"},
+ {file = "mypy-0.961-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5a0b53747f713f490affdceef835d8f0cb7285187a6a44c33821b6d1f46ed813"},
+ {file = "mypy-0.961-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e9f70df36405c25cc530a86eeda1e0867863d9471fe76d1273c783df3d35c2e"},
+ {file = "mypy-0.961-cp37-cp37m-win_amd64.whl", hash = "sha256:b88f784e9e35dcaa075519096dc947a388319cb86811b6af621e3523980f1c8a"},
+ {file = "mypy-0.961-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d5aaf1edaa7692490f72bdb9fbd941fbf2e201713523bdb3f4038be0af8846c6"},
+ {file = "mypy-0.961-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9f5f5a74085d9a81a1f9c78081d60a0040c3efb3f28e5c9912b900adf59a16e6"},
+ {file = "mypy-0.961-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f4b794db44168a4fc886e3450201365c9526a522c46ba089b55e1f11c163750d"},
+ {file = "mypy-0.961-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:64759a273d590040a592e0f4186539858c948302c653c2eac840c7a3cd29e51b"},
+ {file = "mypy-0.961-cp38-cp38-win_amd64.whl", hash = "sha256:63e85a03770ebf403291ec50097954cc5caf2a9205c888ce3a61bd3f82e17569"},
+ {file = "mypy-0.961-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f1332964963d4832a94bebc10f13d3279be3ce8f6c64da563d6ee6e2eeda932"},
+ {file = "mypy-0.961-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:006be38474216b833eca29ff6b73e143386f352e10e9c2fbe76aa8549e5554f5"},
+ {file = "mypy-0.961-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9940e6916ed9371809b35b2154baf1f684acba935cd09928952310fbddaba648"},
+ {file = "mypy-0.961-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a5ea0875a049de1b63b972456542f04643daf320d27dc592d7c3d9cd5d9bf950"},
+ {file = "mypy-0.961-cp39-cp39-win_amd64.whl", hash = "sha256:1ece702f29270ec6af25db8cf6185c04c02311c6bb21a69f423d40e527b75c56"},
+ {file = "mypy-0.961-py3-none-any.whl", hash = "sha256:03c6cc893e7563e7b2949b969e63f02c000b32502a1b4d1314cabe391aa87d66"},
+ {file = "mypy-0.961.tar.gz", hash = "sha256:f730d56cb924d371c26b8eaddeea3cc07d78ff51c521c6d04899ac6904b75492"},
+]
[package.dependencies]
mypy-extensions = ">=0.4.3"
@@ -134,25 +405,61 @@ description = "Experimental type system extensions for programs checked with the
category = "dev"
optional = false
python-versions = "*"
+files = [
+ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
+ {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
+]
[[package]]
-name = "pathspec"
-version = "0.10.1"
-description = "Utility library for gitignore style pattern matching of file paths."
-category = "dev"
+name = "paramiko"
+version = "2.12.0"
+description = "SSH2 protocol library"
+category = "main"
optional = false
-python-versions = ">=3.7"
+python-versions = "*"
+files = [
+ {file = "paramiko-2.12.0-py2.py3-none-any.whl", hash = "sha256:b2df1a6325f6996ef55a8789d0462f5b502ea83b3c990cbb5bbe57345c6812c4"},
+ {file = "paramiko-2.12.0.tar.gz", hash = "sha256:376885c05c5d6aa6e1f4608aac2a6b5b0548b1add40274477324605903d9cd49"},
+]
+
+[package.dependencies]
+bcrypt = ">=3.1.3"
+cryptography = ">=2.5"
+pynacl = ">=1.0.1"
+six = "*"
+
+[package.extras]
+all = ["bcrypt (>=3.1.3)", "gssapi (>=1.4.1)", "invoke (>=1.3)", "pyasn1 (>=0.1.7)", "pynacl (>=1.0.1)", "pywin32 (>=2.1.8)"]
+ed25519 = ["bcrypt (>=3.1.3)", "pynacl (>=1.0.1)"]
+gssapi = ["gssapi (>=1.4.1)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8)"]
+invoke = ["invoke (>=1.3)"]
[[package]]
-name = "pexpect"
-version = "4.8.0"
-description = "Pexpect allows easy control of interactive console applications."
+name = "pathlib2"
+version = "2.3.7.post1"
+description = "Object-oriented filesystem paths"
category = "main"
optional = false
python-versions = "*"
+files = [
+ {file = "pathlib2-2.3.7.post1-py2.py3-none-any.whl", hash = "sha256:5266a0fd000452f1b3467d782f079a4343c63aaa119221fbdc4e39577489ca5b"},
+ {file = "pathlib2-2.3.7.post1.tar.gz", hash = "sha256:9fe0edad898b83c0c3e199c842b27ed216645d2e177757b2dd67384d4113c641"},
+]
[package.dependencies]
-ptyprocess = ">=0.5"
+six = "*"
+
+[[package]]
+name = "pathspec"
+version = "0.10.1"
+description = "Utility library for gitignore style pattern matching of file paths."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "pathspec-0.10.1-py3-none-any.whl", hash = "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93"},
+ {file = "pathspec-0.10.1.tar.gz", hash = "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"},
+]
[[package]]
name = "platformdirs"
@@ -161,18 +468,14 @@ description = "A small Python module for determining appropriate platform-specif
category = "dev"
optional = false
python-versions = ">=3.7"
+files = [
+ {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
+ {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},
+]
[package.extras]
-docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"]
-test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"]
-
-[[package]]
-name = "ptyprocess"
-version = "0.7.0"
-description = "Run a subprocess in a pseudo terminal"
-category = "main"
-optional = false
-python-versions = "*"
+docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"]
+test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"]
[[package]]
name = "pycodestyle"
@@ -181,6 +484,22 @@ description = "Python style guide checker"
category = "dev"
optional = false
python-versions = ">=3.6"
+files = [
+ {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"},
+ {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"},
+]
+
+[[package]]
+name = "pycparser"
+version = "2.21"
+description = "C parser in Python"
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
+ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
+]
[[package]]
name = "pydocstyle"
@@ -189,6 +508,10 @@ description = "Python docstring style checker"
category = "dev"
optional = false
python-versions = ">=3.6"
+files = [
+ {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"},
+ {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"},
+]
[package.dependencies]
snowballstemmer = "*"
@@ -203,6 +526,10 @@ description = "passive checker of Python programs"
category = "dev"
optional = false
python-versions = ">=3.6"
+files = [
+ {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"},
+ {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"},
+]
[[package]]
name = "pylama"
@@ -211,6 +538,10 @@ description = "Code audit tool for python"
category = "dev"
optional = false
python-versions = ">=3.7"
+files = [
+ {file = "pylama-8.4.1-py3-none-any.whl", hash = "sha256:5bbdbf5b620aba7206d688ed9fc917ecd3d73e15ec1a89647037a09fa3a86e60"},
+ {file = "pylama-8.4.1.tar.gz", hash = "sha256:2d4f7aecfb5b7466216d48610c7d6bad1c3990c29cdd392ad08259b161e486f6"},
+]
[package.dependencies]
mccabe = ">=0.7.0"
@@ -219,15 +550,42 @@ pydocstyle = ">=6.1.1"
pyflakes = ">=2.5.0"
[package.extras]
-all = ["pylint", "eradicate", "radon", "mypy", "vulture"]
+all = ["eradicate", "mypy", "pylint", "radon", "vulture"]
eradicate = ["eradicate"]
mypy = ["mypy"]
pylint = ["pylint"]
radon = ["radon"]
-tests = ["pytest (>=7.1.2)", "pytest-mypy", "eradicate (>=2.0.0)", "radon (>=5.1.0)", "mypy", "pylint (>=2.11.1)", "pylama-quotes", "toml", "vulture", "types-setuptools", "types-toml"]
+tests = ["eradicate (>=2.0.0)", "mypy", "pylama-quotes", "pylint (>=2.11.1)", "pytest (>=7.1.2)", "pytest-mypy", "radon (>=5.1.0)", "toml", "types-setuptools", "types-toml", "vulture"]
toml = ["toml (>=0.10.2)"]
vulture = ["vulture"]
+[[package]]
+name = "pynacl"
+version = "1.5.0"
+description = "Python binding to the Networking and Cryptography (NaCl) library"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"},
+ {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"},
+ {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"},
+ {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"},
+ {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"},
+ {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"},
+ {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"},
+ {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"},
+ {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"},
+ {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"},
+]
+
+[package.dependencies]
+cffi = ">=1.4.1"
+
+[package.extras]
+docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"]
+tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"]
+
[[package]]
name = "pyrsistent"
version = "0.19.1"
@@ -235,6 +593,30 @@ description = "Persistent/Functional/Immutable data structures"
category = "main"
optional = false
python-versions = ">=3.7"
+files = [
+ {file = "pyrsistent-0.19.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8a34a2a8b220247658f7ced871197c390b3a6371d796a5869ab1c62abe0be527"},
+ {file = "pyrsistent-0.19.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73b2db09fe15b6e444c0bd566a125a385ca6493456224ce8b367d734f079f576"},
+ {file = "pyrsistent-0.19.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c58bd93c4d502f52938fccdbe6c9d70df3a585c6b39d900fab5f76b604282aa"},
+ {file = "pyrsistent-0.19.1-cp310-cp310-win32.whl", hash = "sha256:bc33fc20ddfd89b86b7710142963490d8c4ee8307ed6cc5e189a58fa72390eb9"},
+ {file = "pyrsistent-0.19.1-cp310-cp310-win_amd64.whl", hash = "sha256:06579d46d8ad69529b28f88711191a7fe7103c92d04a9f338dc754f71b92efa0"},
+ {file = "pyrsistent-0.19.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1d0620474d509172e1c50b79d5626bfe1899f174bf650186a50c6ce31289ff52"},
+ {file = "pyrsistent-0.19.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:945297fc344fef4d540135180ce7babeb2291d124698cc6282f3eac624aa5e82"},
+ {file = "pyrsistent-0.19.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16ac5ab3d9db78fed40c884d67079524e4cf8276639211ad9e6fa73e727727e"},
+ {file = "pyrsistent-0.19.1-cp37-cp37m-win32.whl", hash = "sha256:327f99800d04a9abcf580daecfd6dd4bfdb4a7e61c71bf2cd1189ef1ca44bade"},
+ {file = "pyrsistent-0.19.1-cp37-cp37m-win_amd64.whl", hash = "sha256:39f15ad754384e744ac8b00805913bfa66c41131faaa3e4c45c4af0731f3e8f6"},
+ {file = "pyrsistent-0.19.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:73d4ec2997716af3c8f28f7e3d3a565d273a598982d2fe95639e07ce4db5da45"},
+ {file = "pyrsistent-0.19.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62a41037387ae849a493cd945e22b34d167a843d57f75b07dbfad6d96cef485c"},
+ {file = "pyrsistent-0.19.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6df99c3578dc4eb33f3eb26bc28277ab40a720b71649d940bff9c1f704377772"},
+ {file = "pyrsistent-0.19.1-cp38-cp38-win32.whl", hash = "sha256:aaa869d9199d7d4c70a57678aff21654cc179c0c32bcfde87f1d65d0ff47e520"},
+ {file = "pyrsistent-0.19.1-cp38-cp38-win_amd64.whl", hash = "sha256:2032d971711643049b4f2c3ca5155a855d507d73bad26dac8d4349e5c5dd6758"},
+ {file = "pyrsistent-0.19.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6ef7430e45c5fa0bb6c361cada4a08ed9c184b5ed086815a85c3bc8c5054566b"},
+ {file = "pyrsistent-0.19.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73e3e2fd9da009d558050697cc22ad689f89a14a2ef2e67304628a913e59c947"},
+ {file = "pyrsistent-0.19.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c641111c3f110379bb9001dbb26b34eb8cafab3d0fa855dc161c391461a4aab"},
+ {file = "pyrsistent-0.19.1-cp39-cp39-win32.whl", hash = "sha256:62b704f18526a8fc243152de8f3f40ae39c5172baff10f50c0c5d5331d6f2342"},
+ {file = "pyrsistent-0.19.1-cp39-cp39-win_amd64.whl", hash = "sha256:890f577aec554f142e01daf890221d10e4f93a9b1107998d631d3f075b55e8f8"},
+ {file = "pyrsistent-0.19.1-py3-none-any.whl", hash = "sha256:8bc23e9ddcb523c3ffb4d712aa0bd5bc67b34ff4e2b23fb557012171bdb4013a"},
+ {file = "pyrsistent-0.19.1.tar.gz", hash = "sha256:cfe6d8b293d123255fd3b475b5f4e851eb5cbaee2064c8933aa27344381744ae"},
+]
[[package]]
name = "pyyaml"
@@ -243,6 +625,60 @@ description = "YAML parser and emitter for Python"
category = "main"
optional = false
python-versions = ">=3.6"
+files = [
+ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
+ {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"},
+ {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"},
+ {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"},
+ {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"},
+ {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"},
+ {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"},
+ {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"},
+ {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"},
+ {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"},
+ {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"},
+ {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"},
+ {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"},
+ {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"},
+ {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"},
+ {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"},
+ {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"},
+ {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"},
+ {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"},
+ {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"},
+ {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"},
+ {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"},
+ {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"},
+ {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"},
+ {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"},
+ {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"},
+ {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"},
+ {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"},
+ {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"},
+ {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"},
+ {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"},
+ {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"},
+ {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"},
+ {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"},
+ {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"},
+ {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"},
+ {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"},
+ {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"},
+ {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"},
+ {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
+]
+
+[[package]]
+name = "six"
+version = "1.16.0"
+description = "Python 2 and 3 compatibility utilities"
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+]
[[package]]
name = "snowballstemmer"
@@ -251,6 +687,10 @@ description = "This package provides 29 stemmers for 28 languages generated from
category = "dev"
optional = false
python-versions = "*"
+files = [
+ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"},
+ {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"},
+]
[[package]]
name = "toml"
@@ -259,6 +699,10 @@ description = "Python Library for Tom's Obvious, Minimal Language"
category = "dev"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
+ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
+]
[[package]]
name = "tomli"
@@ -267,6 +711,10 @@ description = "A lil' TOML parser"
category = "dev"
optional = false
python-versions = ">=3.7"
+files = [
+ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
+ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
+]
[[package]]
name = "types-pyyaml"
@@ -275,6 +723,10 @@ description = "Typing stubs for PyYAML"
category = "main"
optional = false
python-versions = "*"
+files = [
+ {file = "types-PyYAML-6.0.12.1.tar.gz", hash = "sha256:70ccaafcf3fb404d57bffc1529fdd86a13e8b4f2cf9fc3ee81a6408ce0ad59d2"},
+ {file = "types_PyYAML-6.0.12.1-py3-none-any.whl", hash = "sha256:aaf5e51444c13bd34104695a89ad9c48412599a4f615d65a60e649109714f608"},
+]
[[package]]
name = "typing-extensions"
@@ -283,6 +735,10 @@ description = "Backported and Experimental Type Hints for Python 3.7+"
category = "dev"
optional = false
python-versions = ">=3.7"
+files = [
+ {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
+ {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
+]
[[package]]
name = "warlock"
@@ -291,47 +747,16 @@ description = "Python object model built on JSON schema and JSON patch."
category = "main"
optional = false
python-versions = ">=3.7,<4.0"
+files = [
+ {file = "warlock-2.0.1-py3-none-any.whl", hash = "sha256:448df959cec31904f686ac8c6b1dfab80f0cdabce3d303be517dd433eeebf012"},
+ {file = "warlock-2.0.1.tar.gz", hash = "sha256:99abbf9525b2a77f2cde896d3a9f18a5b4590db063db65e08207694d2e0137fc"},
+]
[package.dependencies]
jsonpatch = ">=1,<2"
jsonschema = ">=4,<5"
[metadata]
-lock-version = "1.1"
+lock-version = "2.0"
python-versions = "^3.10"
-content-hash = "a0f040b07fc6ce4deb0be078b9a88c2a465cb6bccb9e260a67e92c2403e2319f"
-
-[metadata.files]
-attrs = []
-black = []
-click = []
-colorama = []
-isort = []
-jsonpatch = []
-jsonpointer = []
-jsonschema = []
-mccabe = []
-mypy = []
-mypy-extensions = []
-pathspec = []
-pexpect = [
- {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"},
- {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"},
-]
-platformdirs = [
- {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
- {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},
-]
-ptyprocess = []
-pycodestyle = []
-pydocstyle = []
-pyflakes = []
-pylama = []
-pyrsistent = []
-pyyaml = []
-snowballstemmer = []
-toml = []
-tomli = []
-types-pyyaml = []
-typing-extensions = []
-warlock = []
+content-hash = "719c43bcaa5d181921debda884f8f714063df0b2336d61e9f64ecab034e8b139"
diff --git a/dts/pyproject.toml b/dts/pyproject.toml
index a136c91e5e..50bcdb327a 100644
--- a/dts/pyproject.toml
+++ b/dts/pyproject.toml
@@ -9,10 +9,10 @@ authors = ["Owen Hilyard <ohilyard@iol.unh.edu>", "dts@dpdk.org"]
[tool.poetry.dependencies]
python = "^3.10"
-pexpect = "^4.8.0"
warlock = "^2.0.1"
PyYAML = "^6.0"
types-PyYAML = "^6.0.8"
+fabric = "^2.7.1"
[tool.poetry.dev-dependencies]
mypy = "^0.961"
--
2.30.2
^ permalink raw reply [flat|nested] 21+ messages in thread
* [PATCH v1 2/2] dts: replace pexpect with fabric
2023-04-03 11:46 [PATCH v1 1/2] dts: fabric requirements Juraj Linkeš
@ 2023-04-03 11:46 ` Juraj Linkeš
2023-04-24 13:35 ` [PATCH v2] " Juraj Linkeš
2023-04-03 12:33 ` [PATCH v1 1/2] dts: fabric requirements Thomas Monjalon
1 sibling, 1 reply; 21+ messages in thread
From: Juraj Linkeš @ 2023-04-03 11:46 UTC (permalink / raw)
To: thomas, Honnappa.Nagarahalli, lijuan.tu, bruce.richardson,
wathsala.vithanage, jspewock, probb
Cc: dev, Juraj Linkeš
Pexpect is not a dedicated SSH connection library while Fabric is. With
Fabric, all SSH-related logic is provided and we can just focus on
what's DTS specific.
Signed-off-by: Juraj Linkeš <juraj.linkes@pantheon.tech>
---
doc/guides/tools/dts.rst | 29 +-
dts/conf.yaml | 2 +-
dts/framework/exception.py | 10 +-
dts/framework/remote_session/linux_session.py | 31 +-
dts/framework/remote_session/os_session.py | 51 +++-
dts/framework/remote_session/posix_session.py | 48 +--
.../remote_session/remote/remote_session.py | 35 ++-
.../remote_session/remote/ssh_session.py | 287 ++++++------------
dts/framework/testbed_model/sut_node.py | 12 +-
dts/framework/utils.py | 9 -
10 files changed, 237 insertions(+), 277 deletions(-)
diff --git a/doc/guides/tools/dts.rst b/doc/guides/tools/dts.rst
index ebd6dceb6a..d15826c098 100644
--- a/doc/guides/tools/dts.rst
+++ b/doc/guides/tools/dts.rst
@@ -95,9 +95,14 @@ Setting up DTS environment
#. **SSH Connection**
- DTS uses Python pexpect for SSH connections between DTS environment and the other hosts.
- The pexpect implementation is a wrapper around the ssh command in the DTS environment.
- This means it'll use the SSH agent providing the ssh command and its keys.
+ DTS uses the Fabric Python library for SSH connections between DTS environment
+ and the other hosts.
+ The authentication method used is pubkey authentication.
+ Fabric tries to use a passed key/certificate,
+ then any key it can get through an SSH agent,
+ then any "id_rsa", "id_dsa" or "id_ecdsa" key discoverable in ``~/.ssh/``
+ (with any matching OpenSSH-style certificates).
+ DTS doesn't pass any keys, so Fabric tries to use the other two methods.
Setting up System Under Test
@@ -132,6 +137,21 @@ There are two areas that need to be set up on a System Under Test:
It's possible to use the hugepage configuration already present on the SUT.
If you wish to do so, don't specify the hugepage configuration in the DTS config file.
+#. **User with administrator privileges**
+
+.. _sut_admin_user:
+
+ DTS needs administrator privileges to run DPDK applications (such as testpmd) on the SUT.
+ The SUT user must be able run commands in privileged mode without asking for password.
+ On most Linux distributions, it's a matter of setting up passwordless sudo:
+
+ #. Run ``sudo visudo`` and check that it contains ``%sudo ALL=(ALL:ALL) NOPASSWD:ALL``.
+
+ #. Add the SUT user to the sudo group with:
+
+ .. code-block:: console
+
+ sudo usermod -aG sudo <sut_user>
Running DTS
-----------
@@ -151,7 +171,8 @@ which is a template that illustrates what can be configured in DTS:
:start-at: executions:
-The user must be root or any other user with prompt starting with ``#``.
+The user must have :ref:`administrator privileges <sut_admin_user>`
+which don't require password authentication.
The other fields are mostly self-explanatory
and documented in more detail in ``dts/framework/config/conf_yaml_schema.json``.
diff --git a/dts/conf.yaml b/dts/conf.yaml
index a9bd8a3ecf..129801d87c 100644
--- a/dts/conf.yaml
+++ b/dts/conf.yaml
@@ -16,7 +16,7 @@ executions:
nodes:
- name: "SUT 1"
hostname: sut1.change.me.localhost
- user: root
+ user: dtsuser
arch: x86_64
os: linux
lcores: ""
diff --git a/dts/framework/exception.py b/dts/framework/exception.py
index ca353d98fc..44ff4e979a 100644
--- a/dts/framework/exception.py
+++ b/dts/framework/exception.py
@@ -62,13 +62,19 @@ class SSHConnectionError(DTSError):
"""
host: str
+ errors: list[str]
severity: ClassVar[ErrorSeverity] = ErrorSeverity.SSH_ERR
- def __init__(self, host: str):
+ def __init__(self, host: str, errors: list[str] | None = None):
self.host = host
+ self.errors = [] if errors is None else errors
def __str__(self) -> str:
- return f"Error trying to connect with {self.host}"
+ message = f"Error trying to connect with {self.host}."
+ if self.errors:
+ message += f" Errors encountered while retrying: {', '.join(self.errors)}"
+
+ return message
class SSHSessionDeadError(DTSError):
diff --git a/dts/framework/remote_session/linux_session.py b/dts/framework/remote_session/linux_session.py
index a1e3bc3a92..f13f399121 100644
--- a/dts/framework/remote_session/linux_session.py
+++ b/dts/framework/remote_session/linux_session.py
@@ -14,10 +14,11 @@ class LinuxSession(PosixSession):
The implementation of non-Posix compliant parts of Linux remote sessions.
"""
+ def _get_privileged_command(self, command: str) -> str:
+ return f"sudo -- sh -c '{command}'"
+
def get_remote_cpus(self, use_first_core: bool) -> list[LogicalCore]:
- cpu_info = self.remote_session.send_command(
- "lscpu -p=CPU,CORE,SOCKET,NODE|grep -v \\#"
- ).stdout
+ cpu_info = self.send_command("lscpu -p=CPU,CORE,SOCKET,NODE|grep -v \\#").stdout
lcores = []
for cpu_line in cpu_info.splitlines():
lcore, core, socket, node = map(int, cpu_line.split(","))
@@ -45,20 +46,20 @@ def setup_hugepages(self, hugepage_amount: int, force_first_numa: bool) -> None:
self._mount_huge_pages()
def _get_hugepage_size(self) -> int:
- hugepage_size = self.remote_session.send_command(
+ hugepage_size = self.send_command(
"awk '/Hugepagesize/ {print $2}' /proc/meminfo"
).stdout
return int(hugepage_size)
def _get_hugepages_total(self) -> int:
- hugepages_total = self.remote_session.send_command(
+ hugepages_total = self.send_command(
"awk '/HugePages_Total/ { print $2 }' /proc/meminfo"
).stdout
return int(hugepages_total)
def _get_numa_nodes(self) -> list[int]:
try:
- numa_count = self.remote_session.send_command(
+ numa_count = self.send_command(
"cat /sys/devices/system/node/online", verify=True
).stdout
numa_range = expand_range(numa_count)
@@ -70,14 +71,12 @@ def _get_numa_nodes(self) -> list[int]:
def _mount_huge_pages(self) -> None:
self._logger.info("Re-mounting Hugepages.")
hugapge_fs_cmd = "awk '/hugetlbfs/ { print $2 }' /proc/mounts"
- self.remote_session.send_command(f"umount $({hugapge_fs_cmd})")
- result = self.remote_session.send_command(hugapge_fs_cmd)
+ self.send_command(f"umount $({hugapge_fs_cmd})")
+ result = self.send_command(hugapge_fs_cmd)
if result.stdout == "":
remote_mount_path = "/mnt/huge"
- self.remote_session.send_command(f"mkdir -p {remote_mount_path}")
- self.remote_session.send_command(
- f"mount -t hugetlbfs nodev {remote_mount_path}"
- )
+ self.send_command(f"mkdir -p {remote_mount_path}")
+ self.send_command(f"mount -t hugetlbfs nodev {remote_mount_path}")
def _supports_numa(self) -> bool:
# the system supports numa if self._numa_nodes is non-empty and there are more
@@ -94,14 +93,12 @@ def _configure_huge_pages(
)
if force_first_numa and self._supports_numa():
# clear non-numa hugepages
- self.remote_session.send_command(
- f"echo 0 | sudo tee {hugepage_config_path}"
- )
+ self.send_command(f"echo 0 | tee {hugepage_config_path}", privileged=True)
hugepage_config_path = (
f"/sys/devices/system/node/node{self._numa_nodes[0]}/hugepages"
f"/hugepages-{size}kB/nr_hugepages"
)
- self.remote_session.send_command(
- f"echo {amount} | sudo tee {hugepage_config_path}"
+ self.send_command(
+ f"echo {amount} | tee {hugepage_config_path}", privileged=True
)
diff --git a/dts/framework/remote_session/os_session.py b/dts/framework/remote_session/os_session.py
index 4c48ae2567..bfd70bd480 100644
--- a/dts/framework/remote_session/os_session.py
+++ b/dts/framework/remote_session/os_session.py
@@ -10,7 +10,7 @@
from framework.logger import DTSLOG
from framework.settings import SETTINGS
from framework.testbed_model import LogicalCore
-from framework.utils import EnvVarsDict, MesonArgs
+from framework.utils import MesonArgs
from .remote import CommandResult, RemoteSession, create_remote_session
@@ -53,17 +53,32 @@ def is_alive(self) -> bool:
def send_command(
self,
command: str,
- timeout: float,
+ timeout: float = SETTINGS.timeout,
+ privileged: bool = False,
verify: bool = False,
- env: EnvVarsDict | None = None,
+ env: dict | None = None,
) -> CommandResult:
"""
An all-purpose API in case the command to be executed is already
OS-agnostic, such as when the path to the executed command has been
constructed beforehand.
"""
+ if privileged:
+ command = self._get_privileged_command(command)
+
return self.remote_session.send_command(command, timeout, verify, env)
+ @abstractmethod
+ def _get_privileged_command(self, command: str) -> str:
+ """Modify the command so that it executes with administrative privileges.
+
+ Args:
+ command: The command to modify.
+
+ Returns:
+ The modified command that executes with administrative privileges.
+ """
+
@abstractmethod
def guess_dpdk_remote_dir(self, remote_dir) -> PurePath:
"""
@@ -90,17 +105,35 @@ def join_remote_path(self, *args: str | PurePath) -> PurePath:
"""
@abstractmethod
- def copy_file(
+ def copy_from(
self,
source_file: str | PurePath,
destination_file: str | PurePath,
- source_remote: bool = False,
) -> None:
+ """Copy a file from the remote Node to the local filesystem.
+
+ Copy source_file from the remote Node associated with this remote
+ session to destination_file on the local filesystem.
+
+ Args:
+ source_file: the file on the remote Node.
+ destination_file: a file or directory path on the local filesystem.
"""
+
+ @abstractmethod
+ def copy_to(
+ self,
+ source_file: str | PurePath,
+ destination_file: str | PurePath,
+ ) -> None:
+ """Copy a file from local filesystem to the remote Node.
+
Copy source_file from local filesystem to destination_file
- on the remote Node associated with the remote session.
- If source_remote is True, reverse the direction - copy source_file from the
- associated remote Node to destination_file on local storage.
+ on the remote Node associated with this remote session.
+
+ Args:
+ source_file: the file on the local filesystem.
+ destination_file: a file or directory path on the remote Node.
"""
@abstractmethod
@@ -128,7 +161,7 @@ def extract_remote_tarball(
@abstractmethod
def build_dpdk(
self,
- env_vars: EnvVarsDict,
+ env_vars: dict,
meson_args: MesonArgs,
remote_dpdk_dir: str | PurePath,
remote_dpdk_build_dir: str | PurePath,
diff --git a/dts/framework/remote_session/posix_session.py b/dts/framework/remote_session/posix_session.py
index d38062e8d6..8ca0acb429 100644
--- a/dts/framework/remote_session/posix_session.py
+++ b/dts/framework/remote_session/posix_session.py
@@ -9,7 +9,7 @@
from framework.config import Architecture
from framework.exception import DPDKBuildError, RemoteCommandExecutionError
from framework.settings import SETTINGS
-from framework.utils import EnvVarsDict, MesonArgs
+from framework.utils import MesonArgs
from .os_session import OSSession
@@ -34,7 +34,7 @@ def combine_short_options(**opts: bool) -> str:
def guess_dpdk_remote_dir(self, remote_dir) -> PurePosixPath:
remote_guess = self.join_remote_path(remote_dir, "dpdk-*")
- result = self.remote_session.send_command(f"ls -d {remote_guess} | tail -1")
+ result = self.send_command(f"ls -d {remote_guess} | tail -1")
return PurePosixPath(result.stdout)
def get_remote_tmp_dir(self) -> PurePosixPath:
@@ -48,7 +48,7 @@ def get_dpdk_build_env_vars(self, arch: Architecture) -> dict:
env_vars = {}
if arch == Architecture.i686:
# find the pkg-config path and store it in PKG_CONFIG_LIBDIR
- out = self.remote_session.send_command("find /usr -type d -name pkgconfig")
+ out = self.send_command("find /usr -type d -name pkgconfig")
pkg_path = ""
res_path = out.stdout.split("\r\n")
for cur_path in res_path:
@@ -65,13 +65,19 @@ def get_dpdk_build_env_vars(self, arch: Architecture) -> dict:
def join_remote_path(self, *args: str | PurePath) -> PurePosixPath:
return PurePosixPath(*args)
- def copy_file(
+ def copy_from(
self,
source_file: str | PurePath,
destination_file: str | PurePath,
- source_remote: bool = False,
) -> None:
- self.remote_session.copy_file(source_file, destination_file, source_remote)
+ self.remote_session.copy_from(source_file, destination_file)
+
+ def copy_to(
+ self,
+ source_file: str | PurePath,
+ destination_file: str | PurePath,
+ ) -> None:
+ self.remote_session.copy_to(source_file, destination_file)
def remove_remote_dir(
self,
@@ -80,24 +86,24 @@ def remove_remote_dir(
force: bool = True,
) -> None:
opts = PosixSession.combine_short_options(r=recursive, f=force)
- self.remote_session.send_command(f"rm{opts} {remote_dir_path}")
+ self.send_command(f"rm{opts} {remote_dir_path}")
def extract_remote_tarball(
self,
remote_tarball_path: str | PurePath,
expected_dir: str | PurePath | None = None,
) -> None:
- self.remote_session.send_command(
+ self.send_command(
f"tar xfm {remote_tarball_path} "
f"-C {PurePosixPath(remote_tarball_path).parent}",
60,
)
if expected_dir:
- self.remote_session.send_command(f"ls {expected_dir}", verify=True)
+ self.send_command(f"ls {expected_dir}", verify=True)
def build_dpdk(
self,
- env_vars: EnvVarsDict,
+ env_vars: dict,
meson_args: MesonArgs,
remote_dpdk_dir: str | PurePath,
remote_dpdk_build_dir: str | PurePath,
@@ -108,7 +114,7 @@ def build_dpdk(
if rebuild:
# reconfigure, then build
self._logger.info("Reconfiguring DPDK build.")
- self.remote_session.send_command(
+ self.send_command(
f"meson configure {meson_args} {remote_dpdk_build_dir}",
timeout,
verify=True,
@@ -118,7 +124,7 @@ def build_dpdk(
# fresh build - remove target dir first, then build from scratch
self._logger.info("Configuring DPDK build from scratch.")
self.remove_remote_dir(remote_dpdk_build_dir)
- self.remote_session.send_command(
+ self.send_command(
f"meson setup "
f"{meson_args} {remote_dpdk_dir} {remote_dpdk_build_dir}",
timeout,
@@ -127,14 +133,14 @@ def build_dpdk(
)
self._logger.info("Building DPDK.")
- self.remote_session.send_command(
+ self.send_command(
f"ninja -C {remote_dpdk_build_dir}", timeout, verify=True, env=env_vars
)
except RemoteCommandExecutionError as e:
raise DPDKBuildError(f"DPDK build failed when doing '{e.command}'.")
def get_dpdk_version(self, build_dir: str | PurePath) -> str:
- out = self.remote_session.send_command(
+ out = self.send_command(
f"cat {self.join_remote_path(build_dir, 'VERSION')}", verify=True
)
return out.stdout
@@ -146,7 +152,7 @@ def kill_cleanup_dpdk_apps(self, dpdk_prefix_list: Iterable[str]) -> None:
# kill and cleanup only if DPDK is running
dpdk_pids = self._get_dpdk_pids(dpdk_runtime_dirs)
for dpdk_pid in dpdk_pids:
- self.remote_session.send_command(f"kill -9 {dpdk_pid}", 20)
+ self.send_command(f"kill -9 {dpdk_pid}", 20)
self._check_dpdk_hugepages(dpdk_runtime_dirs)
self._remove_dpdk_runtime_dirs(dpdk_runtime_dirs)
@@ -168,7 +174,7 @@ def _list_remote_dirs(self, remote_path: str | PurePath) -> list[str] | None:
Return a list of directories of the remote_dir.
If remote_path doesn't exist, return None.
"""
- out = self.remote_session.send_command(
+ out = self.send_command(
f"ls -l {remote_path} | awk '/^d/ {{print $NF}}'"
).stdout
if "No such file or directory" in out:
@@ -182,9 +188,7 @@ def _get_dpdk_pids(self, dpdk_runtime_dirs: Iterable[str | PurePath]) -> list[in
for dpdk_runtime_dir in dpdk_runtime_dirs:
dpdk_config_file = PurePosixPath(dpdk_runtime_dir, "config")
if self._remote_files_exists(dpdk_config_file):
- out = self.remote_session.send_command(
- f"lsof -Fp {dpdk_config_file}"
- ).stdout
+ out = self.send_command(f"lsof -Fp {dpdk_config_file}").stdout
if out and "No such file or directory" not in out:
for out_line in out.splitlines():
match = re.match(pid_regex, out_line)
@@ -193,7 +197,7 @@ def _get_dpdk_pids(self, dpdk_runtime_dirs: Iterable[str | PurePath]) -> list[in
return pids
def _remote_files_exists(self, remote_path: PurePath) -> bool:
- result = self.remote_session.send_command(f"test -e {remote_path}")
+ result = self.send_command(f"test -e {remote_path}")
return not result.return_code
def _check_dpdk_hugepages(
@@ -202,9 +206,7 @@ def _check_dpdk_hugepages(
for dpdk_runtime_dir in dpdk_runtime_dirs:
hugepage_info = PurePosixPath(dpdk_runtime_dir, "hugepage_info")
if self._remote_files_exists(hugepage_info):
- out = self.remote_session.send_command(
- f"lsof -Fp {hugepage_info}"
- ).stdout
+ out = self.send_command(f"lsof -Fp {hugepage_info}").stdout
if out and "No such file or directory" not in out:
self._logger.warning("Some DPDK processes did not free hugepages.")
self._logger.warning("*******************************************")
diff --git a/dts/framework/remote_session/remote/remote_session.py b/dts/framework/remote_session/remote/remote_session.py
index 91dee3cb4f..0647d93de4 100644
--- a/dts/framework/remote_session/remote/remote_session.py
+++ b/dts/framework/remote_session/remote/remote_session.py
@@ -11,7 +11,6 @@
from framework.exception import RemoteCommandExecutionError
from framework.logger import DTSLOG
from framework.settings import SETTINGS
-from framework.utils import EnvVarsDict
@dataclasses.dataclass(slots=True, frozen=True)
@@ -89,7 +88,7 @@ def send_command(
command: str,
timeout: float = SETTINGS.timeout,
verify: bool = False,
- env: EnvVarsDict | None = None,
+ env: dict | None = None,
) -> CommandResult:
"""
Send a command to the connected node using optional env vars
@@ -114,7 +113,7 @@ def send_command(
@abstractmethod
def _send_command(
- self, command: str, timeout: float, env: EnvVarsDict | None
+ self, command: str, timeout: float, env: dict | None
) -> CommandResult:
"""
Use the underlying protocol to execute the command using optional env vars
@@ -141,15 +140,33 @@ def is_alive(self) -> bool:
"""
@abstractmethod
- def copy_file(
+ def copy_from(
self,
source_file: str | PurePath,
destination_file: str | PurePath,
- source_remote: bool = False,
) -> None:
+ """Copy a file from the remote Node to the local filesystem.
+
+ Copy source_file from the remote Node associated with this remote
+ session to destination_file on the local filesystem.
+
+ Args:
+ source_file: the file on the remote Node.
+ destination_file: a file or directory path on the local filesystem.
"""
- Copy source_file from local filesystem to destination_file on the remote Node
- associated with the remote session.
- If source_remote is True, reverse the direction - copy source_file from the
- associated Node to destination_file on local filesystem.
+
+ @abstractmethod
+ def copy_to(
+ self,
+ source_file: str | PurePath,
+ destination_file: str | PurePath,
+ ) -> None:
+ """Copy a file from local filesystem to the remote Node.
+
+ Copy source_file from local filesystem to destination_file
+ on the remote Node associated with this remote session.
+
+ Args:
+ source_file: the file on the local filesystem.
+ destination_file: a file or directory path on the remote Node.
"""
diff --git a/dts/framework/remote_session/remote/ssh_session.py b/dts/framework/remote_session/remote/ssh_session.py
index 42ff9498a2..8d127f1601 100644
--- a/dts/framework/remote_session/remote/ssh_session.py
+++ b/dts/framework/remote_session/remote/ssh_session.py
@@ -1,29 +1,49 @@
# SPDX-License-Identifier: BSD-3-Clause
-# Copyright(c) 2010-2014 Intel Corporation
-# Copyright(c) 2022-2023 PANTHEON.tech s.r.o.
-# Copyright(c) 2022-2023 University of New Hampshire
+# Copyright(c) 2023 PANTHEON.tech s.r.o.
-import time
+import socket
+import traceback
from pathlib import PurePath
-import pexpect # type: ignore
-from pexpect import pxssh # type: ignore
+from fabric import Connection # type: ignore[import]
+from invoke.exceptions import ( # type: ignore[import]
+ CommandTimedOut,
+ ThreadException,
+ UnexpectedExit,
+)
+from paramiko.ssh_exception import ( # type: ignore[import]
+ AuthenticationException,
+ BadHostKeyException,
+ NoValidConnectionsError,
+ SSHException,
+)
from framework.config import NodeConfiguration
from framework.exception import SSHConnectionError, SSHSessionDeadError, SSHTimeoutError
from framework.logger import DTSLOG
-from framework.utils import GREEN, RED, EnvVarsDict
from .remote_session import CommandResult, RemoteSession
class SSHSession(RemoteSession):
- """
- Module for creating Pexpect SSH remote sessions.
+ """A persistent SSH connection to a remote Node.
+
+ The connection is implemented with the Fabric Python library.
+
+ Args:
+ node_config: The configuration of the Node to connect to.
+ session_name: The name of the session.
+ logger: The logger used for logging.
+ This should be passed from the parent OSSession.
+
+ Attributes:
+ session: The underlying Fabric SSH connection.
+
+ Raises:
+ SSHConnectionError: The connection cannot be established.
"""
- session: pxssh.pxssh
- magic_prompt: str
+ session: Connection
def __init__(
self,
@@ -31,218 +51,91 @@ def __init__(
session_name: str,
logger: DTSLOG,
):
- self.magic_prompt = "MAGIC PROMPT"
super(SSHSession, self).__init__(node_config, session_name, logger)
def _connect(self) -> None:
- """
- Create connection to assigned node.
- """
+ errors = []
retry_attempts = 10
login_timeout = 20 if self.port else 10
- password_regex = (
- r"(?i)(?:password:)|(?:passphrase for key)|(?i)(password for .+:)"
- )
- try:
- for retry_attempt in range(retry_attempts):
- self.session = pxssh.pxssh(encoding="utf-8")
- try:
- self.session.login(
- self.ip,
- self.username,
- self.password,
- original_prompt="[$#>]",
- port=self.port,
- login_timeout=login_timeout,
- password_regex=password_regex,
- )
- break
- except Exception as e:
- self._logger.warning(e)
- time.sleep(2)
- self._logger.info(
- f"Retrying connection: retry number {retry_attempt + 1}."
- )
- else:
- raise Exception(f"Connection to {self.hostname} failed")
-
- self.send_expect("stty -echo", "#")
- self.send_expect("stty columns 1000", "#")
- self.send_expect("bind 'set enable-bracketed-paste off'", "#")
- except Exception as e:
- self._logger.error(RED(str(e)))
- if getattr(self, "port", None):
- suggestion = (
- f"\nSuggestion: Check if the firewall on {self.hostname} is "
- f"stopped.\n"
+ for retry_attempt in range(retry_attempts):
+ try:
+ self.session = Connection(
+ self.ip,
+ user=self.username,
+ port=self.port,
+ connect_kwargs={"password": self.password},
+ connect_timeout=login_timeout,
)
- self._logger.info(GREEN(suggestion))
-
- raise SSHConnectionError(self.hostname)
+ self.session.open()
- def send_expect(
- self, command: str, prompt: str, timeout: float = 15, verify: bool = False
- ) -> str | int:
- try:
- ret = self.send_expect_base(command, prompt, timeout)
- if verify:
- ret_status = self.send_expect_base("echo $?", prompt, timeout)
- try:
- retval = int(ret_status)
- if retval:
- self._logger.error(f"Command: {command} failure!")
- self._logger.error(ret)
- return retval
- else:
- return ret
- except ValueError:
- return ret
- else:
- return ret
- except Exception as e:
- self._logger.error(
- f"Exception happened in [{command}] and output is "
- f"[{self._get_output()}]"
- )
- raise e
-
- def send_expect_base(self, command: str, prompt: str, timeout: float) -> str:
- self._clean_session()
- original_prompt = self.session.PROMPT
- self.session.PROMPT = prompt
- self._send_line(command)
- self._prompt(command, timeout)
-
- before = self._get_output()
- self.session.PROMPT = original_prompt
- return before
-
- def _clean_session(self) -> None:
- self.session.PROMPT = self.magic_prompt
- self.get_output(timeout=0.01)
- self.session.PROMPT = self.session.UNIQUE_PROMPT
-
- def _send_line(self, command: str) -> None:
- if not self.is_alive():
- raise SSHSessionDeadError(self.hostname)
- if len(command) == 2 and command.startswith("^"):
- self.session.sendcontrol(command[1])
- else:
- self.session.sendline(command)
+ except (ValueError, BadHostKeyException, AuthenticationException) as e:
+ self._logger.exception(e)
+ raise SSHConnectionError(self.hostname) from e
- def _prompt(self, command: str, timeout: float) -> None:
- if not self.session.prompt(timeout):
- raise SSHTimeoutError(command, self._get_output()) from None
+ except (NoValidConnectionsError, socket.error, SSHException) as e:
+ self._logger.debug(traceback.format_exc())
+ self._logger.warning(e)
- def get_output(self, timeout: float = 15) -> str:
- """
- Get all output before timeout
- """
- try:
- self.session.prompt(timeout)
- except Exception:
- pass
-
- before = self._get_output()
- self._flush()
-
- return before
+ error = repr(e)
+ if error not in errors:
+ errors.append(error)
- def _get_output(self) -> str:
- if not self.is_alive():
- raise SSHSessionDeadError(self.hostname)
- before = self.session.before.rsplit("\r\n", 1)[0]
- if before == "[PEXPECT]":
- return ""
- return before
+ self._logger.info(
+ f"Retrying connection: retry number {retry_attempt + 1}."
+ )
- def _flush(self) -> None:
- """
- Clear all session buffer
- """
- self.session.buffer = ""
- self.session.before = ""
+ else:
+ break
+ else:
+ raise SSHConnectionError(self.hostname, errors)
def is_alive(self) -> bool:
- return self.session.isalive()
+ return self.session.is_connected
def _send_command(
- self, command: str, timeout: float, env: EnvVarsDict | None
+ self, command: str, timeout: float, env: dict | None
) -> CommandResult:
- output = self._send_command_get_output(command, timeout, env)
- return_code = int(self._send_command_get_output("echo $?", timeout, None))
+ """Send a command and return the result of the execution.
- # we're capturing only stdout
- return CommandResult(self.name, command, output, "", return_code)
+ Args:
+ command: The command to execute.
+ timeout: Wait at most this many seconds for the execution to complete.
+ env: Extra environment variables that will be used in command execution.
- def _send_command_get_output(
- self, command: str, timeout: float, env: EnvVarsDict | None
- ) -> str:
+ Raises:
+ SSHSessionDeadError: The session died while executing the command.
+ SSHTimeoutError: The command execution timed out.
+ """
try:
- self._clean_session()
- if env:
- command = f"{env} {command}"
- self._send_line(command)
- except Exception as e:
- raise e
+ output = self.session.run(
+ command, env=env, warn=True, hide=True, timeout=timeout
+ )
- output = self.get_output(timeout=timeout)
- self.session.PROMPT = self.session.UNIQUE_PROMPT
- self.session.prompt(0.1)
+ except (UnexpectedExit, ThreadException) as e:
+ self._logger.exception(e)
+ raise SSHSessionDeadError(self.hostname) from e
- return output
+ except CommandTimedOut as e:
+ self._logger.exception(e)
+ raise SSHTimeoutError(command, e.result.stderr) from e
- def _close(self, force: bool = False) -> None:
- if force is True:
- self.session.close()
- else:
- if self.is_alive():
- self.session.logout()
+ return CommandResult(
+ self.name, command, output.stdout, output.stderr, output.return_code
+ )
- def copy_file(
+ def copy_from(
self,
source_file: str | PurePath,
destination_file: str | PurePath,
- source_remote: bool = False,
) -> None:
- """
- Send a local file to a remote host.
- """
- if source_remote:
- source_file = f"{self.username}@{self.ip}:{source_file}"
- else:
- destination_file = f"{self.username}@{self.ip}:{destination_file}"
+ self.session.get(str(destination_file), str(source_file))
- port = ""
- if self.port:
- port = f" -P {self.port}"
-
- command = (
- f"scp -v{port} -o NoHostAuthenticationForLocalhost=yes"
- f" {source_file} {destination_file}"
- )
-
- self._spawn_scp(command)
+ def copy_to(
+ self,
+ source_file: str | PurePath,
+ destination_file: str | PurePath,
+ ) -> None:
+ self.session.put(str(source_file), str(destination_file))
- def _spawn_scp(self, scp_cmd: str) -> None:
- """
- Transfer a file with SCP
- """
- self._logger.info(scp_cmd)
- p: pexpect.spawn = pexpect.spawn(scp_cmd)
- time.sleep(0.5)
- ssh_newkey: str = "Are you sure you want to continue connecting"
- i: int = p.expect(
- [ssh_newkey, "[pP]assword", "# ", pexpect.EOF, pexpect.TIMEOUT], 120
- )
- if i == 0: # add once in trust list
- p.sendline("yes")
- i = p.expect([ssh_newkey, "[pP]assword", pexpect.EOF], 2)
-
- if i == 1:
- time.sleep(0.5)
- p.sendline(self.password)
- p.expect("Exit status 0", 60)
- if i == 4:
- self._logger.error("SCP TIMEOUT error %d" % i)
- p.close()
+ def _close(self, force: bool = False) -> None:
+ self.session.close()
diff --git a/dts/framework/testbed_model/sut_node.py b/dts/framework/testbed_model/sut_node.py
index 2b2b50d982..9dbc390848 100644
--- a/dts/framework/testbed_model/sut_node.py
+++ b/dts/framework/testbed_model/sut_node.py
@@ -10,7 +10,7 @@
from framework.config import BuildTargetConfiguration, NodeConfiguration
from framework.remote_session import CommandResult, OSSession
from framework.settings import SETTINGS
-from framework.utils import EnvVarsDict, MesonArgs
+from framework.utils import MesonArgs
from .hw import LogicalCoreCount, LogicalCoreList, VirtualDevice
from .node import Node
@@ -27,7 +27,7 @@ class SutNode(Node):
_dpdk_prefix_list: list[str]
_dpdk_timestamp: str
_build_target_config: BuildTargetConfiguration | None
- _env_vars: EnvVarsDict
+ _env_vars: dict
_remote_tmp_dir: PurePath
__remote_dpdk_dir: PurePath | None
_dpdk_version: str | None
@@ -38,7 +38,7 @@ def __init__(self, node_config: NodeConfiguration):
super(SutNode, self).__init__(node_config)
self._dpdk_prefix_list = []
self._build_target_config = None
- self._env_vars = EnvVarsDict()
+ self._env_vars = {}
self._remote_tmp_dir = self.main_session.get_remote_tmp_dir()
self.__remote_dpdk_dir = None
self._dpdk_version = None
@@ -94,7 +94,7 @@ def _configure_build_target(
"""
Populate common environment variables and set build target config.
"""
- self._env_vars = EnvVarsDict()
+ self._env_vars = {}
self._build_target_config = build_target_config
self._env_vars.update(
self.main_session.get_dpdk_build_env_vars(build_target_config.arch)
@@ -112,7 +112,7 @@ def _copy_dpdk_tarball(self) -> None:
Copy to and extract DPDK tarball on the SUT node.
"""
self._logger.info("Copying DPDK tarball to SUT.")
- self.main_session.copy_file(SETTINGS.dpdk_tarball_path, self._remote_tmp_dir)
+ self.main_session.copy_to(SETTINGS.dpdk_tarball_path, self._remote_tmp_dir)
# construct remote tarball path
# the basename is the same on local host and on remote Node
@@ -259,7 +259,7 @@ def run_dpdk_app(
Run DPDK application on the remote node.
"""
return self.main_session.send_command(
- f"{app_path} {eal_args}", timeout, verify=True
+ f"{app_path} {eal_args}", timeout, privileged=True, verify=True
)
diff --git a/dts/framework/utils.py b/dts/framework/utils.py
index 55e0b0ef0e..8cfbc6a29d 100644
--- a/dts/framework/utils.py
+++ b/dts/framework/utils.py
@@ -42,19 +42,10 @@ def expand_range(range_str: str) -> list[int]:
return expanded_range
-def GREEN(text: str) -> str:
- return f"\u001B[32;1m{str(text)}\u001B[0m"
-
-
def RED(text: str) -> str:
return f"\u001B[31;1m{str(text)}\u001B[0m"
-class EnvVarsDict(dict):
- def __str__(self) -> str:
- return " ".join(["=".join(item) for item in self.items()])
-
-
class MesonArgs(object):
"""
Aggregate the arguments needed to build DPDK:
--
2.30.2
^ permalink raw reply [flat|nested] 21+ messages in thread
* Re: [PATCH v1 1/2] dts: fabric requirements
2023-04-03 11:46 [PATCH v1 1/2] dts: fabric requirements Juraj Linkeš
2023-04-03 11:46 ` [PATCH v1 2/2] dts: replace pexpect with fabric Juraj Linkeš
@ 2023-04-03 12:33 ` Thomas Monjalon
2023-04-03 14:56 ` Juraj Linkeš
1 sibling, 1 reply; 21+ messages in thread
From: Thomas Monjalon @ 2023-04-03 12:33 UTC (permalink / raw)
To: wathsala.vithanage, jspewock, probb, Juraj Linkeš
Cc: Honnappa.Nagarahalli, lijuan.tu, bruce.richardson, dev
03/04/2023 13:46, Juraj Linkeš:
> Replace pexpect with Fabric.
You should squash these lines with the move to Fabric.
> Signed-off-by: Juraj Linkeš <juraj.linkes@pantheon.tech>
> ---
> dts/poetry.lock | 553 +++++++++++++++++++++++++++++++++++++++------
Do we really need *all* these lines?
I see a lot of lines about Windows and MacOSX which are not supported in DTS.
It is so long that it looks impossible to review.
^ permalink raw reply [flat|nested] 21+ messages in thread
* Re: [PATCH v1 1/2] dts: fabric requirements
2023-04-03 12:33 ` [PATCH v1 1/2] dts: fabric requirements Thomas Monjalon
@ 2023-04-03 14:56 ` Juraj Linkeš
2023-04-03 15:17 ` Thomas Monjalon
0 siblings, 1 reply; 21+ messages in thread
From: Juraj Linkeš @ 2023-04-03 14:56 UTC (permalink / raw)
To: Thomas Monjalon
Cc: wathsala.vithanage, jspewock, probb, Honnappa.Nagarahalli,
lijuan.tu, bruce.richardson, dev
[-- Attachment #1: Type: text/plain, Size: 731 bytes --]
On Mon, Apr 3, 2023 at 2:33 PM Thomas Monjalon <thomas@monjalon.net> wrote:
> 03/04/2023 13:46, Juraj Linkeš:
> > Replace pexpect with Fabric.
>
> You should squash these lines with the move to Fabric.
>
> > Signed-off-by: Juraj Linkeš <juraj.linkes@pantheon.tech>
> > ---
> > dts/poetry.lock | 553 +++++++++++++++++++++++++++++++++++++++------
>
> Do we really need *all* these lines?
> I see a lot of lines about Windows and MacOSX which are not supported in
> DTS.
> It is so long that it looks impossible to review.
>
>
This is a generated file and doesn't need to be reviewed. I separated the
dependencies part so that the code part is easier to review. If you want, I
can squash the two commits.
[-- Attachment #2: Type: text/html, Size: 1242 bytes --]
^ permalink raw reply [flat|nested] 21+ messages in thread
* Re: [PATCH v1 1/2] dts: fabric requirements
2023-04-03 14:56 ` Juraj Linkeš
@ 2023-04-03 15:17 ` Thomas Monjalon
2023-04-04 11:51 ` Juraj Linkeš
0 siblings, 1 reply; 21+ messages in thread
From: Thomas Monjalon @ 2023-04-03 15:17 UTC (permalink / raw)
To: Juraj Linkeš
Cc: wathsala.vithanage, jspewock, probb, Honnappa.Nagarahalli,
lijuan.tu, bruce.richardson, dev
03/04/2023 16:56, Juraj Linkeš:
> On Mon, Apr 3, 2023 at 2:33 PM Thomas Monjalon <thomas@monjalon.net> wrote:
>
> > 03/04/2023 13:46, Juraj Linkeš:
> > > Replace pexpect with Fabric.
> >
> > You should squash these lines with the move to Fabric.
> >
> > > Signed-off-by: Juraj Linkeš <juraj.linkes@pantheon.tech>
> > > ---
> > > dts/poetry.lock | 553 +++++++++++++++++++++++++++++++++++++++------
> >
> > Do we really need *all* these lines?
> > I see a lot of lines about Windows and MacOSX which are not supported in
> > DTS.
> > It is so long that it looks impossible to review.
> >
> >
> This is a generated file and doesn't need to be reviewed.
In general, I don't like storing generated files.
> I separated the
> dependencies part so that the code part is easier to review. If you want, I
> can squash the two commits.
What happens if we manually remove the useless lines?
^ permalink raw reply [flat|nested] 21+ messages in thread
* Re: [PATCH v1 1/2] dts: fabric requirements
2023-04-03 15:17 ` Thomas Monjalon
@ 2023-04-04 11:51 ` Juraj Linkeš
2023-04-11 14:48 ` Thomas Monjalon
0 siblings, 1 reply; 21+ messages in thread
From: Juraj Linkeš @ 2023-04-04 11:51 UTC (permalink / raw)
To: Thomas Monjalon
Cc: wathsala.vithanage, jspewock, probb, Honnappa.Nagarahalli,
lijuan.tu, bruce.richardson, dev
[-- Attachment #1: Type: text/plain, Size: 1739 bytes --]
On Mon, Apr 3, 2023 at 5:18 PM Thomas Monjalon <thomas@monjalon.net> wrote:
> 03/04/2023 16:56, Juraj Linkeš:
> > On Mon, Apr 3, 2023 at 2:33 PM Thomas Monjalon <thomas@monjalon.net>
> wrote:
> >
> > > 03/04/2023 13:46, Juraj Linkeš:
> > > > Replace pexpect with Fabric.
> > >
> > > You should squash these lines with the move to Fabric.
> > >
> > > > Signed-off-by: Juraj Linkeš <juraj.linkes@pantheon.tech>
> > > > ---
> > > > dts/poetry.lock | 553
> +++++++++++++++++++++++++++++++++++++++------
> > >
> > > Do we really need *all* these lines?
> > > I see a lot of lines about Windows and MacOSX which are not supported
> in
> > > DTS.
> > > It is so long that it looks impossible to review.
> > >
> > >
> > This is a generated file and doesn't need to be reviewed.
>
> In general, I don't like storing generated files.
>
Me neither, but this one is specifically designed to be stored in a
repository:
https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
>
> > I separated the
> > dependencies part so that the code part is easier to review. If you
> want, I
> > can squash the two commits.
>
> What happens if we manually remove the useless lines?
>
>
The lock file is there so that everyone installs exactly the same versions
of dependencies. We can specify the versions of dependencies in
pyproject.toml, but we won't control the versions of dependencies of
dependencies this way. If we remove the changes to the lock file, then we
won't be storing tested versions, everyone would be using slightly
different versions and we may potentially need to address versioning issues
in the future - best to prevent that with a lock file.
[-- Attachment #2: Type: text/html, Size: 2608 bytes --]
^ permalink raw reply [flat|nested] 21+ messages in thread
* Re: [PATCH v1 1/2] dts: fabric requirements
2023-04-04 11:51 ` Juraj Linkeš
@ 2023-04-11 14:48 ` Thomas Monjalon
2023-04-12 13:42 ` Juraj Linkeš
0 siblings, 1 reply; 21+ messages in thread
From: Thomas Monjalon @ 2023-04-11 14:48 UTC (permalink / raw)
To: Juraj Linkeš
Cc: wathsala.vithanage, jspewock, probb, Honnappa.Nagarahalli,
lijuan.tu, bruce.richardson, dev
04/04/2023 13:51, Juraj Linkeš:
> On Mon, Apr 3, 2023 at 5:18 PM Thomas Monjalon <thomas@monjalon.net> wrote:
>
> > 03/04/2023 16:56, Juraj Linkeš:
> > > On Mon, Apr 3, 2023 at 2:33 PM Thomas Monjalon <thomas@monjalon.net>
> > wrote:
> > >
> > > > 03/04/2023 13:46, Juraj Linkeš:
> > > > > Replace pexpect with Fabric.
> > > >
> > > > You should squash these lines with the move to Fabric.
> > > >
> > > > > Signed-off-by: Juraj Linkeš <juraj.linkes@pantheon.tech>
> > > > > ---
> > > > > dts/poetry.lock | 553
> > +++++++++++++++++++++++++++++++++++++++------
> > > >
> > > > Do we really need *all* these lines?
> > > > I see a lot of lines about Windows and MacOSX which are not supported
> > in
> > > > DTS.
> > > > It is so long that it looks impossible to review.
> > > >
> > > >
> > > This is a generated file and doesn't need to be reviewed.
> >
> > In general, I don't like storing generated files.
> >
>
> Me neither, but this one is specifically designed to be stored in a
> repository:
> https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
>
>
> >
> > > I separated the
> > > dependencies part so that the code part is easier to review. If you
> > want, I
> > > can squash the two commits.
> >
> > What happens if we manually remove the useless lines?
> >
> >
> The lock file is there so that everyone installs exactly the same versions
> of dependencies. We can specify the versions of dependencies in
> pyproject.toml, but we won't control the versions of dependencies of
> dependencies this way. If we remove the changes to the lock file, then we
> won't be storing tested versions, everyone would be using slightly
> different versions and we may potentially need to address versioning issues
> in the future - best to prevent that with a lock file.
You didn't answer about removing the useless lines, like unneeded Windows support.
^ permalink raw reply [flat|nested] 21+ messages in thread
* Re: [PATCH v1 1/2] dts: fabric requirements
2023-04-11 14:48 ` Thomas Monjalon
@ 2023-04-12 13:42 ` Juraj Linkeš
2023-04-12 15:24 ` Thomas Monjalon
0 siblings, 1 reply; 21+ messages in thread
From: Juraj Linkeš @ 2023-04-12 13:42 UTC (permalink / raw)
To: Thomas Monjalon
Cc: wathsala.vithanage, jspewock, probb, Honnappa.Nagarahalli,
lijuan.tu, bruce.richardson, dev
On Tue, Apr 11, 2023 at 4:48 PM Thomas Monjalon <thomas@monjalon.net> wrote:
>
> 04/04/2023 13:51, Juraj Linkeš:
> > On Mon, Apr 3, 2023 at 5:18 PM Thomas Monjalon <thomas@monjalon.net> wrote:
> >
> > > 03/04/2023 16:56, Juraj Linkeš:
> > > > On Mon, Apr 3, 2023 at 2:33 PM Thomas Monjalon <thomas@monjalon.net>
> > > wrote:
> > > >
> > > > > 03/04/2023 13:46, Juraj Linkeš:
> > > > > > Replace pexpect with Fabric.
> > > > >
> > > > > You should squash these lines with the move to Fabric.
> > > > >
> > > > > > Signed-off-by: Juraj Linkeš <juraj.linkes@pantheon.tech>
> > > > > > ---
> > > > > > dts/poetry.lock | 553
> > > +++++++++++++++++++++++++++++++++++++++------
> > > > >
> > > > > Do we really need *all* these lines?
> > > > > I see a lot of lines about Windows and MacOSX which are not supported
> > > in
> > > > > DTS.
> > > > > It is so long that it looks impossible to review.
> > > > >
> > > > >
> > > > This is a generated file and doesn't need to be reviewed.
> > >
> > > In general, I don't like storing generated files.
> > >
> >
> > Me neither, but this one is specifically designed to be stored in a
> > repository:
> > https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
> >
> >
> > >
> > > > I separated the
> > > > dependencies part so that the code part is easier to review. If you
> > > want, I
> > > > can squash the two commits.
> > >
> > > What happens if we manually remove the useless lines?
> > >
> > >
> > The lock file is there so that everyone installs exactly the same versions
> > of dependencies. We can specify the versions of dependencies in
> > pyproject.toml, but we won't control the versions of dependencies of
> > dependencies this way. If we remove the changes to the lock file, then we
> > won't be storing tested versions, everyone would be using slightly
> > different versions and we may potentially need to address versioning issues
> > in the future - best to prevent that with a lock file.
>
> You didn't answer about removing the useless lines, like unneeded Windows support.
>
Do you mean the list of files from macos and windows? I tried removing
those from mypy and testing it and it looks like it didn't have an
impact, but I don't know the inner workings of poetry and the lock
file to test it properly (i.e. to rule out any breakages). What would
be the reason for removing those? Seems like it has more downsides (we
could potentially break something and it's extra work) than upsides
(as this is a generated file, I don't really see any).
^ permalink raw reply [flat|nested] 21+ messages in thread
* Re: [PATCH v1 1/2] dts: fabric requirements
2023-04-12 13:42 ` Juraj Linkeš
@ 2023-04-12 15:24 ` Thomas Monjalon
2023-04-12 15:38 ` Honnappa Nagarahalli
0 siblings, 1 reply; 21+ messages in thread
From: Thomas Monjalon @ 2023-04-12 15:24 UTC (permalink / raw)
To: Juraj Linkeš
Cc: wathsala.vithanage, jspewock, probb, Honnappa.Nagarahalli,
lijuan.tu, bruce.richardson, dev
12/04/2023 15:42, Juraj Linkeš:
> On Tue, Apr 11, 2023 at 4:48 PM Thomas Monjalon <thomas@monjalon.net> wrote:
> >
> > 04/04/2023 13:51, Juraj Linkeš:
> > > On Mon, Apr 3, 2023 at 5:18 PM Thomas Monjalon <thomas@monjalon.net> wrote:
> > >
> > > > 03/04/2023 16:56, Juraj Linkeš:
> > > > > On Mon, Apr 3, 2023 at 2:33 PM Thomas Monjalon <thomas@monjalon.net>
> > > > wrote:
> > > > >
> > > > > > 03/04/2023 13:46, Juraj Linkeš:
> > > > > > > Replace pexpect with Fabric.
> > > > > >
> > > > > > You should squash these lines with the move to Fabric.
> > > > > >
> > > > > > > Signed-off-by: Juraj Linkeš <juraj.linkes@pantheon.tech>
> > > > > > > ---
> > > > > > > dts/poetry.lock | 553
> > > > +++++++++++++++++++++++++++++++++++++++------
> > > > > >
> > > > > > Do we really need *all* these lines?
> > > > > > I see a lot of lines about Windows and MacOSX which are not supported
> > > > in
> > > > > > DTS.
> > > > > > It is so long that it looks impossible to review.
> > > > > >
> > > > > >
> > > > > This is a generated file and doesn't need to be reviewed.
> > > >
> > > > In general, I don't like storing generated files.
> > > >
> > >
> > > Me neither, but this one is specifically designed to be stored in a
> > > repository:
> > > https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
> > >
> > >
> > > >
> > > > > I separated the
> > > > > dependencies part so that the code part is easier to review. If you
> > > > want, I
> > > > > can squash the two commits.
> > > >
> > > > What happens if we manually remove the useless lines?
> > > >
> > > >
> > > The lock file is there so that everyone installs exactly the same versions
> > > of dependencies. We can specify the versions of dependencies in
> > > pyproject.toml, but we won't control the versions of dependencies of
> > > dependencies this way. If we remove the changes to the lock file, then we
> > > won't be storing tested versions, everyone would be using slightly
> > > different versions and we may potentially need to address versioning issues
> > > in the future - best to prevent that with a lock file.
> >
> > You didn't answer about removing the useless lines, like unneeded Windows support.
> >
>
> Do you mean the list of files from macos and windows? I tried removing
> those from mypy and testing it and it looks like it didn't have an
> impact, but I don't know the inner workings of poetry and the lock
> file to test it properly (i.e. to rule out any breakages). What would
> be the reason for removing those? Seems like it has more downsides (we
> could potentially break something and it's extra work) than upsides
> (as this is a generated file, I don't really see any).
Yes this is what I mean.
Any other opinion?
^ permalink raw reply [flat|nested] 21+ messages in thread
* RE: [PATCH v1 1/2] dts: fabric requirements
2023-04-12 15:24 ` Thomas Monjalon
@ 2023-04-12 15:38 ` Honnappa Nagarahalli
2023-04-13 6:50 ` Juraj Linkeš
0 siblings, 1 reply; 21+ messages in thread
From: Honnappa Nagarahalli @ 2023-04-12 15:38 UTC (permalink / raw)
To: thomas, Juraj Linkeš
Cc: Wathsala Wathawana Vithanage, jspewock, probb, lijuan.tu,
bruce.richardson, dev, nd, nd
> -----Original Message-----
> From: Thomas Monjalon <thomas@monjalon.net>
> Sent: Wednesday, April 12, 2023 10:25 AM
> To: Juraj Linkeš <juraj.linkes@pantheon.tech>
> Cc: Wathsala Wathawana Vithanage <wathsala.vithanage@arm.com>;
> jspewock@iol.unh.edu; probb@iol.unh.edu; Honnappa Nagarahalli
> <Honnappa.Nagarahalli@arm.com>; lijuan.tu@intel.com;
> bruce.richardson@intel.com; dev@dpdk.org
> Subject: Re: [PATCH v1 1/2] dts: fabric requirements
>
> 12/04/2023 15:42, Juraj Linkeš:
> > On Tue, Apr 11, 2023 at 4:48 PM Thomas Monjalon <thomas@monjalon.net>
> wrote:
> > >
> > > 04/04/2023 13:51, Juraj Linkeš:
> > > > On Mon, Apr 3, 2023 at 5:18 PM Thomas Monjalon
> <thomas@monjalon.net> wrote:
> > > >
> > > > > 03/04/2023 16:56, Juraj Linkeš:
> > > > > > On Mon, Apr 3, 2023 at 2:33 PM Thomas Monjalon
> > > > > > <thomas@monjalon.net>
> > > > > wrote:
> > > > > >
> > > > > > > 03/04/2023 13:46, Juraj Linkeš:
> > > > > > > > Replace pexpect with Fabric.
> > > > > > >
> > > > > > > You should squash these lines with the move to Fabric.
> > > > > > >
> > > > > > > > Signed-off-by: Juraj Linkeš <juraj.linkes@pantheon.tech>
> > > > > > > > ---
> > > > > > > > dts/poetry.lock | 553
> > > > > +++++++++++++++++++++++++++++++++++++++------
> > > > > > >
> > > > > > > Do we really need *all* these lines?
> > > > > > > I see a lot of lines about Windows and MacOSX which are not
> > > > > > > supported
> > > > > in
> > > > > > > DTS.
> > > > > > > It is so long that it looks impossible to review.
> > > > > > >
> > > > > > >
> > > > > > This is a generated file and doesn't need to be reviewed.
> > > > >
> > > > > In general, I don't like storing generated files.
> > > > >
> > > >
> > > > Me neither, but this one is specifically designed to be stored in
> > > > a
> > > > repository:
> > > > https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock
> > > > -file-to-version-control
> > > >
> > > >
> > > > >
> > > > > > I separated the
> > > > > > dependencies part so that the code part is easier to review.
> > > > > > If you
> > > > > want, I
> > > > > > can squash the two commits.
> > > > >
> > > > > What happens if we manually remove the useless lines?
> > > > >
> > > > >
> > > > The lock file is there so that everyone installs exactly the same
> > > > versions of dependencies. We can specify the versions of
> > > > dependencies in pyproject.toml, but we won't control the versions
> > > > of dependencies of dependencies this way. If we remove the changes
> > > > to the lock file, then we won't be storing tested versions,
> > > > everyone would be using slightly different versions and we may
> > > > potentially need to address versioning issues in the future - best to prevent
> that with a lock file.
> > >
> > > You didn't answer about removing the useless lines, like unneeded Windows
> support.
> > >
> >
> > Do you mean the list of files from macos and windows? I tried removing
> > those from mypy and testing it and it looks like it didn't have an
> > impact, but I don't know the inner workings of poetry and the lock
> > file to test it properly (i.e. to rule out any breakages). What would
> > be the reason for removing those? Seems like it has more downsides (we
> > could potentially break something and it's extra work) than upsides
> > (as this is a generated file, I don't really see any).
>
> Yes this is what I mean.
> Any other opinion?
>
If it is a generated file, there might be an expectation from the tool that the file is not changed. It would be good to understand this.
Since it is a generated file, should we generate this during DTS run time rather than storing a generated file?
>
^ permalink raw reply [flat|nested] 21+ messages in thread
* Re: [PATCH v1 1/2] dts: fabric requirements
2023-04-12 15:38 ` Honnappa Nagarahalli
@ 2023-04-13 6:50 ` Juraj Linkeš
2023-04-13 7:49 ` Juraj Linkeš
0 siblings, 1 reply; 21+ messages in thread
From: Juraj Linkeš @ 2023-04-13 6:50 UTC (permalink / raw)
To: Honnappa Nagarahalli
Cc: thomas, Wathsala Wathawana Vithanage, jspewock, probb, lijuan.tu,
bruce.richardson, dev, nd
On Wed, Apr 12, 2023 at 5:38 PM Honnappa Nagarahalli
<Honnappa.Nagarahalli@arm.com> wrote:
>
>
>
> > -----Original Message-----
> > From: Thomas Monjalon <thomas@monjalon.net>
> > Sent: Wednesday, April 12, 2023 10:25 AM
> > To: Juraj Linkeš <juraj.linkes@pantheon.tech>
> > Cc: Wathsala Wathawana Vithanage <wathsala.vithanage@arm.com>;
> > jspewock@iol.unh.edu; probb@iol.unh.edu; Honnappa Nagarahalli
> > <Honnappa.Nagarahalli@arm.com>; lijuan.tu@intel.com;
> > bruce.richardson@intel.com; dev@dpdk.org
> > Subject: Re: [PATCH v1 1/2] dts: fabric requirements
> >
> > 12/04/2023 15:42, Juraj Linkeš:
> > > On Tue, Apr 11, 2023 at 4:48 PM Thomas Monjalon <thomas@monjalon.net>
> > wrote:
> > > >
> > > > 04/04/2023 13:51, Juraj Linkeš:
> > > > > On Mon, Apr 3, 2023 at 5:18 PM Thomas Monjalon
> > <thomas@monjalon.net> wrote:
> > > > >
> > > > > > 03/04/2023 16:56, Juraj Linkeš:
> > > > > > > On Mon, Apr 3, 2023 at 2:33 PM Thomas Monjalon
> > > > > > > <thomas@monjalon.net>
> > > > > > wrote:
> > > > > > >
> > > > > > > > 03/04/2023 13:46, Juraj Linkeš:
> > > > > > > > > Replace pexpect with Fabric.
> > > > > > > >
> > > > > > > > You should squash these lines with the move to Fabric.
> > > > > > > >
> > > > > > > > > Signed-off-by: Juraj Linkeš <juraj.linkes@pantheon.tech>
> > > > > > > > > ---
> > > > > > > > > dts/poetry.lock | 553
> > > > > > +++++++++++++++++++++++++++++++++++++++------
> > > > > > > >
> > > > > > > > Do we really need *all* these lines?
> > > > > > > > I see a lot of lines about Windows and MacOSX which are not
> > > > > > > > supported
> > > > > > in
> > > > > > > > DTS.
> > > > > > > > It is so long that it looks impossible to review.
> > > > > > > >
> > > > > > > >
> > > > > > > This is a generated file and doesn't need to be reviewed.
> > > > > >
> > > > > > In general, I don't like storing generated files.
> > > > > >
> > > > >
> > > > > Me neither, but this one is specifically designed to be stored in
> > > > > a
> > > > > repository:
> > > > > https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock
> > > > > -file-to-version-control
> > > > >
> > > > >
> > > > > >
> > > > > > > I separated the
> > > > > > > dependencies part so that the code part is easier to review.
> > > > > > > If you
> > > > > > want, I
> > > > > > > can squash the two commits.
> > > > > >
> > > > > > What happens if we manually remove the useless lines?
> > > > > >
> > > > > >
> > > > > The lock file is there so that everyone installs exactly the same
> > > > > versions of dependencies. We can specify the versions of
> > > > > dependencies in pyproject.toml, but we won't control the versions
> > > > > of dependencies of dependencies this way. If we remove the changes
> > > > > to the lock file, then we won't be storing tested versions,
> > > > > everyone would be using slightly different versions and we may
> > > > > potentially need to address versioning issues in the future - best to prevent
> > that with a lock file.
> > > >
> > > > You didn't answer about removing the useless lines, like unneeded Windows
> > support.
> > > >
> > >
> > > Do you mean the list of files from macos and windows? I tried removing
> > > those from mypy and testing it and it looks like it didn't have an
> > > impact, but I don't know the inner workings of poetry and the lock
> > > file to test it properly (i.e. to rule out any breakages). What would
> > > be the reason for removing those? Seems like it has more downsides (we
> > > could potentially break something and it's extra work) than upsides
> > > (as this is a generated file, I don't really see any).
> >
> > Yes this is what I mean.
> > Any other opinion?
> >
> If it is a generated file, there might be an expectation from the tool that the file is not changed. It would be good to understand this.
>
> Since it is a generated file, should we generate this during DTS run time rather than storing a generated file?
>
The file is not used during runtime, but rather when installing
dependencies. It's supposed to be generated by maintainers (once every
time dependencies change or need updating) who verify the versions
defined in the generated lockfile so that everyone then uses the same
versions from that point on, preventing issues arising from different
users using different versions of dependencies. So it's maintainers
giving this file to other people.
Juraj
^ permalink raw reply [flat|nested] 21+ messages in thread
* Re: [PATCH v1 1/2] dts: fabric requirements
2023-04-13 6:50 ` Juraj Linkeš
@ 2023-04-13 7:49 ` Juraj Linkeš
0 siblings, 0 replies; 21+ messages in thread
From: Juraj Linkeš @ 2023-04-13 7:49 UTC (permalink / raw)
To: Honnappa Nagarahalli
Cc: thomas, Wathsala Wathawana Vithanage, jspewock, probb, lijuan.tu,
bruce.richardson, dev, nd
On Thu, Apr 13, 2023 at 8:50 AM Juraj Linkeš <juraj.linkes@pantheon.tech> wrote:
>
> On Wed, Apr 12, 2023 at 5:38 PM Honnappa Nagarahalli
> <Honnappa.Nagarahalli@arm.com> wrote:
> >
> >
> >
> > > -----Original Message-----
> > > From: Thomas Monjalon <thomas@monjalon.net>
> > > Sent: Wednesday, April 12, 2023 10:25 AM
> > > To: Juraj Linkeš <juraj.linkes@pantheon.tech>
> > > Cc: Wathsala Wathawana Vithanage <wathsala.vithanage@arm.com>;
> > > jspewock@iol.unh.edu; probb@iol.unh.edu; Honnappa Nagarahalli
> > > <Honnappa.Nagarahalli@arm.com>; lijuan.tu@intel.com;
> > > bruce.richardson@intel.com; dev@dpdk.org
> > > Subject: Re: [PATCH v1 1/2] dts: fabric requirements
> > >
> > > 12/04/2023 15:42, Juraj Linkeš:
> > > > On Tue, Apr 11, 2023 at 4:48 PM Thomas Monjalon <thomas@monjalon.net>
> > > wrote:
> > > > >
> > > > > 04/04/2023 13:51, Juraj Linkeš:
> > > > > > On Mon, Apr 3, 2023 at 5:18 PM Thomas Monjalon
> > > <thomas@monjalon.net> wrote:
> > > > > >
> > > > > > > 03/04/2023 16:56, Juraj Linkeš:
> > > > > > > > On Mon, Apr 3, 2023 at 2:33 PM Thomas Monjalon
> > > > > > > > <thomas@monjalon.net>
> > > > > > > wrote:
> > > > > > > >
> > > > > > > > > 03/04/2023 13:46, Juraj Linkeš:
> > > > > > > > > > Replace pexpect with Fabric.
> > > > > > > > >
> > > > > > > > > You should squash these lines with the move to Fabric.
> > > > > > > > >
> > > > > > > > > > Signed-off-by: Juraj Linkeš <juraj.linkes@pantheon.tech>
> > > > > > > > > > ---
> > > > > > > > > > dts/poetry.lock | 553
> > > > > > > +++++++++++++++++++++++++++++++++++++++------
> > > > > > > > >
> > > > > > > > > Do we really need *all* these lines?
> > > > > > > > > I see a lot of lines about Windows and MacOSX which are not
> > > > > > > > > supported
> > > > > > > in
> > > > > > > > > DTS.
> > > > > > > > > It is so long that it looks impossible to review.
> > > > > > > > >
> > > > > > > > >
> > > > > > > > This is a generated file and doesn't need to be reviewed.
> > > > > > >
> > > > > > > In general, I don't like storing generated files.
> > > > > > >
> > > > > >
> > > > > > Me neither, but this one is specifically designed to be stored in
> > > > > > a
> > > > > > repository:
> > > > > > https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock
> > > > > > -file-to-version-control
> > > > > >
> > > > > >
> > > > > > >
> > > > > > > > I separated the
> > > > > > > > dependencies part so that the code part is easier to review.
> > > > > > > > If you
> > > > > > > want, I
> > > > > > > > can squash the two commits.
> > > > > > >
> > > > > > > What happens if we manually remove the useless lines?
> > > > > > >
> > > > > > >
> > > > > > The lock file is there so that everyone installs exactly the same
> > > > > > versions of dependencies. We can specify the versions of
> > > > > > dependencies in pyproject.toml, but we won't control the versions
> > > > > > of dependencies of dependencies this way. If we remove the changes
> > > > > > to the lock file, then we won't be storing tested versions,
> > > > > > everyone would be using slightly different versions and we may
> > > > > > potentially need to address versioning issues in the future - best to prevent
> > > that with a lock file.
> > > > >
> > > > > You didn't answer about removing the useless lines, like unneeded Windows
> > > support.
> > > > >
> > > >
> > > > Do you mean the list of files from macos and windows? I tried removing
> > > > those from mypy and testing it and it looks like it didn't have an
> > > > impact, but I don't know the inner workings of poetry and the lock
> > > > file to test it properly (i.e. to rule out any breakages). What would
> > > > be the reason for removing those? Seems like it has more downsides (we
> > > > could potentially break something and it's extra work) than upsides
> > > > (as this is a generated file, I don't really see any).
> > >
> > > Yes this is what I mean.
> > > Any other opinion?
> > >
> > If it is a generated file, there might be an expectation from the tool that the file is not changed. It would be good to understand this.
> >
> > Since it is a generated file, should we generate this during DTS run time rather than storing a generated file?
> >
>
> The file is not used during runtime, but rather when installing
> dependencies. It's supposed to be generated by maintainers (once every
> time dependencies change or need updating) who verify the versions
> defined in the generated lockfile so that everyone then uses the same
> versions from that point on, preventing issues arising from different
> users using different versions of dependencies. So it's maintainers
> giving this file to other people.
>
> Juraj
I looked into this some more and I have some extra stuff to explain.
There's another patch that updates and cleans up the dependencies:
http://patches.dpdk.org/project/dpdk/patch/20230331091355.1224059-1-juraj.linkes@pantheon.tech/
To do this patch, I updated my Poetry version to 1.2.0 and apparently,
that changed the file lists of packages. Before, they had that in a
separate section and in Poetry 1.2.0 they separated it into packages.
This led to this patch having a lot of unrelated changes (in unrelated
dependencies) brought on by the Poetry upgrade. I'll submit an update
with just the pexpect/fabric switch and we can discuss the changes
brought by the Poetry update in the other patch.
^ permalink raw reply [flat|nested] 21+ messages in thread
* [PATCH v2] dts: replace pexpect with fabric
2023-04-03 11:46 ` [PATCH v1 2/2] dts: replace pexpect with fabric Juraj Linkeš
@ 2023-04-24 13:35 ` Juraj Linkeš
2023-04-28 19:03 ` Jeremy Spewock
2023-06-09 9:46 ` [PATCH v3] " Juraj Linkeš
0 siblings, 2 replies; 21+ messages in thread
From: Juraj Linkeš @ 2023-04-24 13:35 UTC (permalink / raw)
To: thomas, Honnappa.Nagarahalli, lijuan.tu, wathsala.vithanage,
jspewock, probb
Cc: dev, Juraj Linkeš
Pexpect is not a dedicated SSH connection library while Fabric is. With
Fabric, all SSH-related logic is provided and we can just focus on
what's DTS specific.
Signed-off-by: Juraj Linkeš <juraj.linkes@pantheon.tech>
---
doc/guides/tools/dts.rst | 29 +-
dts/conf.yaml | 2 +-
dts/framework/exception.py | 10 +-
dts/framework/remote_session/linux_session.py | 31 +-
dts/framework/remote_session/os_session.py | 51 +++-
dts/framework/remote_session/posix_session.py | 48 +--
.../remote_session/remote/remote_session.py | 35 ++-
.../remote_session/remote/ssh_session.py | 287 ++++++------------
dts/framework/testbed_model/sut_node.py | 12 +-
dts/framework/utils.py | 9 -
dts/poetry.lock | 161 ++++++++--
dts/pyproject.toml | 2 +-
12 files changed, 376 insertions(+), 301 deletions(-)
diff --git a/doc/guides/tools/dts.rst b/doc/guides/tools/dts.rst
index ebd6dceb6a..d15826c098 100644
--- a/doc/guides/tools/dts.rst
+++ b/doc/guides/tools/dts.rst
@@ -95,9 +95,14 @@ Setting up DTS environment
#. **SSH Connection**
- DTS uses Python pexpect for SSH connections between DTS environment and the other hosts.
- The pexpect implementation is a wrapper around the ssh command in the DTS environment.
- This means it'll use the SSH agent providing the ssh command and its keys.
+ DTS uses the Fabric Python library for SSH connections between DTS environment
+ and the other hosts.
+ The authentication method used is pubkey authentication.
+ Fabric tries to use a passed key/certificate,
+ then any key it can with through an SSH agent,
+ then any "id_rsa", "id_dsa" or "id_ecdsa" key discoverable in ``~/.ssh/``
+ (with any matching OpenSSH-style certificates).
+ DTS doesn't pass any keys, so Fabric tries to use the other two methods.
Setting up System Under Test
@@ -132,6 +137,21 @@ There are two areas that need to be set up on a System Under Test:
It's possible to use the hugepage configuration already present on the SUT.
If you wish to do so, don't specify the hugepage configuration in the DTS config file.
+#. **User with administrator privileges**
+
+.. _sut_admin_user:
+
+ DTS needs administrator privileges to run DPDK applications (such as testpmd) on the SUT.
+ The SUT user must be able run commands in privileged mode without asking for password.
+ On most Linux distributions, it's a matter of setting up passwordless sudo:
+
+ #. Run ``sudo visudo`` and check that it contains ``%sudo ALL=(ALL:ALL) ALL``.
+
+ #. Add the SUT user to the sudo group with:
+
+ .. code-block:: console
+
+ sudo usermod -aG sudo <sut_user>
Running DTS
-----------
@@ -151,7 +171,8 @@ which is a template that illustrates what can be configured in DTS:
:start-at: executions:
-The user must be root or any other user with prompt starting with ``#``.
+The user must have :ref:`administrator privileges <sut_admin_user>`
+which don't require password authentication.
The other fields are mostly self-explanatory
and documented in more detail in ``dts/framework/config/conf_yaml_schema.json``.
diff --git a/dts/conf.yaml b/dts/conf.yaml
index a9bd8a3ecf..129801d87c 100644
--- a/dts/conf.yaml
+++ b/dts/conf.yaml
@@ -16,7 +16,7 @@ executions:
nodes:
- name: "SUT 1"
hostname: sut1.change.me.localhost
- user: root
+ user: dtsuser
arch: x86_64
os: linux
lcores: ""
diff --git a/dts/framework/exception.py b/dts/framework/exception.py
index ca353d98fc..44ff4e979a 100644
--- a/dts/framework/exception.py
+++ b/dts/framework/exception.py
@@ -62,13 +62,19 @@ class SSHConnectionError(DTSError):
"""
host: str
+ errors: list[str]
severity: ClassVar[ErrorSeverity] = ErrorSeverity.SSH_ERR
- def __init__(self, host: str):
+ def __init__(self, host: str, errors: list[str] | None = None):
self.host = host
+ self.errors = [] if errors is None else errors
def __str__(self) -> str:
- return f"Error trying to connect with {self.host}"
+ message = f"Error trying to connect with {self.host}."
+ if self.errors:
+ message += f" Errors encountered while retrying: {', '.join(self.errors)}"
+
+ return message
class SSHSessionDeadError(DTSError):
diff --git a/dts/framework/remote_session/linux_session.py b/dts/framework/remote_session/linux_session.py
index a1e3bc3a92..f13f399121 100644
--- a/dts/framework/remote_session/linux_session.py
+++ b/dts/framework/remote_session/linux_session.py
@@ -14,10 +14,11 @@ class LinuxSession(PosixSession):
The implementation of non-Posix compliant parts of Linux remote sessions.
"""
+ def _get_privileged_command(self, command: str) -> str:
+ return f"sudo -- sh -c '{command}'"
+
def get_remote_cpus(self, use_first_core: bool) -> list[LogicalCore]:
- cpu_info = self.remote_session.send_command(
- "lscpu -p=CPU,CORE,SOCKET,NODE|grep -v \\#"
- ).stdout
+ cpu_info = self.send_command("lscpu -p=CPU,CORE,SOCKET,NODE|grep -v \\#").stdout
lcores = []
for cpu_line in cpu_info.splitlines():
lcore, core, socket, node = map(int, cpu_line.split(","))
@@ -45,20 +46,20 @@ def setup_hugepages(self, hugepage_amount: int, force_first_numa: bool) -> None:
self._mount_huge_pages()
def _get_hugepage_size(self) -> int:
- hugepage_size = self.remote_session.send_command(
+ hugepage_size = self.send_command(
"awk '/Hugepagesize/ {print $2}' /proc/meminfo"
).stdout
return int(hugepage_size)
def _get_hugepages_total(self) -> int:
- hugepages_total = self.remote_session.send_command(
+ hugepages_total = self.send_command(
"awk '/HugePages_Total/ { print $2 }' /proc/meminfo"
).stdout
return int(hugepages_total)
def _get_numa_nodes(self) -> list[int]:
try:
- numa_count = self.remote_session.send_command(
+ numa_count = self.send_command(
"cat /sys/devices/system/node/online", verify=True
).stdout
numa_range = expand_range(numa_count)
@@ -70,14 +71,12 @@ def _get_numa_nodes(self) -> list[int]:
def _mount_huge_pages(self) -> None:
self._logger.info("Re-mounting Hugepages.")
hugapge_fs_cmd = "awk '/hugetlbfs/ { print $2 }' /proc/mounts"
- self.remote_session.send_command(f"umount $({hugapge_fs_cmd})")
- result = self.remote_session.send_command(hugapge_fs_cmd)
+ self.send_command(f"umount $({hugapge_fs_cmd})")
+ result = self.send_command(hugapge_fs_cmd)
if result.stdout == "":
remote_mount_path = "/mnt/huge"
- self.remote_session.send_command(f"mkdir -p {remote_mount_path}")
- self.remote_session.send_command(
- f"mount -t hugetlbfs nodev {remote_mount_path}"
- )
+ self.send_command(f"mkdir -p {remote_mount_path}")
+ self.send_command(f"mount -t hugetlbfs nodev {remote_mount_path}")
def _supports_numa(self) -> bool:
# the system supports numa if self._numa_nodes is non-empty and there are more
@@ -94,14 +93,12 @@ def _configure_huge_pages(
)
if force_first_numa and self._supports_numa():
# clear non-numa hugepages
- self.remote_session.send_command(
- f"echo 0 | sudo tee {hugepage_config_path}"
- )
+ self.send_command(f"echo 0 | tee {hugepage_config_path}", privileged=True)
hugepage_config_path = (
f"/sys/devices/system/node/node{self._numa_nodes[0]}/hugepages"
f"/hugepages-{size}kB/nr_hugepages"
)
- self.remote_session.send_command(
- f"echo {amount} | sudo tee {hugepage_config_path}"
+ self.send_command(
+ f"echo {amount} | tee {hugepage_config_path}", privileged=True
)
diff --git a/dts/framework/remote_session/os_session.py b/dts/framework/remote_session/os_session.py
index 4c48ae2567..bfd70bd480 100644
--- a/dts/framework/remote_session/os_session.py
+++ b/dts/framework/remote_session/os_session.py
@@ -10,7 +10,7 @@
from framework.logger import DTSLOG
from framework.settings import SETTINGS
from framework.testbed_model import LogicalCore
-from framework.utils import EnvVarsDict, MesonArgs
+from framework.utils import MesonArgs
from .remote import CommandResult, RemoteSession, create_remote_session
@@ -53,17 +53,32 @@ def is_alive(self) -> bool:
def send_command(
self,
command: str,
- timeout: float,
+ timeout: float = SETTINGS.timeout,
+ privileged: bool = False,
verify: bool = False,
- env: EnvVarsDict | None = None,
+ env: dict | None = None,
) -> CommandResult:
"""
An all-purpose API in case the command to be executed is already
OS-agnostic, such as when the path to the executed command has been
constructed beforehand.
"""
+ if privileged:
+ command = self._get_privileged_command(command)
+
return self.remote_session.send_command(command, timeout, verify, env)
+ @abstractmethod
+ def _get_privileged_command(self, command: str) -> str:
+ """Modify the command so that it executes with administrative privileges.
+
+ Args:
+ command: The command to modify.
+
+ Returns:
+ The modified command that executes with administrative privileges.
+ """
+
@abstractmethod
def guess_dpdk_remote_dir(self, remote_dir) -> PurePath:
"""
@@ -90,17 +105,35 @@ def join_remote_path(self, *args: str | PurePath) -> PurePath:
"""
@abstractmethod
- def copy_file(
+ def copy_from(
self,
source_file: str | PurePath,
destination_file: str | PurePath,
- source_remote: bool = False,
) -> None:
+ """Copy a file from the remote Node to the local filesystem.
+
+ Copy source_file from the remote Node associated with this remote
+ session to destination_file on the local filesystem.
+
+ Args:
+ source_file: the file on the remote Node.
+ destination_file: a file or directory path on the local filesystem.
"""
+
+ @abstractmethod
+ def copy_to(
+ self,
+ source_file: str | PurePath,
+ destination_file: str | PurePath,
+ ) -> None:
+ """Copy a file from local filesystem to the remote Node.
+
Copy source_file from local filesystem to destination_file
- on the remote Node associated with the remote session.
- If source_remote is True, reverse the direction - copy source_file from the
- associated remote Node to destination_file on local storage.
+ on the remote Node associated with this remote session.
+
+ Args:
+ source_file: the file on the local filesystem.
+ destination_file: a file or directory path on the remote Node.
"""
@abstractmethod
@@ -128,7 +161,7 @@ def extract_remote_tarball(
@abstractmethod
def build_dpdk(
self,
- env_vars: EnvVarsDict,
+ env_vars: dict,
meson_args: MesonArgs,
remote_dpdk_dir: str | PurePath,
remote_dpdk_build_dir: str | PurePath,
diff --git a/dts/framework/remote_session/posix_session.py b/dts/framework/remote_session/posix_session.py
index d38062e8d6..8ca0acb429 100644
--- a/dts/framework/remote_session/posix_session.py
+++ b/dts/framework/remote_session/posix_session.py
@@ -9,7 +9,7 @@
from framework.config import Architecture
from framework.exception import DPDKBuildError, RemoteCommandExecutionError
from framework.settings import SETTINGS
-from framework.utils import EnvVarsDict, MesonArgs
+from framework.utils import MesonArgs
from .os_session import OSSession
@@ -34,7 +34,7 @@ def combine_short_options(**opts: bool) -> str:
def guess_dpdk_remote_dir(self, remote_dir) -> PurePosixPath:
remote_guess = self.join_remote_path(remote_dir, "dpdk-*")
- result = self.remote_session.send_command(f"ls -d {remote_guess} | tail -1")
+ result = self.send_command(f"ls -d {remote_guess} | tail -1")
return PurePosixPath(result.stdout)
def get_remote_tmp_dir(self) -> PurePosixPath:
@@ -48,7 +48,7 @@ def get_dpdk_build_env_vars(self, arch: Architecture) -> dict:
env_vars = {}
if arch == Architecture.i686:
# find the pkg-config path and store it in PKG_CONFIG_LIBDIR
- out = self.remote_session.send_command("find /usr -type d -name pkgconfig")
+ out = self.send_command("find /usr -type d -name pkgconfig")
pkg_path = ""
res_path = out.stdout.split("\r\n")
for cur_path in res_path:
@@ -65,13 +65,19 @@ def get_dpdk_build_env_vars(self, arch: Architecture) -> dict:
def join_remote_path(self, *args: str | PurePath) -> PurePosixPath:
return PurePosixPath(*args)
- def copy_file(
+ def copy_from(
self,
source_file: str | PurePath,
destination_file: str | PurePath,
- source_remote: bool = False,
) -> None:
- self.remote_session.copy_file(source_file, destination_file, source_remote)
+ self.remote_session.copy_from(source_file, destination_file)
+
+ def copy_to(
+ self,
+ source_file: str | PurePath,
+ destination_file: str | PurePath,
+ ) -> None:
+ self.remote_session.copy_to(source_file, destination_file)
def remove_remote_dir(
self,
@@ -80,24 +86,24 @@ def remove_remote_dir(
force: bool = True,
) -> None:
opts = PosixSession.combine_short_options(r=recursive, f=force)
- self.remote_session.send_command(f"rm{opts} {remote_dir_path}")
+ self.send_command(f"rm{opts} {remote_dir_path}")
def extract_remote_tarball(
self,
remote_tarball_path: str | PurePath,
expected_dir: str | PurePath | None = None,
) -> None:
- self.remote_session.send_command(
+ self.send_command(
f"tar xfm {remote_tarball_path} "
f"-C {PurePosixPath(remote_tarball_path).parent}",
60,
)
if expected_dir:
- self.remote_session.send_command(f"ls {expected_dir}", verify=True)
+ self.send_command(f"ls {expected_dir}", verify=True)
def build_dpdk(
self,
- env_vars: EnvVarsDict,
+ env_vars: dict,
meson_args: MesonArgs,
remote_dpdk_dir: str | PurePath,
remote_dpdk_build_dir: str | PurePath,
@@ -108,7 +114,7 @@ def build_dpdk(
if rebuild:
# reconfigure, then build
self._logger.info("Reconfiguring DPDK build.")
- self.remote_session.send_command(
+ self.send_command(
f"meson configure {meson_args} {remote_dpdk_build_dir}",
timeout,
verify=True,
@@ -118,7 +124,7 @@ def build_dpdk(
# fresh build - remove target dir first, then build from scratch
self._logger.info("Configuring DPDK build from scratch.")
self.remove_remote_dir(remote_dpdk_build_dir)
- self.remote_session.send_command(
+ self.send_command(
f"meson setup "
f"{meson_args} {remote_dpdk_dir} {remote_dpdk_build_dir}",
timeout,
@@ -127,14 +133,14 @@ def build_dpdk(
)
self._logger.info("Building DPDK.")
- self.remote_session.send_command(
+ self.send_command(
f"ninja -C {remote_dpdk_build_dir}", timeout, verify=True, env=env_vars
)
except RemoteCommandExecutionError as e:
raise DPDKBuildError(f"DPDK build failed when doing '{e.command}'.")
def get_dpdk_version(self, build_dir: str | PurePath) -> str:
- out = self.remote_session.send_command(
+ out = self.send_command(
f"cat {self.join_remote_path(build_dir, 'VERSION')}", verify=True
)
return out.stdout
@@ -146,7 +152,7 @@ def kill_cleanup_dpdk_apps(self, dpdk_prefix_list: Iterable[str]) -> None:
# kill and cleanup only if DPDK is running
dpdk_pids = self._get_dpdk_pids(dpdk_runtime_dirs)
for dpdk_pid in dpdk_pids:
- self.remote_session.send_command(f"kill -9 {dpdk_pid}", 20)
+ self.send_command(f"kill -9 {dpdk_pid}", 20)
self._check_dpdk_hugepages(dpdk_runtime_dirs)
self._remove_dpdk_runtime_dirs(dpdk_runtime_dirs)
@@ -168,7 +174,7 @@ def _list_remote_dirs(self, remote_path: str | PurePath) -> list[str] | None:
Return a list of directories of the remote_dir.
If remote_path doesn't exist, return None.
"""
- out = self.remote_session.send_command(
+ out = self.send_command(
f"ls -l {remote_path} | awk '/^d/ {{print $NF}}'"
).stdout
if "No such file or directory" in out:
@@ -182,9 +188,7 @@ def _get_dpdk_pids(self, dpdk_runtime_dirs: Iterable[str | PurePath]) -> list[in
for dpdk_runtime_dir in dpdk_runtime_dirs:
dpdk_config_file = PurePosixPath(dpdk_runtime_dir, "config")
if self._remote_files_exists(dpdk_config_file):
- out = self.remote_session.send_command(
- f"lsof -Fp {dpdk_config_file}"
- ).stdout
+ out = self.send_command(f"lsof -Fp {dpdk_config_file}").stdout
if out and "No such file or directory" not in out:
for out_line in out.splitlines():
match = re.match(pid_regex, out_line)
@@ -193,7 +197,7 @@ def _get_dpdk_pids(self, dpdk_runtime_dirs: Iterable[str | PurePath]) -> list[in
return pids
def _remote_files_exists(self, remote_path: PurePath) -> bool:
- result = self.remote_session.send_command(f"test -e {remote_path}")
+ result = self.send_command(f"test -e {remote_path}")
return not result.return_code
def _check_dpdk_hugepages(
@@ -202,9 +206,7 @@ def _check_dpdk_hugepages(
for dpdk_runtime_dir in dpdk_runtime_dirs:
hugepage_info = PurePosixPath(dpdk_runtime_dir, "hugepage_info")
if self._remote_files_exists(hugepage_info):
- out = self.remote_session.send_command(
- f"lsof -Fp {hugepage_info}"
- ).stdout
+ out = self.send_command(f"lsof -Fp {hugepage_info}").stdout
if out and "No such file or directory" not in out:
self._logger.warning("Some DPDK processes did not free hugepages.")
self._logger.warning("*******************************************")
diff --git a/dts/framework/remote_session/remote/remote_session.py b/dts/framework/remote_session/remote/remote_session.py
index 91dee3cb4f..0647d93de4 100644
--- a/dts/framework/remote_session/remote/remote_session.py
+++ b/dts/framework/remote_session/remote/remote_session.py
@@ -11,7 +11,6 @@
from framework.exception import RemoteCommandExecutionError
from framework.logger import DTSLOG
from framework.settings import SETTINGS
-from framework.utils import EnvVarsDict
@dataclasses.dataclass(slots=True, frozen=True)
@@ -89,7 +88,7 @@ def send_command(
command: str,
timeout: float = SETTINGS.timeout,
verify: bool = False,
- env: EnvVarsDict | None = None,
+ env: dict | None = None,
) -> CommandResult:
"""
Send a command to the connected node using optional env vars
@@ -114,7 +113,7 @@ def send_command(
@abstractmethod
def _send_command(
- self, command: str, timeout: float, env: EnvVarsDict | None
+ self, command: str, timeout: float, env: dict | None
) -> CommandResult:
"""
Use the underlying protocol to execute the command using optional env vars
@@ -141,15 +140,33 @@ def is_alive(self) -> bool:
"""
@abstractmethod
- def copy_file(
+ def copy_from(
self,
source_file: str | PurePath,
destination_file: str | PurePath,
- source_remote: bool = False,
) -> None:
+ """Copy a file from the remote Node to the local filesystem.
+
+ Copy source_file from the remote Node associated with this remote
+ session to destination_file on the local filesystem.
+
+ Args:
+ source_file: the file on the remote Node.
+ destination_file: a file or directory path on the local filesystem.
"""
- Copy source_file from local filesystem to destination_file on the remote Node
- associated with the remote session.
- If source_remote is True, reverse the direction - copy source_file from the
- associated Node to destination_file on local filesystem.
+
+ @abstractmethod
+ def copy_to(
+ self,
+ source_file: str | PurePath,
+ destination_file: str | PurePath,
+ ) -> None:
+ """Copy a file from local filesystem to the remote Node.
+
+ Copy source_file from local filesystem to destination_file
+ on the remote Node associated with this remote session.
+
+ Args:
+ source_file: the file on the local filesystem.
+ destination_file: a file or directory path on the remote Node.
"""
diff --git a/dts/framework/remote_session/remote/ssh_session.py b/dts/framework/remote_session/remote/ssh_session.py
index 42ff9498a2..8d127f1601 100644
--- a/dts/framework/remote_session/remote/ssh_session.py
+++ b/dts/framework/remote_session/remote/ssh_session.py
@@ -1,29 +1,49 @@
# SPDX-License-Identifier: BSD-3-Clause
-# Copyright(c) 2010-2014 Intel Corporation
-# Copyright(c) 2022-2023 PANTHEON.tech s.r.o.
-# Copyright(c) 2022-2023 University of New Hampshire
+# Copyright(c) 2023 PANTHEON.tech s.r.o.
-import time
+import socket
+import traceback
from pathlib import PurePath
-import pexpect # type: ignore
-from pexpect import pxssh # type: ignore
+from fabric import Connection # type: ignore[import]
+from invoke.exceptions import ( # type: ignore[import]
+ CommandTimedOut,
+ ThreadException,
+ UnexpectedExit,
+)
+from paramiko.ssh_exception import ( # type: ignore[import]
+ AuthenticationException,
+ BadHostKeyException,
+ NoValidConnectionsError,
+ SSHException,
+)
from framework.config import NodeConfiguration
from framework.exception import SSHConnectionError, SSHSessionDeadError, SSHTimeoutError
from framework.logger import DTSLOG
-from framework.utils import GREEN, RED, EnvVarsDict
from .remote_session import CommandResult, RemoteSession
class SSHSession(RemoteSession):
- """
- Module for creating Pexpect SSH remote sessions.
+ """A persistent SSH connection to a remote Node.
+
+ The connection is implemented with the Fabric Python library.
+
+ Args:
+ node_config: The configuration of the Node to connect to.
+ session_name: The name of the session.
+ logger: The logger used for logging.
+ This should be passed from the parent OSSession.
+
+ Attributes:
+ session: The underlying Fabric SSH connection.
+
+ Raises:
+ SSHConnectionError: The connection cannot be established.
"""
- session: pxssh.pxssh
- magic_prompt: str
+ session: Connection
def __init__(
self,
@@ -31,218 +51,91 @@ def __init__(
session_name: str,
logger: DTSLOG,
):
- self.magic_prompt = "MAGIC PROMPT"
super(SSHSession, self).__init__(node_config, session_name, logger)
def _connect(self) -> None:
- """
- Create connection to assigned node.
- """
+ errors = []
retry_attempts = 10
login_timeout = 20 if self.port else 10
- password_regex = (
- r"(?i)(?:password:)|(?:passphrase for key)|(?i)(password for .+:)"
- )
- try:
- for retry_attempt in range(retry_attempts):
- self.session = pxssh.pxssh(encoding="utf-8")
- try:
- self.session.login(
- self.ip,
- self.username,
- self.password,
- original_prompt="[$#>]",
- port=self.port,
- login_timeout=login_timeout,
- password_regex=password_regex,
- )
- break
- except Exception as e:
- self._logger.warning(e)
- time.sleep(2)
- self._logger.info(
- f"Retrying connection: retry number {retry_attempt + 1}."
- )
- else:
- raise Exception(f"Connection to {self.hostname} failed")
-
- self.send_expect("stty -echo", "#")
- self.send_expect("stty columns 1000", "#")
- self.send_expect("bind 'set enable-bracketed-paste off'", "#")
- except Exception as e:
- self._logger.error(RED(str(e)))
- if getattr(self, "port", None):
- suggestion = (
- f"\nSuggestion: Check if the firewall on {self.hostname} is "
- f"stopped.\n"
+ for retry_attempt in range(retry_attempts):
+ try:
+ self.session = Connection(
+ self.ip,
+ user=self.username,
+ port=self.port,
+ connect_kwargs={"password": self.password},
+ connect_timeout=login_timeout,
)
- self._logger.info(GREEN(suggestion))
-
- raise SSHConnectionError(self.hostname)
+ self.session.open()
- def send_expect(
- self, command: str, prompt: str, timeout: float = 15, verify: bool = False
- ) -> str | int:
- try:
- ret = self.send_expect_base(command, prompt, timeout)
- if verify:
- ret_status = self.send_expect_base("echo $?", prompt, timeout)
- try:
- retval = int(ret_status)
- if retval:
- self._logger.error(f"Command: {command} failure!")
- self._logger.error(ret)
- return retval
- else:
- return ret
- except ValueError:
- return ret
- else:
- return ret
- except Exception as e:
- self._logger.error(
- f"Exception happened in [{command}] and output is "
- f"[{self._get_output()}]"
- )
- raise e
-
- def send_expect_base(self, command: str, prompt: str, timeout: float) -> str:
- self._clean_session()
- original_prompt = self.session.PROMPT
- self.session.PROMPT = prompt
- self._send_line(command)
- self._prompt(command, timeout)
-
- before = self._get_output()
- self.session.PROMPT = original_prompt
- return before
-
- def _clean_session(self) -> None:
- self.session.PROMPT = self.magic_prompt
- self.get_output(timeout=0.01)
- self.session.PROMPT = self.session.UNIQUE_PROMPT
-
- def _send_line(self, command: str) -> None:
- if not self.is_alive():
- raise SSHSessionDeadError(self.hostname)
- if len(command) == 2 and command.startswith("^"):
- self.session.sendcontrol(command[1])
- else:
- self.session.sendline(command)
+ except (ValueError, BadHostKeyException, AuthenticationException) as e:
+ self._logger.exception(e)
+ raise SSHConnectionError(self.hostname) from e
- def _prompt(self, command: str, timeout: float) -> None:
- if not self.session.prompt(timeout):
- raise SSHTimeoutError(command, self._get_output()) from None
+ except (NoValidConnectionsError, socket.error, SSHException) as e:
+ self._logger.debug(traceback.format_exc())
+ self._logger.warning(e)
- def get_output(self, timeout: float = 15) -> str:
- """
- Get all output before timeout
- """
- try:
- self.session.prompt(timeout)
- except Exception:
- pass
-
- before = self._get_output()
- self._flush()
-
- return before
+ error = repr(e)
+ if error not in errors:
+ errors.append(error)
- def _get_output(self) -> str:
- if not self.is_alive():
- raise SSHSessionDeadError(self.hostname)
- before = self.session.before.rsplit("\r\n", 1)[0]
- if before == "[PEXPECT]":
- return ""
- return before
+ self._logger.info(
+ f"Retrying connection: retry number {retry_attempt + 1}."
+ )
- def _flush(self) -> None:
- """
- Clear all session buffer
- """
- self.session.buffer = ""
- self.session.before = ""
+ else:
+ break
+ else:
+ raise SSHConnectionError(self.hostname, errors)
def is_alive(self) -> bool:
- return self.session.isalive()
+ return self.session.is_connected
def _send_command(
- self, command: str, timeout: float, env: EnvVarsDict | None
+ self, command: str, timeout: float, env: dict | None
) -> CommandResult:
- output = self._send_command_get_output(command, timeout, env)
- return_code = int(self._send_command_get_output("echo $?", timeout, None))
+ """Send a command and return the result of the execution.
- # we're capturing only stdout
- return CommandResult(self.name, command, output, "", return_code)
+ Args:
+ command: The command to execute.
+ timeout: Wait at most this many seconds for the execution to complete.
+ env: Extra environment variables that will be used in command execution.
- def _send_command_get_output(
- self, command: str, timeout: float, env: EnvVarsDict | None
- ) -> str:
+ Raises:
+ SSHSessionDeadError: The session died while executing the command.
+ SSHTimeoutError: The command execution timed out.
+ """
try:
- self._clean_session()
- if env:
- command = f"{env} {command}"
- self._send_line(command)
- except Exception as e:
- raise e
+ output = self.session.run(
+ command, env=env, warn=True, hide=True, timeout=timeout
+ )
- output = self.get_output(timeout=timeout)
- self.session.PROMPT = self.session.UNIQUE_PROMPT
- self.session.prompt(0.1)
+ except (UnexpectedExit, ThreadException) as e:
+ self._logger.exception(e)
+ raise SSHSessionDeadError(self.hostname) from e
- return output
+ except CommandTimedOut as e:
+ self._logger.exception(e)
+ raise SSHTimeoutError(command, e.result.stderr) from e
- def _close(self, force: bool = False) -> None:
- if force is True:
- self.session.close()
- else:
- if self.is_alive():
- self.session.logout()
+ return CommandResult(
+ self.name, command, output.stdout, output.stderr, output.return_code
+ )
- def copy_file(
+ def copy_from(
self,
source_file: str | PurePath,
destination_file: str | PurePath,
- source_remote: bool = False,
) -> None:
- """
- Send a local file to a remote host.
- """
- if source_remote:
- source_file = f"{self.username}@{self.ip}:{source_file}"
- else:
- destination_file = f"{self.username}@{self.ip}:{destination_file}"
+ self.session.get(str(destination_file), str(source_file))
- port = ""
- if self.port:
- port = f" -P {self.port}"
-
- command = (
- f"scp -v{port} -o NoHostAuthenticationForLocalhost=yes"
- f" {source_file} {destination_file}"
- )
-
- self._spawn_scp(command)
+ def copy_to(
+ self,
+ source_file: str | PurePath,
+ destination_file: str | PurePath,
+ ) -> None:
+ self.session.put(str(source_file), str(destination_file))
- def _spawn_scp(self, scp_cmd: str) -> None:
- """
- Transfer a file with SCP
- """
- self._logger.info(scp_cmd)
- p: pexpect.spawn = pexpect.spawn(scp_cmd)
- time.sleep(0.5)
- ssh_newkey: str = "Are you sure you want to continue connecting"
- i: int = p.expect(
- [ssh_newkey, "[pP]assword", "# ", pexpect.EOF, pexpect.TIMEOUT], 120
- )
- if i == 0: # add once in trust list
- p.sendline("yes")
- i = p.expect([ssh_newkey, "[pP]assword", pexpect.EOF], 2)
-
- if i == 1:
- time.sleep(0.5)
- p.sendline(self.password)
- p.expect("Exit status 0", 60)
- if i == 4:
- self._logger.error("SCP TIMEOUT error %d" % i)
- p.close()
+ def _close(self, force: bool = False) -> None:
+ self.session.close()
diff --git a/dts/framework/testbed_model/sut_node.py b/dts/framework/testbed_model/sut_node.py
index 2b2b50d982..9dbc390848 100644
--- a/dts/framework/testbed_model/sut_node.py
+++ b/dts/framework/testbed_model/sut_node.py
@@ -10,7 +10,7 @@
from framework.config import BuildTargetConfiguration, NodeConfiguration
from framework.remote_session import CommandResult, OSSession
from framework.settings import SETTINGS
-from framework.utils import EnvVarsDict, MesonArgs
+from framework.utils import MesonArgs
from .hw import LogicalCoreCount, LogicalCoreList, VirtualDevice
from .node import Node
@@ -27,7 +27,7 @@ class SutNode(Node):
_dpdk_prefix_list: list[str]
_dpdk_timestamp: str
_build_target_config: BuildTargetConfiguration | None
- _env_vars: EnvVarsDict
+ _env_vars: dict
_remote_tmp_dir: PurePath
__remote_dpdk_dir: PurePath | None
_dpdk_version: str | None
@@ -38,7 +38,7 @@ def __init__(self, node_config: NodeConfiguration):
super(SutNode, self).__init__(node_config)
self._dpdk_prefix_list = []
self._build_target_config = None
- self._env_vars = EnvVarsDict()
+ self._env_vars = {}
self._remote_tmp_dir = self.main_session.get_remote_tmp_dir()
self.__remote_dpdk_dir = None
self._dpdk_version = None
@@ -94,7 +94,7 @@ def _configure_build_target(
"""
Populate common environment variables and set build target config.
"""
- self._env_vars = EnvVarsDict()
+ self._env_vars = {}
self._build_target_config = build_target_config
self._env_vars.update(
self.main_session.get_dpdk_build_env_vars(build_target_config.arch)
@@ -112,7 +112,7 @@ def _copy_dpdk_tarball(self) -> None:
Copy to and extract DPDK tarball on the SUT node.
"""
self._logger.info("Copying DPDK tarball to SUT.")
- self.main_session.copy_file(SETTINGS.dpdk_tarball_path, self._remote_tmp_dir)
+ self.main_session.copy_to(SETTINGS.dpdk_tarball_path, self._remote_tmp_dir)
# construct remote tarball path
# the basename is the same on local host and on remote Node
@@ -259,7 +259,7 @@ def run_dpdk_app(
Run DPDK application on the remote node.
"""
return self.main_session.send_command(
- f"{app_path} {eal_args}", timeout, verify=True
+ f"{app_path} {eal_args}", timeout, privileged=True, verify=True
)
diff --git a/dts/framework/utils.py b/dts/framework/utils.py
index 55e0b0ef0e..8cfbc6a29d 100644
--- a/dts/framework/utils.py
+++ b/dts/framework/utils.py
@@ -42,19 +42,10 @@ def expand_range(range_str: str) -> list[int]:
return expanded_range
-def GREEN(text: str) -> str:
- return f"\u001B[32;1m{str(text)}\u001B[0m"
-
-
def RED(text: str) -> str:
return f"\u001B[31;1m{str(text)}\u001B[0m"
-class EnvVarsDict(dict):
- def __str__(self) -> str:
- return " ".join(["=".join(item) for item in self.items()])
-
-
class MesonArgs(object):
"""
Aggregate the arguments needed to build DPDK:
diff --git a/dts/poetry.lock b/dts/poetry.lock
index 0b2a007d4d..2438f337cd 100644
--- a/dts/poetry.lock
+++ b/dts/poetry.lock
@@ -12,6 +12,18 @@ docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"]
+[[package]]
+name = "bcrypt"
+version = "4.0.1"
+description = "Modern password hashing for your software and your servers"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[package.extras]
+tests = ["pytest (>=3.2.1,!=3.3.0)"]
+typecheck = ["mypy"]
+
[[package]]
name = "black"
version = "22.10.0"
@@ -33,6 +45,17 @@ d = ["aiohttp (>=3.7.4)"]
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
uvloop = ["uvloop (>=0.15.2)"]
+[[package]]
+name = "cffi"
+version = "1.15.1"
+description = "Foreign Function Interface for Python calling C code."
+category = "main"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+pycparser = "*"
+
[[package]]
name = "click"
version = "8.1.3"
@@ -52,6 +75,52 @@ category = "dev"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+[[package]]
+name = "cryptography"
+version = "40.0.2"
+description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+cffi = ">=1.12"
+
+[package.extras]
+docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
+docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"]
+pep8test = ["black", "ruff", "mypy", "check-manifest"]
+sdist = ["setuptools-rust (>=0.11.4)"]
+ssh = ["bcrypt (>=3.1.5)"]
+test = ["pytest (>=6.2.0)", "pytest-shard (>=0.1.2)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601"]
+test-randomorder = ["pytest-randomly"]
+tox = ["tox"]
+
+[[package]]
+name = "fabric"
+version = "2.7.1"
+description = "High level SSH command execution"
+category = "main"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+invoke = ">=1.3,<2.0"
+paramiko = ">=2.4"
+pathlib2 = "*"
+
+[package.extras]
+pytest = ["mock (>=2.0.0,<3.0)", "pytest (>=3.2.5,<4.0)"]
+testing = ["mock (>=2.0.0,<3.0)"]
+
+[[package]]
+name = "invoke"
+version = "1.7.3"
+description = "Pythonic task execution"
+category = "main"
+optional = false
+python-versions = "*"
+
[[package]]
name = "isort"
version = "5.10.1"
@@ -136,23 +205,41 @@ optional = false
python-versions = "*"
[[package]]
-name = "pathspec"
-version = "0.10.1"
-description = "Utility library for gitignore style pattern matching of file paths."
-category = "dev"
+name = "paramiko"
+version = "3.1.0"
+description = "SSH2 protocol library"
+category = "main"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.6"
+
+[package.dependencies]
+bcrypt = ">=3.2"
+cryptography = ">=3.3"
+pynacl = ">=1.5"
+
+[package.extras]
+all = ["pyasn1 (>=0.1.7)", "invoke (>=2.0)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"]
+gssapi = ["pyasn1 (>=0.1.7)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"]
+invoke = ["invoke (>=2.0)"]
[[package]]
-name = "pexpect"
-version = "4.8.0"
-description = "Pexpect allows easy control of interactive console applications."
+name = "pathlib2"
+version = "2.3.7.post1"
+description = "Object-oriented filesystem paths"
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
-ptyprocess = ">=0.5"
+six = "*"
+
+[[package]]
+name = "pathspec"
+version = "0.10.1"
+description = "Utility library for gitignore style pattern matching of file paths."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
[[package]]
name = "platformdirs"
@@ -166,14 +253,6 @@ python-versions = ">=3.7"
docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"]
test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"]
-[[package]]
-name = "ptyprocess"
-version = "0.7.0"
-description = "Run a subprocess in a pseudo terminal"
-category = "main"
-optional = false
-python-versions = "*"
-
[[package]]
name = "pycodestyle"
version = "2.9.1"
@@ -182,6 +261,14 @@ category = "dev"
optional = false
python-versions = ">=3.6"
+[[package]]
+name = "pycparser"
+version = "2.21"
+description = "C parser in Python"
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+
[[package]]
name = "pydocstyle"
version = "6.1.1"
@@ -228,6 +315,21 @@ tests = ["pytest (>=7.1.2)", "pytest-mypy", "eradicate (>=2.0.0)", "radon (>=5.1
toml = ["toml (>=0.10.2)"]
vulture = ["vulture"]
+[[package]]
+name = "pynacl"
+version = "1.5.0"
+description = "Python binding to the Networking and Cryptography (NaCl) library"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+cffi = ">=1.4.1"
+
+[package.extras]
+docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"]
+tests = ["pytest (>=3.2.1,!=3.3.0)", "hypothesis (>=3.27.0)"]
+
[[package]]
name = "pyrsistent"
version = "0.19.1"
@@ -244,6 +346,14 @@ category = "main"
optional = false
python-versions = ">=3.6"
+[[package]]
+name = "six"
+version = "1.16.0"
+description = "Python 2 and 3 compatibility utilities"
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+
[[package]]
name = "snowballstemmer"
version = "2.2.0"
@@ -299,13 +409,18 @@ jsonschema = ">=4,<5"
[metadata]
lock-version = "1.1"
python-versions = "^3.10"
-content-hash = "a0f040b07fc6ce4deb0be078b9a88c2a465cb6bccb9e260a67e92c2403e2319f"
+content-hash = "719c43bcaa5d181921debda884f8f714063df0b2336d61e9f64ecab034e8b139"
[metadata.files]
attrs = []
+bcrypt = []
black = []
+cffi = []
click = []
colorama = []
+cryptography = []
+fabric = []
+invoke = []
isort = []
jsonpatch = []
jsonpointer = []
@@ -313,22 +428,22 @@ jsonschema = []
mccabe = []
mypy = []
mypy-extensions = []
+paramiko = []
+pathlib2 = []
pathspec = []
-pexpect = [
- {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"},
- {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"},
-]
platformdirs = [
{file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
{file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},
]
-ptyprocess = []
pycodestyle = []
+pycparser = []
pydocstyle = []
pyflakes = []
pylama = []
+pynacl = []
pyrsistent = []
pyyaml = []
+six = []
snowballstemmer = []
toml = []
tomli = []
diff --git a/dts/pyproject.toml b/dts/pyproject.toml
index a136c91e5e..50bcdb327a 100644
--- a/dts/pyproject.toml
+++ b/dts/pyproject.toml
@@ -9,10 +9,10 @@ authors = ["Owen Hilyard <ohilyard@iol.unh.edu>", "dts@dpdk.org"]
[tool.poetry.dependencies]
python = "^3.10"
-pexpect = "^4.8.0"
warlock = "^2.0.1"
PyYAML = "^6.0"
types-PyYAML = "^6.0.8"
+fabric = "^2.7.1"
[tool.poetry.dev-dependencies]
mypy = "^0.961"
--
2.30.2
^ permalink raw reply [flat|nested] 21+ messages in thread
* Re: [PATCH v2] dts: replace pexpect with fabric
2023-04-24 13:35 ` [PATCH v2] " Juraj Linkeš
@ 2023-04-28 19:03 ` Jeremy Spewock
2023-05-02 13:00 ` Juraj Linkeš
2023-06-09 9:46 ` [PATCH v3] " Juraj Linkeš
1 sibling, 1 reply; 21+ messages in thread
From: Jeremy Spewock @ 2023-04-28 19:03 UTC (permalink / raw)
To: Juraj Linkeš
Cc: thomas, Honnappa.Nagarahalli, lijuan.tu, wathsala.vithanage, probb, dev
[-- Attachment #1: Type: text/plain, Size: 48711 bytes --]
On Mon, Apr 24, 2023 at 9:35 AM Juraj Linkeš <juraj.linkes@pantheon.tech>
wrote:
> Pexpect is not a dedicated SSH connection library while Fabric is. With
> Fabric, all SSH-related logic is provided and we can just focus on
> what's DTS specific.
>
> Signed-off-by: Juraj Linkeš <juraj.linkes@pantheon.tech>
> ---
> doc/guides/tools/dts.rst | 29 +-
> dts/conf.yaml | 2 +-
> dts/framework/exception.py | 10 +-
> dts/framework/remote_session/linux_session.py | 31 +-
> dts/framework/remote_session/os_session.py | 51 +++-
> dts/framework/remote_session/posix_session.py | 48 +--
> .../remote_session/remote/remote_session.py | 35 ++-
> .../remote_session/remote/ssh_session.py | 287 ++++++------------
> dts/framework/testbed_model/sut_node.py | 12 +-
> dts/framework/utils.py | 9 -
> dts/poetry.lock | 161 ++++++++--
> dts/pyproject.toml | 2 +-
> 12 files changed, 376 insertions(+), 301 deletions(-)
>
> diff --git a/doc/guides/tools/dts.rst b/doc/guides/tools/dts.rst
> index ebd6dceb6a..d15826c098 100644
> --- a/doc/guides/tools/dts.rst
> +++ b/doc/guides/tools/dts.rst
> @@ -95,9 +95,14 @@ Setting up DTS environment
>
> #. **SSH Connection**
>
> - DTS uses Python pexpect for SSH connections between DTS environment
> and the other hosts.
> - The pexpect implementation is a wrapper around the ssh command in the
> DTS environment.
> - This means it'll use the SSH agent providing the ssh command and its
> keys.
> + DTS uses the Fabric Python library for SSH connections between DTS
> environment
> + and the other hosts.
> + The authentication method used is pubkey authentication.
> + Fabric tries to use a passed key/certificate,
> + then any key it can find through an SSH agent,
> + then any "id_rsa", "id_dsa" or "id_ecdsa" key discoverable in
> ``~/.ssh/``
> + (with any matching OpenSSH-style certificates).
> + DTS doesn't pass any keys, so Fabric tries to use the other two
> methods.
>
>
> Setting up System Under Test
> @@ -132,6 +137,21 @@ There are two areas that need to be set up on a
> System Under Test:
> It's possible to use the hugepage configuration already present on
> the SUT.
> If you wish to do so, don't specify the hugepage configuration in
> the DTS config file.
>
> +#. **User with administrator privileges**
> +
> +.. _sut_admin_user:
> +
> + DTS needs administrator privileges to run DPDK applications (such as
> testpmd) on the SUT.
> + The SUT user must be able to run commands in privileged mode without
> asking for password.
> + On most Linux distributions, it's a matter of setting up passwordless
> sudo:
> +
> + #. Run ``sudo visudo`` and check that it contains ``%sudo
> ALL=(ALL:ALL) ALL``.
> +
> + #. Add the SUT user to the sudo group with:
> +
> + .. code-block:: console
> +
> + sudo usermod -aG sudo <sut_user>
>
> Running DTS
> -----------
> @@ -151,7 +171,8 @@ which is a template that illustrates what can be
> configured in DTS:
> :start-at: executions:
>
>
> -The user must be root or any other user with prompt starting with ``#``.
> +The user must have :ref:`administrator privileges <sut_admin_user>`
> +which don't require password authentication.
> The other fields are mostly self-explanatory
> and documented in more detail in
> ``dts/framework/config/conf_yaml_schema.json``.
>
> diff --git a/dts/conf.yaml b/dts/conf.yaml
> index a9bd8a3ecf..129801d87c 100644
> --- a/dts/conf.yaml
> +++ b/dts/conf.yaml
> @@ -16,7 +16,7 @@ executions:
> nodes:
> - name: "SUT 1"
> hostname: sut1.change.me.localhost
> - user: root
> + user: dtsuser
> arch: x86_64
> os: linux
> lcores: ""
> diff --git a/dts/framework/exception.py b/dts/framework/exception.py
> index ca353d98fc..44ff4e979a 100644
> --- a/dts/framework/exception.py
> +++ b/dts/framework/exception.py
> @@ -62,13 +62,19 @@ class SSHConnectionError(DTSError):
> """
>
> host: str
> + errors: list[str]
> severity: ClassVar[ErrorSeverity] = ErrorSeverity.SSH_ERR
>
> - def __init__(self, host: str):
> + def __init__(self, host: str, errors: list[str] | None = None):
> self.host = host
> + self.errors = [] if errors is None else errors
>
> def __str__(self) -> str:
> - return f"Error trying to connect with {self.host}"
> + message = f"Error trying to connect with {self.host}."
> + if self.errors:
> + message += f" Errors encountered while retrying: {',
> '.join(self.errors)}"
> +
> + return message
>
>
> class SSHSessionDeadError(DTSError):
> diff --git a/dts/framework/remote_session/linux_session.py
> b/dts/framework/remote_session/linux_session.py
> index a1e3bc3a92..f13f399121 100644
> --- a/dts/framework/remote_session/linux_session.py
> +++ b/dts/framework/remote_session/linux_session.py
> @@ -14,10 +14,11 @@ class LinuxSession(PosixSession):
> The implementation of non-Posix compliant parts of Linux remote
> sessions.
> """
>
> + def _get_privileged_command(self, command: str) -> str:
> + return f"sudo -- sh -c '{command}'"
> +
> def get_remote_cpus(self, use_first_core: bool) -> list[LogicalCore]:
> - cpu_info = self.remote_session.send_command(
> - "lscpu -p=CPU,CORE,SOCKET,NODE|grep -v \\#"
> - ).stdout
> + cpu_info = self.send_command("lscpu -p=CPU,CORE,SOCKET,NODE|grep
> -v \\#").stdout
> lcores = []
> for cpu_line in cpu_info.splitlines():
> lcore, core, socket, node = map(int, cpu_line.split(","))
> @@ -45,20 +46,20 @@ def setup_hugepages(self, hugepage_amount: int,
> force_first_numa: bool) -> None:
> self._mount_huge_pages()
>
> def _get_hugepage_size(self) -> int:
> - hugepage_size = self.remote_session.send_command(
> + hugepage_size = self.send_command(
> "awk '/Hugepagesize/ {print $2}' /proc/meminfo"
> ).stdout
> return int(hugepage_size)
>
> def _get_hugepages_total(self) -> int:
> - hugepages_total = self.remote_session.send_command(
> + hugepages_total = self.send_command(
> "awk '/HugePages_Total/ { print $2 }' /proc/meminfo"
> ).stdout
> return int(hugepages_total)
>
> def _get_numa_nodes(self) -> list[int]:
> try:
> - numa_count = self.remote_session.send_command(
> + numa_count = self.send_command(
> "cat /sys/devices/system/node/online", verify=True
> ).stdout
> numa_range = expand_range(numa_count)
> @@ -70,14 +71,12 @@ def _get_numa_nodes(self) -> list[int]:
> def _mount_huge_pages(self) -> None:
> self._logger.info("Re-mounting Hugepages.")
> hugapge_fs_cmd = "awk '/hugetlbfs/ { print $2 }' /proc/mounts"
> - self.remote_session.send_command(f"umount $({hugapge_fs_cmd})")
> - result = self.remote_session.send_command(hugapge_fs_cmd)
> + self.send_command(f"umount $({hugapge_fs_cmd})")
> + result = self.send_command(hugapge_fs_cmd)
> if result.stdout == "":
> remote_mount_path = "/mnt/huge"
> - self.remote_session.send_command(f"mkdir -p
> {remote_mount_path}")
> - self.remote_session.send_command(
> - f"mount -t hugetlbfs nodev {remote_mount_path}"
> - )
> + self.send_command(f"mkdir -p {remote_mount_path}")
> + self.send_command(f"mount -t hugetlbfs nodev
> {remote_mount_path}")
>
> def _supports_numa(self) -> bool:
> # the system supports numa if self._numa_nodes is non-empty and
> there are more
> @@ -94,14 +93,12 @@ def _configure_huge_pages(
> )
> if force_first_numa and self._supports_numa():
> # clear non-numa hugepages
> - self.remote_session.send_command(
> - f"echo 0 | sudo tee {hugepage_config_path}"
> - )
> + self.send_command(f"echo 0 | tee {hugepage_config_path}",
> privileged=True)
> hugepage_config_path = (
>
> f"/sys/devices/system/node/node{self._numa_nodes[0]}/hugepages"
> f"/hugepages-{size}kB/nr_hugepages"
> )
>
> - self.remote_session.send_command(
> - f"echo {amount} | sudo tee {hugepage_config_path}"
> + self.send_command(
> + f"echo {amount} | tee {hugepage_config_path}", privileged=True
> )
> diff --git a/dts/framework/remote_session/os_session.py
> b/dts/framework/remote_session/os_session.py
> index 4c48ae2567..bfd70bd480 100644
> --- a/dts/framework/remote_session/os_session.py
> +++ b/dts/framework/remote_session/os_session.py
> @@ -10,7 +10,7 @@
> from framework.logger import DTSLOG
> from framework.settings import SETTINGS
> from framework.testbed_model import LogicalCore
> -from framework.utils import EnvVarsDict, MesonArgs
> +from framework.utils import MesonArgs
>
> from .remote import CommandResult, RemoteSession, create_remote_session
>
> @@ -53,17 +53,32 @@ def is_alive(self) -> bool:
> def send_command(
> self,
> command: str,
> - timeout: float,
> + timeout: float = SETTINGS.timeout,
> + privileged: bool = False,
> verify: bool = False,
> - env: EnvVarsDict | None = None,
> + env: dict | None = None,
> ) -> CommandResult:
> """
> An all-purpose API in case the command to be executed is already
> OS-agnostic, such as when the path to the executed command has
> been
> constructed beforehand.
> """
> + if privileged:
> + command = self._get_privileged_command(command)
> +
> return self.remote_session.send_command(command, timeout, verify,
> env)
>
> + @abstractmethod
> + def _get_privileged_command(self, command: str) -> str:
> + """Modify the command so that it executes with administrative
> privileges.
> +
> + Args:
> + command: The command to modify.
> +
> + Returns:
> + The modified command that executes with administrative
> privileges.
> + """
> +
> @abstractmethod
> def guess_dpdk_remote_dir(self, remote_dir) -> PurePath:
> """
> @@ -90,17 +105,35 @@ def join_remote_path(self, *args: str | PurePath) ->
> PurePath:
> """
>
> @abstractmethod
> - def copy_file(
> + def copy_from(
> self,
> source_file: str | PurePath,
> destination_file: str | PurePath,
> - source_remote: bool = False,
> ) -> None:
> + """Copy a file from the remote Node to the local filesystem.
> +
> + Copy source_file from the remote Node associated with this remote
> + session to destination_file on the local filesystem.
> +
> + Args:
> + source_file: the file on the remote Node.
> + destination_file: a file or directory path on the local
> filesystem.
> """
> +
> + @abstractmethod
> + def copy_to(
> + self,
> + source_file: str | PurePath,
> + destination_file: str | PurePath,
> + ) -> None:
> + """Copy a file from local filesystem to the remote Node.
> +
> Copy source_file from local filesystem to destination_file
> - on the remote Node associated with the remote session.
> - If source_remote is True, reverse the direction - copy
> source_file from the
> - associated remote Node to destination_file on local storage.
> + on the remote Node associated with this remote session.
> +
> + Args:
> + source_file: the file on the local filesystem.
> + destination_file: a file or directory path on the remote Node.
> """
>
> @abstractmethod
> @@ -128,7 +161,7 @@ def extract_remote_tarball(
> @abstractmethod
> def build_dpdk(
> self,
> - env_vars: EnvVarsDict,
> + env_vars: dict,
> meson_args: MesonArgs,
> remote_dpdk_dir: str | PurePath,
> remote_dpdk_build_dir: str | PurePath,
> diff --git a/dts/framework/remote_session/posix_session.py
> b/dts/framework/remote_session/posix_session.py
> index d38062e8d6..8ca0acb429 100644
> --- a/dts/framework/remote_session/posix_session.py
> +++ b/dts/framework/remote_session/posix_session.py
> @@ -9,7 +9,7 @@
> from framework.config import Architecture
> from framework.exception import DPDKBuildError,
> RemoteCommandExecutionError
> from framework.settings import SETTINGS
> -from framework.utils import EnvVarsDict, MesonArgs
> +from framework.utils import MesonArgs
>
> from .os_session import OSSession
>
> @@ -34,7 +34,7 @@ def combine_short_options(**opts: bool) -> str:
>
> def guess_dpdk_remote_dir(self, remote_dir) -> PurePosixPath:
> remote_guess = self.join_remote_path(remote_dir, "dpdk-*")
> - result = self.remote_session.send_command(f"ls -d {remote_guess}
> | tail -1")
> + result = self.send_command(f"ls -d {remote_guess} | tail -1")
> return PurePosixPath(result.stdout)
>
> def get_remote_tmp_dir(self) -> PurePosixPath:
> @@ -48,7 +48,7 @@ def get_dpdk_build_env_vars(self, arch: Architecture) ->
> dict:
> env_vars = {}
> if arch == Architecture.i686:
> # find the pkg-config path and store it in PKG_CONFIG_LIBDIR
> - out = self.remote_session.send_command("find /usr -type d
> -name pkgconfig")
> + out = self.send_command("find /usr -type d -name pkgconfig")
> pkg_path = ""
> res_path = out.stdout.split("\r\n")
> for cur_path in res_path:
> @@ -65,13 +65,19 @@ def get_dpdk_build_env_vars(self, arch: Architecture)
> -> dict:
> def join_remote_path(self, *args: str | PurePath) -> PurePosixPath:
> return PurePosixPath(*args)
>
> - def copy_file(
> + def copy_from(
> self,
> source_file: str | PurePath,
> destination_file: str | PurePath,
> - source_remote: bool = False,
> ) -> None:
> - self.remote_session.copy_file(source_file, destination_file,
> source_remote)
> + self.remote_session.copy_from(source_file, destination_file)
> +
> + def copy_to(
> + self,
> + source_file: str | PurePath,
> + destination_file: str | PurePath,
> + ) -> None:
> + self.remote_session.copy_to(source_file, destination_file)
>
> def remove_remote_dir(
> self,
> @@ -80,24 +86,24 @@ def remove_remote_dir(
> force: bool = True,
> ) -> None:
> opts = PosixSession.combine_short_options(r=recursive, f=force)
> - self.remote_session.send_command(f"rm{opts} {remote_dir_path}")
> + self.send_command(f"rm{opts} {remote_dir_path}")
>
> def extract_remote_tarball(
> self,
> remote_tarball_path: str | PurePath,
> expected_dir: str | PurePath | None = None,
> ) -> None:
> - self.remote_session.send_command(
> + self.send_command(
> f"tar xfm {remote_tarball_path} "
> f"-C {PurePosixPath(remote_tarball_path).parent}",
> 60,
> )
> if expected_dir:
> - self.remote_session.send_command(f"ls {expected_dir}",
> verify=True)
> + self.send_command(f"ls {expected_dir}", verify=True)
>
> def build_dpdk(
> self,
> - env_vars: EnvVarsDict,
> + env_vars: dict,
> meson_args: MesonArgs,
> remote_dpdk_dir: str | PurePath,
> remote_dpdk_build_dir: str | PurePath,
> @@ -108,7 +114,7 @@ def build_dpdk(
> if rebuild:
> # reconfigure, then build
> self._logger.info("Reconfiguring DPDK build.")
> - self.remote_session.send_command(
> + self.send_command(
> f"meson configure {meson_args}
> {remote_dpdk_build_dir}",
> timeout,
> verify=True,
> @@ -118,7 +124,7 @@ def build_dpdk(
> # fresh build - remove target dir first, then build from
> scratch
> self._logger.info("Configuring DPDK build from scratch.")
> self.remove_remote_dir(remote_dpdk_build_dir)
> - self.remote_session.send_command(
> + self.send_command(
> f"meson setup "
> f"{meson_args} {remote_dpdk_dir}
> {remote_dpdk_build_dir}",
> timeout,
> @@ -127,14 +133,14 @@ def build_dpdk(
> )
>
> self._logger.info("Building DPDK.")
> - self.remote_session.send_command(
> + self.send_command(
> f"ninja -C {remote_dpdk_build_dir}", timeout,
> verify=True, env=env_vars
> )
> except RemoteCommandExecutionError as e:
> raise DPDKBuildError(f"DPDK build failed when doing
> '{e.command}'.")
>
> def get_dpdk_version(self, build_dir: str | PurePath) -> str:
> - out = self.remote_session.send_command(
> + out = self.send_command(
> f"cat {self.join_remote_path(build_dir, 'VERSION')}",
> verify=True
> )
> return out.stdout
> @@ -146,7 +152,7 @@ def kill_cleanup_dpdk_apps(self, dpdk_prefix_list:
> Iterable[str]) -> None:
> # kill and cleanup only if DPDK is running
> dpdk_pids = self._get_dpdk_pids(dpdk_runtime_dirs)
> for dpdk_pid in dpdk_pids:
> - self.remote_session.send_command(f"kill -9 {dpdk_pid}",
> 20)
> + self.send_command(f"kill -9 {dpdk_pid}", 20)
> self._check_dpdk_hugepages(dpdk_runtime_dirs)
> self._remove_dpdk_runtime_dirs(dpdk_runtime_dirs)
>
> @@ -168,7 +174,7 @@ def _list_remote_dirs(self, remote_path: str |
> PurePath) -> list[str] | None:
> Return a list of directories of the remote_dir.
> If remote_path doesn't exist, return None.
> """
> - out = self.remote_session.send_command(
> + out = self.send_command(
> f"ls -l {remote_path} | awk '/^d/ {{print $NF}}'"
> ).stdout
> if "No such file or directory" in out:
> @@ -182,9 +188,7 @@ def _get_dpdk_pids(self, dpdk_runtime_dirs:
> Iterable[str | PurePath]) -> list[in
> for dpdk_runtime_dir in dpdk_runtime_dirs:
> dpdk_config_file = PurePosixPath(dpdk_runtime_dir, "config")
> if self._remote_files_exists(dpdk_config_file):
> - out = self.remote_session.send_command(
> - f"lsof -Fp {dpdk_config_file}"
> - ).stdout
> + out = self.send_command(f"lsof -Fp
> {dpdk_config_file}").stdout
> if out and "No such file or directory" not in out:
> for out_line in out.splitlines():
> match = re.match(pid_regex, out_line)
> @@ -193,7 +197,7 @@ def _get_dpdk_pids(self, dpdk_runtime_dirs:
> Iterable[str | PurePath]) -> list[in
> return pids
>
> def _remote_files_exists(self, remote_path: PurePath) -> bool:
> - result = self.remote_session.send_command(f"test -e
> {remote_path}")
> + result = self.send_command(f"test -e {remote_path}")
> return not result.return_code
>
> def _check_dpdk_hugepages(
> @@ -202,9 +206,7 @@ def _check_dpdk_hugepages(
> for dpdk_runtime_dir in dpdk_runtime_dirs:
> hugepage_info = PurePosixPath(dpdk_runtime_dir,
> "hugepage_info")
> if self._remote_files_exists(hugepage_info):
> - out = self.remote_session.send_command(
> - f"lsof -Fp {hugepage_info}"
> - ).stdout
> + out = self.send_command(f"lsof -Fp
> {hugepage_info}").stdout
> if out and "No such file or directory" not in out:
> self._logger.warning("Some DPDK processes did not
> free hugepages.")
>
> self._logger.warning("*******************************************")
> diff --git a/dts/framework/remote_session/remote/remote_session.py
> b/dts/framework/remote_session/remote/remote_session.py
> index 91dee3cb4f..0647d93de4 100644
> --- a/dts/framework/remote_session/remote/remote_session.py
> +++ b/dts/framework/remote_session/remote/remote_session.py
> @@ -11,7 +11,6 @@
> from framework.exception import RemoteCommandExecutionError
> from framework.logger import DTSLOG
> from framework.settings import SETTINGS
> -from framework.utils import EnvVarsDict
>
>
> @dataclasses.dataclass(slots=True, frozen=True)
> @@ -89,7 +88,7 @@ def send_command(
> command: str,
> timeout: float = SETTINGS.timeout,
> verify: bool = False,
> - env: EnvVarsDict | None = None,
> + env: dict | None = None,
> ) -> CommandResult:
> """
> Send a command to the connected node using optional env vars
> @@ -114,7 +113,7 @@ def send_command(
>
> @abstractmethod
> def _send_command(
> - self, command: str, timeout: float, env: EnvVarsDict | None
> + self, command: str, timeout: float, env: dict | None
> ) -> CommandResult:
> """
> Use the underlying protocol to execute the command using optional
> env vars
> @@ -141,15 +140,33 @@ def is_alive(self) -> bool:
> """
>
> @abstractmethod
> - def copy_file(
> + def copy_from(
> self,
> source_file: str | PurePath,
> destination_file: str | PurePath,
> - source_remote: bool = False,
> ) -> None:
> + """Copy a file from the remote Node to the local filesystem.
> +
> + Copy source_file from the remote Node associated with this remote
> + session to destination_file on the local filesystem.
> +
> + Args:
> + source_file: the file on the remote Node.
> + destination_file: a file or directory path on the local
> filesystem.
> """
> - Copy source_file from local filesystem to destination_file on the
> remote Node
> - associated with the remote session.
> - If source_remote is True, reverse the direction - copy
> source_file from the
> - associated Node to destination_file on local filesystem.
> +
> + @abstractmethod
> + def copy_to(
> + self,
> + source_file: str | PurePath,
> + destination_file: str | PurePath,
> + ) -> None:
> + """Copy a file from local filesystem to the remote Node.
> +
> + Copy source_file from local filesystem to destination_file
> + on the remote Node associated with this remote session.
> +
> + Args:
> + source_file: the file on the local filesystem.
> + destination_file: a file or directory path on the remote Node.
> """
> diff --git a/dts/framework/remote_session/remote/ssh_session.py
> b/dts/framework/remote_session/remote/ssh_session.py
> index 42ff9498a2..8d127f1601 100644
> --- a/dts/framework/remote_session/remote/ssh_session.py
> +++ b/dts/framework/remote_session/remote/ssh_session.py
> @@ -1,29 +1,49 @@
> # SPDX-License-Identifier: BSD-3-Clause
> -# Copyright(c) 2010-2014 Intel Corporation
> -# Copyright(c) 2022-2023 PANTHEON.tech s.r.o.
> -# Copyright(c) 2022-2023 University of New Hampshire
> +# Copyright(c) 2023 PANTHEON.tech s.r.o.
>
>
I've noticed that in other patches you've simply appended the
PANTHEON.tech copyright to the existing list. Is there a reason you
removed the other copyright holders here?
> -import time
> +import socket
> +import traceback
> from pathlib import PurePath
>
> -import pexpect # type: ignore
> -from pexpect import pxssh # type: ignore
> +from fabric import Connection # type: ignore[import]
> +from invoke.exceptions import ( # type: ignore[import]
> + CommandTimedOut,
> + ThreadException,
> + UnexpectedExit,
> +)
> +from paramiko.ssh_exception import ( # type: ignore[import]
> + AuthenticationException,
> + BadHostKeyException,
> + NoValidConnectionsError,
> + SSHException,
> +)
>
> from framework.config import NodeConfiguration
> from framework.exception import SSHConnectionError, SSHSessionDeadError,
> SSHTimeoutError
> from framework.logger import DTSLOG
> -from framework.utils import GREEN, RED, EnvVarsDict
>
> from .remote_session import CommandResult, RemoteSession
>
>
> class SSHSession(RemoteSession):
> - """
> - Module for creating Pexpect SSH remote sessions.
> + """A persistent SSH connection to a remote Node.
> +
> + The connection is implemented with the Fabric Python library.
> +
> + Args:
> + node_config: The configuration of the Node to connect to.
> + session_name: The name of the session.
> + logger: The logger used for logging.
> + This should be passed from the parent OSSession.
> +
> + Attributes:
> + session: The underlying Fabric SSH connection.
> +
> + Raises:
> + SSHConnectionError: The connection cannot be established.
> """
>
> - session: pxssh.pxssh
> - magic_prompt: str
> + session: Connection
>
> def __init__(
> self,
> @@ -31,218 +51,91 @@ def __init__(
> session_name: str,
> logger: DTSLOG,
> ):
> - self.magic_prompt = "MAGIC PROMPT"
> super(SSHSession, self).__init__(node_config, session_name,
> logger)
>
> def _connect(self) -> None:
> - """
> - Create connection to assigned node.
> - """
> + errors = []
> retry_attempts = 10
> login_timeout = 20 if self.port else 10
> - password_regex = (
> - r"(?i)(?:password:)|(?:passphrase for key)|(?i)(password for
> .+:)"
> - )
> - try:
> - for retry_attempt in range(retry_attempts):
> - self.session = pxssh.pxssh(encoding="utf-8")
> - try:
> - self.session.login(
> - self.ip,
> - self.username,
> - self.password,
> - original_prompt="[$#>]",
> - port=self.port,
> - login_timeout=login_timeout,
> - password_regex=password_regex,
> - )
> - break
> - except Exception as e:
> - self._logger.warning(e)
> - time.sleep(2)
> - self._logger.info(
> - f"Retrying connection: retry number
> {retry_attempt + 1}."
> - )
> - else:
> - raise Exception(f"Connection to {self.hostname} failed")
> -
> - self.send_expect("stty -echo", "#")
> - self.send_expect("stty columns 1000", "#")
> - self.send_expect("bind 'set enable-bracketed-paste off'", "#")
> - except Exception as e:
> - self._logger.error(RED(str(e)))
> - if getattr(self, "port", None):
> - suggestion = (
> - f"\nSuggestion: Check if the firewall on
> {self.hostname} is "
> - f"stopped.\n"
> + for retry_attempt in range(retry_attempts):
> + try:
> + self.session = Connection(
> + self.ip,
> + user=self.username,
> + port=self.port,
> + connect_kwargs={"password": self.password},
> + connect_timeout=login_timeout,
> )
> - self._logger.info(GREEN(suggestion))
> -
> - raise SSHConnectionError(self.hostname)
> + self.session.open()
>
> - def send_expect(
> - self, command: str, prompt: str, timeout: float = 15, verify:
> bool = False
> - ) -> str | int:
> - try:
> - ret = self.send_expect_base(command, prompt, timeout)
> - if verify:
> - ret_status = self.send_expect_base("echo $?", prompt,
> timeout)
> - try:
> - retval = int(ret_status)
> - if retval:
> - self._logger.error(f"Command: {command} failure!")
> - self._logger.error(ret)
> - return retval
> - else:
> - return ret
> - except ValueError:
> - return ret
> - else:
> - return ret
> - except Exception as e:
> - self._logger.error(
> - f"Exception happened in [{command}] and output is "
> - f"[{self._get_output()}]"
> - )
> - raise e
> -
> - def send_expect_base(self, command: str, prompt: str, timeout: float)
> -> str:
> - self._clean_session()
> - original_prompt = self.session.PROMPT
> - self.session.PROMPT = prompt
> - self._send_line(command)
> - self._prompt(command, timeout)
> -
> - before = self._get_output()
> - self.session.PROMPT = original_prompt
> - return before
> -
> - def _clean_session(self) -> None:
> - self.session.PROMPT = self.magic_prompt
> - self.get_output(timeout=0.01)
> - self.session.PROMPT = self.session.UNIQUE_PROMPT
> -
> - def _send_line(self, command: str) -> None:
> - if not self.is_alive():
> - raise SSHSessionDeadError(self.hostname)
> - if len(command) == 2 and command.startswith("^"):
> - self.session.sendcontrol(command[1])
> - else:
> - self.session.sendline(command)
> + except (ValueError, BadHostKeyException,
> AuthenticationException) as e:
> + self._logger.exception(e)
> + raise SSHConnectionError(self.hostname) from e
>
> - def _prompt(self, command: str, timeout: float) -> None:
> - if not self.session.prompt(timeout):
> - raise SSHTimeoutError(command, self._get_output()) from None
> + except (NoValidConnectionsError, socket.error, SSHException)
> as e:
> + self._logger.debug(traceback.format_exc())
> + self._logger.warning(e)
>
> - def get_output(self, timeout: float = 15) -> str:
> - """
> - Get all output before timeout
> - """
> - try:
> - self.session.prompt(timeout)
> - except Exception:
> - pass
> -
> - before = self._get_output()
> - self._flush()
> -
> - return before
> + error = repr(e)
> + if error not in errors:
> + errors.append(error)
>
> - def _get_output(self) -> str:
> - if not self.is_alive():
> - raise SSHSessionDeadError(self.hostname)
> - before = self.session.before.rsplit("\r\n", 1)[0]
> - if before == "[PEXPECT]":
> - return ""
> - return before
> + self._logger.info(
> + f"Retrying connection: retry number {retry_attempt +
> 1}."
> + )
>
> - def _flush(self) -> None:
> - """
> - Clear all session buffer
> - """
> - self.session.buffer = ""
> - self.session.before = ""
> + else:
> + break
> + else:
> + raise SSHConnectionError(self.hostname, errors)
>
> def is_alive(self) -> bool:
> - return self.session.isalive()
> + return self.session.is_connected
>
> def _send_command(
> - self, command: str, timeout: float, env: EnvVarsDict | None
> + self, command: str, timeout: float, env: dict | None
> ) -> CommandResult:
> - output = self._send_command_get_output(command, timeout, env)
> - return_code = int(self._send_command_get_output("echo $?",
> timeout, None))
> + """Send a command and return the result of the execution.
>
> - # we're capturing only stdout
> - return CommandResult(self.name, command, output, "", return_code)
> + Args:
> + command: The command to execute.
> + timeout: Wait at most this many seconds for the execution to
> complete.
> + env: Extra environment variables that will be used in command
> execution.
>
> - def _send_command_get_output(
> - self, command: str, timeout: float, env: EnvVarsDict | None
> - ) -> str:
> + Raises:
> + SSHSessionDeadError: The session died while executing the
> command.
> + SSHTimeoutError: The command execution timed out.
> + """
> try:
> - self._clean_session()
> - if env:
> - command = f"{env} {command}"
> - self._send_line(command)
> - except Exception as e:
> - raise e
> + output = self.session.run(
> + command, env=env, warn=True, hide=True, timeout=timeout
> + )
>
> - output = self.get_output(timeout=timeout)
> - self.session.PROMPT = self.session.UNIQUE_PROMPT
> - self.session.prompt(0.1)
> + except (UnexpectedExit, ThreadException) as e:
> + self._logger.exception(e)
> + raise SSHSessionDeadError(self.hostname) from e
>
> - return output
> + except CommandTimedOut as e:
> + self._logger.exception(e)
> + raise SSHTimeoutError(command, e.result.stderr) from e
>
> - def _close(self, force: bool = False) -> None:
> - if force is True:
> - self.session.close()
> - else:
> - if self.is_alive():
> - self.session.logout()
> + return CommandResult(
> + self.name, command, output.stdout, output.stderr,
> output.return_code
> + )
>
> - def copy_file(
> + def copy_from(
> self,
> source_file: str | PurePath,
> destination_file: str | PurePath,
> - source_remote: bool = False,
> ) -> None:
> - """
> - Send a local file to a remote host.
> - """
> - if source_remote:
> - source_file = f"{self.username}@{self.ip}:{source_file}"
> - else:
> - destination_file = f"{self.username}@
> {self.ip}:{destination_file}"
> + self.session.get(str(destination_file), str(source_file))
>
> - port = ""
> - if self.port:
> - port = f" -P {self.port}"
> -
> - command = (
> - f"scp -v{port} -o NoHostAuthenticationForLocalhost=yes"
> - f" {source_file} {destination_file}"
> - )
> -
> - self._spawn_scp(command)
> + def copy_to(
> + self,
> + source_file: str | PurePath,
> + destination_file: str | PurePath,
> + ) -> None:
> + self.session.put(str(source_file), str(destination_file))
>
> - def _spawn_scp(self, scp_cmd: str) -> None:
> - """
> - Transfer a file with SCP
> - """
> - self._logger.info(scp_cmd)
> - p: pexpect.spawn = pexpect.spawn(scp_cmd)
> - time.sleep(0.5)
> - ssh_newkey: str = "Are you sure you want to continue connecting"
> - i: int = p.expect(
> - [ssh_newkey, "[pP]assword", "# ", pexpect.EOF,
> pexpect.TIMEOUT], 120
> - )
> - if i == 0: # add once in trust list
> - p.sendline("yes")
> - i = p.expect([ssh_newkey, "[pP]assword", pexpect.EOF], 2)
> -
> - if i == 1:
> - time.sleep(0.5)
> - p.sendline(self.password)
> - p.expect("Exit status 0", 60)
> - if i == 4:
> - self._logger.error("SCP TIMEOUT error %d" % i)
> - p.close()
> + def _close(self, force: bool = False) -> None:
> + self.session.close()
> diff --git a/dts/framework/testbed_model/sut_node.py
> b/dts/framework/testbed_model/sut_node.py
> index 2b2b50d982..9dbc390848 100644
> --- a/dts/framework/testbed_model/sut_node.py
> +++ b/dts/framework/testbed_model/sut_node.py
> @@ -10,7 +10,7 @@
> from framework.config import BuildTargetConfiguration, NodeConfiguration
> from framework.remote_session import CommandResult, OSSession
> from framework.settings import SETTINGS
> -from framework.utils import EnvVarsDict, MesonArgs
> +from framework.utils import MesonArgs
>
> from .hw import LogicalCoreCount, LogicalCoreList, VirtualDevice
> from .node import Node
> @@ -27,7 +27,7 @@ class SutNode(Node):
> _dpdk_prefix_list: list[str]
> _dpdk_timestamp: str
> _build_target_config: BuildTargetConfiguration | None
> - _env_vars: EnvVarsDict
> + _env_vars: dict
> _remote_tmp_dir: PurePath
> __remote_dpdk_dir: PurePath | None
> _dpdk_version: str | None
> @@ -38,7 +38,7 @@ def __init__(self, node_config: NodeConfiguration):
> super(SutNode, self).__init__(node_config)
> self._dpdk_prefix_list = []
> self._build_target_config = None
> - self._env_vars = EnvVarsDict()
> + self._env_vars = {}
> self._remote_tmp_dir = self.main_session.get_remote_tmp_dir()
> self.__remote_dpdk_dir = None
> self._dpdk_version = None
> @@ -94,7 +94,7 @@ def _configure_build_target(
> """
> Populate common environment variables and set build target config.
> """
> - self._env_vars = EnvVarsDict()
> + self._env_vars = {}
> self._build_target_config = build_target_config
> self._env_vars.update(
>
> self.main_session.get_dpdk_build_env_vars(build_target_config.arch)
> @@ -112,7 +112,7 @@ def _copy_dpdk_tarball(self) -> None:
> Copy to and extract DPDK tarball on the SUT node.
> """
> self._logger.info("Copying DPDK tarball to SUT.")
> - self.main_session.copy_file(SETTINGS.dpdk_tarball_path,
> self._remote_tmp_dir)
> + self.main_session.copy_to(SETTINGS.dpdk_tarball_path,
> self._remote_tmp_dir)
>
> # construct remote tarball path
> # the basename is the same on local host and on remote Node
> @@ -259,7 +259,7 @@ def run_dpdk_app(
> Run DPDK application on the remote node.
> """
> return self.main_session.send_command(
> - f"{app_path} {eal_args}", timeout, verify=True
> + f"{app_path} {eal_args}", timeout, privileged=True,
> verify=True
> )
>
>
> diff --git a/dts/framework/utils.py b/dts/framework/utils.py
> index 55e0b0ef0e..8cfbc6a29d 100644
> --- a/dts/framework/utils.py
> +++ b/dts/framework/utils.py
> @@ -42,19 +42,10 @@ def expand_range(range_str: str) -> list[int]:
> return expanded_range
>
>
> -def GREEN(text: str) -> str:
> - return f"\u001B[32;1m{str(text)}\u001B[0m"
> -
> -
> def RED(text: str) -> str:
> return f"\u001B[31;1m{str(text)}\u001B[0m"
>
>
> -class EnvVarsDict(dict):
> - def __str__(self) -> str:
> - return " ".join(["=".join(item) for item in self.items()])
> -
> -
> class MesonArgs(object):
> """
> Aggregate the arguments needed to build DPDK:
> diff --git a/dts/poetry.lock b/dts/poetry.lock
> index 0b2a007d4d..2438f337cd 100644
> --- a/dts/poetry.lock
> +++ b/dts/poetry.lock
> @@ -12,6 +12,18 @@ docs = ["furo", "sphinx", "zope.interface",
> "sphinx-notfound-page"]
> tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest
> (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins",
> "zope.interface", "cloudpickle"]
> tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler",
> "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins",
> "cloudpickle"]
>
> +[[package]]
> +name = "bcrypt"
> +version = "4.0.1"
> +description = "Modern password hashing for your software and your servers"
> +category = "main"
> +optional = false
> +python-versions = ">=3.6"
> +
> +[package.extras]
> +tests = ["pytest (>=3.2.1,!=3.3.0)"]
> +typecheck = ["mypy"]
> +
> [[package]]
> name = "black"
> version = "22.10.0"
> @@ -33,6 +45,17 @@ d = ["aiohttp (>=3.7.4)"]
> jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
> uvloop = ["uvloop (>=0.15.2)"]
>
> +[[package]]
> +name = "cffi"
> +version = "1.15.1"
> +description = "Foreign Function Interface for Python calling C code."
> +category = "main"
> +optional = false
> +python-versions = "*"
> +
> +[package.dependencies]
> +pycparser = "*"
> +
> [[package]]
> name = "click"
> version = "8.1.3"
> @@ -52,6 +75,52 @@ category = "dev"
> optional = false
> python-versions =
> "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
>
> +[[package]]
> +name = "cryptography"
> +version = "40.0.2"
> +description = "cryptography is a package which provides cryptographic
> recipes and primitives to Python developers."
> +category = "main"
> +optional = false
> +python-versions = ">=3.6"
> +
> +[package.dependencies]
> +cffi = ">=1.12"
> +
> +[package.extras]
> +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
> +docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)",
> "sphinxcontrib-spelling (>=4.0.1)"]
> +pep8test = ["black", "ruff", "mypy", "check-manifest"]
> +sdist = ["setuptools-rust (>=0.11.4)"]
> +ssh = ["bcrypt (>=3.1.5)"]
> +test = ["pytest (>=6.2.0)", "pytest-shard (>=0.1.2)", "pytest-benchmark",
> "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601"]
> +test-randomorder = ["pytest-randomly"]
> +tox = ["tox"]
> +
> +[[package]]
> +name = "fabric"
> +version = "2.7.1"
> +description = "High level SSH command execution"
> +category = "main"
> +optional = false
> +python-versions = "*"
> +
> +[package.dependencies]
> +invoke = ">=1.3,<2.0"
> +paramiko = ">=2.4"
> +pathlib2 = "*"
> +
> +[package.extras]
> +pytest = ["mock (>=2.0.0,<3.0)", "pytest (>=3.2.5,<4.0)"]
> +testing = ["mock (>=2.0.0,<3.0)"]
> +
> +[[package]]
> +name = "invoke"
> +version = "1.7.3"
> +description = "Pythonic task execution"
> +category = "main"
> +optional = false
> +python-versions = "*"
> +
> [[package]]
> name = "isort"
> version = "5.10.1"
> @@ -136,23 +205,41 @@ optional = false
> python-versions = "*"
>
> [[package]]
> -name = "pathspec"
> -version = "0.10.1"
> -description = "Utility library for gitignore style pattern matching of
> file paths."
> -category = "dev"
> +name = "paramiko"
> +version = "3.1.0"
> +description = "SSH2 protocol library"
> +category = "main"
> optional = false
> -python-versions = ">=3.7"
> +python-versions = ">=3.6"
> +
> +[package.dependencies]
> +bcrypt = ">=3.2"
> +cryptography = ">=3.3"
> +pynacl = ">=1.5"
> +
> +[package.extras]
> +all = ["pyasn1 (>=0.1.7)", "invoke (>=2.0)", "gssapi (>=1.4.1)", "pywin32
> (>=2.1.8)"]
> +gssapi = ["pyasn1 (>=0.1.7)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"]
> +invoke = ["invoke (>=2.0)"]
>
> [[package]]
> -name = "pexpect"
> -version = "4.8.0"
> -description = "Pexpect allows easy control of interactive console
> applications."
> +name = "pathlib2"
> +version = "2.3.7.post1"
> +description = "Object-oriented filesystem paths"
> category = "main"
> optional = false
> python-versions = "*"
>
> [package.dependencies]
> -ptyprocess = ">=0.5"
> +six = "*"
> +
> +[[package]]
> +name = "pathspec"
> +version = "0.10.1"
> +description = "Utility library for gitignore style pattern matching of
> file paths."
> +category = "dev"
> +optional = false
> +python-versions = ">=3.7"
>
> [[package]]
> name = "platformdirs"
> @@ -166,14 +253,6 @@ python-versions = ">=3.7"
> docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)",
> "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"]
> test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)",
> "pytest (>=6)"]
>
> -[[package]]
> -name = "ptyprocess"
> -version = "0.7.0"
> -description = "Run a subprocess in a pseudo terminal"
> -category = "main"
> -optional = false
> -python-versions = "*"
> -
> [[package]]
> name = "pycodestyle"
> version = "2.9.1"
> @@ -182,6 +261,14 @@ category = "dev"
> optional = false
> python-versions = ">=3.6"
>
> +[[package]]
> +name = "pycparser"
> +version = "2.21"
> +description = "C parser in Python"
> +category = "main"
> +optional = false
> +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
> +
> [[package]]
> name = "pydocstyle"
> version = "6.1.1"
> @@ -228,6 +315,21 @@ tests = ["pytest (>=7.1.2)", "pytest-mypy",
> "eradicate (>=2.0.0)", "radon (>=5.1
> toml = ["toml (>=0.10.2)"]
> vulture = ["vulture"]
>
> +[[package]]
> +name = "pynacl"
> +version = "1.5.0"
> +description = "Python binding to the Networking and Cryptography (NaCl)
> library"
> +category = "main"
> +optional = false
> +python-versions = ">=3.6"
> +
> +[package.dependencies]
> +cffi = ">=1.4.1"
> +
> +[package.extras]
> +docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"]
> +tests = ["pytest (>=3.2.1,!=3.3.0)", "hypothesis (>=3.27.0)"]
> +
> [[package]]
> name = "pyrsistent"
> version = "0.19.1"
> @@ -244,6 +346,14 @@ category = "main"
> optional = false
> python-versions = ">=3.6"
>
> +[[package]]
> +name = "six"
> +version = "1.16.0"
> +description = "Python 2 and 3 compatibility utilities"
> +category = "main"
> +optional = false
> +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
> +
> [[package]]
> name = "snowballstemmer"
> version = "2.2.0"
> @@ -299,13 +409,18 @@ jsonschema = ">=4,<5"
> [metadata]
> lock-version = "1.1"
> python-versions = "^3.10"
> -content-hash =
> "a0f040b07fc6ce4deb0be078b9a88c2a465cb6bccb9e260a67e92c2403e2319f"
> +content-hash =
> "719c43bcaa5d181921debda884f8f714063df0b2336d61e9f64ecab034e8b139"
>
> [metadata.files]
> attrs = []
> +bcrypt = []
> black = []
> +cffi = []
> click = []
> colorama = []
> +cryptography = []
> +fabric = []
> +invoke = []
> isort = []
> jsonpatch = []
> jsonpointer = []
> @@ -313,22 +428,22 @@ jsonschema = []
> mccabe = []
> mypy = []
> mypy-extensions = []
> +paramiko = []
> +pathlib2 = []
> pathspec = []
> -pexpect = [
> - {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash =
> "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"},
> - {file = "pexpect-4.8.0.tar.gz", hash =
> "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"},
> -]
> platformdirs = [
> {file = "platformdirs-2.5.2-py3-none-any.whl", hash =
> "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
> {file = "platformdirs-2.5.2.tar.gz", hash =
> "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},
> ]
> -ptyprocess = []
> pycodestyle = []
> +pycparser = []
> pydocstyle = []
> pyflakes = []
> pylama = []
> +pynacl = []
> pyrsistent = []
> pyyaml = []
> +six = []
> snowballstemmer = []
> toml = []
> tomli = []
> diff --git a/dts/pyproject.toml b/dts/pyproject.toml
> index a136c91e5e..50bcdb327a 100644
> --- a/dts/pyproject.toml
> +++ b/dts/pyproject.toml
> @@ -9,10 +9,10 @@ authors = ["Owen Hilyard <ohilyard@iol.unh.edu>", "
> dts@dpdk.org"]
>
> [tool.poetry.dependencies]
> python = "^3.10"
> -pexpect = "^4.8.0"
> warlock = "^2.0.1"
> PyYAML = "^6.0"
> types-PyYAML = "^6.0.8"
> +fabric = "^2.7.1"
>
> [tool.poetry.dev-dependencies]
> mypy = "^0.961"
> --
> 2.30.2
>
>
[-- Attachment #2: Type: text/html, Size: 60184 bytes --]
^ permalink raw reply [flat|nested] 21+ messages in thread
* Re: [PATCH v2] dts: replace pexpect with fabric
2023-04-28 19:03 ` Jeremy Spewock
@ 2023-05-02 13:00 ` Juraj Linkeš
2023-05-03 17:54 ` Jeremy Spewock
0 siblings, 1 reply; 21+ messages in thread
From: Juraj Linkeš @ 2023-05-02 13:00 UTC (permalink / raw)
To: Jeremy Spewock
Cc: thomas, Honnappa.Nagarahalli, lijuan.tu, wathsala.vithanage, probb, dev
On Fri, Apr 28, 2023 at 9:04 PM Jeremy Spewock <jspewock@iol.unh.edu> wrote:
>
>
>
> On Mon, Apr 24, 2023 at 9:35 AM Juraj Linkeš <juraj.linkes@pantheon.tech> wrote:
>>
>> Pexpect is not a dedicated SSH connection library while Fabric is. With
>> Fabric, all SSH-related logic is provided and we can just focus on
>> what's DTS specific.
>>
>> Signed-off-by: Juraj Linkeš <juraj.linkes@pantheon.tech>
>> ---
>> doc/guides/tools/dts.rst | 29 +-
>> dts/conf.yaml | 2 +-
>> dts/framework/exception.py | 10 +-
>> dts/framework/remote_session/linux_session.py | 31 +-
>> dts/framework/remote_session/os_session.py | 51 +++-
>> dts/framework/remote_session/posix_session.py | 48 +--
>> .../remote_session/remote/remote_session.py | 35 ++-
>> .../remote_session/remote/ssh_session.py | 287 ++++++------------
>> dts/framework/testbed_model/sut_node.py | 12 +-
>> dts/framework/utils.py | 9 -
>> dts/poetry.lock | 161 ++++++++--
>> dts/pyproject.toml | 2 +-
>> 12 files changed, 376 insertions(+), 301 deletions(-)
>>
>> diff --git a/doc/guides/tools/dts.rst b/doc/guides/tools/dts.rst
>> index ebd6dceb6a..d15826c098 100644
>> --- a/doc/guides/tools/dts.rst
>> +++ b/doc/guides/tools/dts.rst
>> @@ -95,9 +95,14 @@ Setting up DTS environment
>>
>> #. **SSH Connection**
>>
>> - DTS uses Python pexpect for SSH connections between DTS environment and the other hosts.
>> - The pexpect implementation is a wrapper around the ssh command in the DTS environment.
>> - This means it'll use the SSH agent providing the ssh command and its keys.
>> + DTS uses the Fabric Python library for SSH connections between DTS environment
>> + and the other hosts.
>> + The authentication method used is pubkey authentication.
>> + Fabric tries to use a passed key/certificate,
>> + then any key it can find through an SSH agent,
>> + then any "id_rsa", "id_dsa" or "id_ecdsa" key discoverable in ``~/.ssh/``
>> + (with any matching OpenSSH-style certificates).
>> + DTS doesn't pass any keys, so Fabric tries to use the other two methods.
>>
>>
>> Setting up System Under Test
>> @@ -132,6 +137,21 @@ There are two areas that need to be set up on a System Under Test:
>> It's possible to use the hugepage configuration already present on the SUT.
>> If you wish to do so, don't specify the hugepage configuration in the DTS config file.
>>
>> +#. **User with administrator privileges**
>> +
>> +.. _sut_admin_user:
>> +
>> + DTS needs administrator privileges to run DPDK applications (such as testpmd) on the SUT.
>> + The SUT user must be able to run commands in privileged mode without being asked for a password.
>> + On most Linux distributions, it's a matter of setting up passwordless sudo:
>> +
>> + #. Run ``sudo visudo`` and check that it contains ``%sudo ALL=(ALL:ALL) NOPASSWD:ALL``.
>> +
>> + #. Add the SUT user to the sudo group with:
>> +
>> + .. code-block:: console
>> +
>> + sudo usermod -aG sudo <sut_user>
>>
>> Running DTS
>> -----------
>> @@ -151,7 +171,8 @@ which is a template that illustrates what can be configured in DTS:
>> :start-at: executions:
>>
>>
>> -The user must be root or any other user with prompt starting with ``#``.
>> +The user must have :ref:`administrator privileges <sut_admin_user>`
>> +which don't require password authentication.
>> The other fields are mostly self-explanatory
>> and documented in more detail in ``dts/framework/config/conf_yaml_schema.json``.
>>
>> diff --git a/dts/conf.yaml b/dts/conf.yaml
>> index a9bd8a3ecf..129801d87c 100644
>> --- a/dts/conf.yaml
>> +++ b/dts/conf.yaml
>> @@ -16,7 +16,7 @@ executions:
>> nodes:
>> - name: "SUT 1"
>> hostname: sut1.change.me.localhost
>> - user: root
>> + user: dtsuser
>> arch: x86_64
>> os: linux
>> lcores: ""
>> diff --git a/dts/framework/exception.py b/dts/framework/exception.py
>> index ca353d98fc..44ff4e979a 100644
>> --- a/dts/framework/exception.py
>> +++ b/dts/framework/exception.py
>> @@ -62,13 +62,19 @@ class SSHConnectionError(DTSError):
>> """
>>
>> host: str
>> + errors: list[str]
>> severity: ClassVar[ErrorSeverity] = ErrorSeverity.SSH_ERR
>>
>> - def __init__(self, host: str):
>> + def __init__(self, host: str, errors: list[str] | None = None):
>> self.host = host
>> + self.errors = [] if errors is None else errors
>>
>> def __str__(self) -> str:
>> - return f"Error trying to connect with {self.host}"
>> + message = f"Error trying to connect with {self.host}."
>> + if self.errors:
>> + message += f" Errors encountered while retrying: {', '.join(self.errors)}"
>> +
>> + return message
>>
>>
>> class SSHSessionDeadError(DTSError):
>> diff --git a/dts/framework/remote_session/linux_session.py b/dts/framework/remote_session/linux_session.py
>> index a1e3bc3a92..f13f399121 100644
>> --- a/dts/framework/remote_session/linux_session.py
>> +++ b/dts/framework/remote_session/linux_session.py
>> @@ -14,10 +14,11 @@ class LinuxSession(PosixSession):
>> The implementation of non-Posix compliant parts of Linux remote sessions.
>> """
>>
>> + def _get_privileged_command(self, command: str) -> str:
>> + return f"sudo -- sh -c '{command}'"
>> +
>> def get_remote_cpus(self, use_first_core: bool) -> list[LogicalCore]:
>> - cpu_info = self.remote_session.send_command(
>> - "lscpu -p=CPU,CORE,SOCKET,NODE|grep -v \\#"
>> - ).stdout
>> + cpu_info = self.send_command("lscpu -p=CPU,CORE,SOCKET,NODE|grep -v \\#").stdout
>> lcores = []
>> for cpu_line in cpu_info.splitlines():
>> lcore, core, socket, node = map(int, cpu_line.split(","))
>> @@ -45,20 +46,20 @@ def setup_hugepages(self, hugepage_amount: int, force_first_numa: bool) -> None:
>> self._mount_huge_pages()
>>
>> def _get_hugepage_size(self) -> int:
>> - hugepage_size = self.remote_session.send_command(
>> + hugepage_size = self.send_command(
>> "awk '/Hugepagesize/ {print $2}' /proc/meminfo"
>> ).stdout
>> return int(hugepage_size)
>>
>> def _get_hugepages_total(self) -> int:
>> - hugepages_total = self.remote_session.send_command(
>> + hugepages_total = self.send_command(
>> "awk '/HugePages_Total/ { print $2 }' /proc/meminfo"
>> ).stdout
>> return int(hugepages_total)
>>
>> def _get_numa_nodes(self) -> list[int]:
>> try:
>> - numa_count = self.remote_session.send_command(
>> + numa_count = self.send_command(
>> "cat /sys/devices/system/node/online", verify=True
>> ).stdout
>> numa_range = expand_range(numa_count)
>> @@ -70,14 +71,12 @@ def _get_numa_nodes(self) -> list[int]:
>> def _mount_huge_pages(self) -> None:
>> self._logger.info("Re-mounting Hugepages.")
>> hugapge_fs_cmd = "awk '/hugetlbfs/ { print $2 }' /proc/mounts"
>> - self.remote_session.send_command(f"umount $({hugapge_fs_cmd})")
>> - result = self.remote_session.send_command(hugapge_fs_cmd)
>> + self.send_command(f"umount $({hugapge_fs_cmd})")
>> + result = self.send_command(hugapge_fs_cmd)
>> if result.stdout == "":
>> remote_mount_path = "/mnt/huge"
>> - self.remote_session.send_command(f"mkdir -p {remote_mount_path}")
>> - self.remote_session.send_command(
>> - f"mount -t hugetlbfs nodev {remote_mount_path}"
>> - )
>> + self.send_command(f"mkdir -p {remote_mount_path}")
>> + self.send_command(f"mount -t hugetlbfs nodev {remote_mount_path}")
>>
>> def _supports_numa(self) -> bool:
>> # the system supports numa if self._numa_nodes is non-empty and there are more
>> @@ -94,14 +93,12 @@ def _configure_huge_pages(
>> )
>> if force_first_numa and self._supports_numa():
>> # clear non-numa hugepages
>> - self.remote_session.send_command(
>> - f"echo 0 | sudo tee {hugepage_config_path}"
>> - )
>> + self.send_command(f"echo 0 | tee {hugepage_config_path}", privileged=True)
>> hugepage_config_path = (
>> f"/sys/devices/system/node/node{self._numa_nodes[0]}/hugepages"
>> f"/hugepages-{size}kB/nr_hugepages"
>> )
>>
>> - self.remote_session.send_command(
>> - f"echo {amount} | sudo tee {hugepage_config_path}"
>> + self.send_command(
>> + f"echo {amount} | tee {hugepage_config_path}", privileged=True
>> )
>> diff --git a/dts/framework/remote_session/os_session.py b/dts/framework/remote_session/os_session.py
>> index 4c48ae2567..bfd70bd480 100644
>> --- a/dts/framework/remote_session/os_session.py
>> +++ b/dts/framework/remote_session/os_session.py
>> @@ -10,7 +10,7 @@
>> from framework.logger import DTSLOG
>> from framework.settings import SETTINGS
>> from framework.testbed_model import LogicalCore
>> -from framework.utils import EnvVarsDict, MesonArgs
>> +from framework.utils import MesonArgs
>>
>> from .remote import CommandResult, RemoteSession, create_remote_session
>>
>> @@ -53,17 +53,32 @@ def is_alive(self) -> bool:
>> def send_command(
>> self,
>> command: str,
>> - timeout: float,
>> + timeout: float = SETTINGS.timeout,
>> + privileged: bool = False,
>> verify: bool = False,
>> - env: EnvVarsDict | None = None,
>> + env: dict | None = None,
>> ) -> CommandResult:
>> """
>> An all-purpose API in case the command to be executed is already
>> OS-agnostic, such as when the path to the executed command has been
>> constructed beforehand.
>> """
>> + if privileged:
>> + command = self._get_privileged_command(command)
>> +
>> return self.remote_session.send_command(command, timeout, verify, env)
>>
>> + @abstractmethod
>> + def _get_privileged_command(self, command: str) -> str:
>> + """Modify the command so that it executes with administrative privileges.
>> +
>> + Args:
>> + command: The command to modify.
>> +
>> + Returns:
>> + The modified command that executes with administrative privileges.
>> + """
>> +
>> @abstractmethod
>> def guess_dpdk_remote_dir(self, remote_dir) -> PurePath:
>> """
>> @@ -90,17 +105,35 @@ def join_remote_path(self, *args: str | PurePath) -> PurePath:
>> """
>>
>> @abstractmethod
>> - def copy_file(
>> + def copy_from(
>> self,
>> source_file: str | PurePath,
>> destination_file: str | PurePath,
>> - source_remote: bool = False,
>> ) -> None:
>> + """Copy a file from the remote Node to the local filesystem.
>> +
>> + Copy source_file from the remote Node associated with this remote
>> + session to destination_file on the local filesystem.
>> +
>> + Args:
>> + source_file: the file on the remote Node.
>> + destination_file: a file or directory path on the local filesystem.
>> """
>> +
>> + @abstractmethod
>> + def copy_to(
>> + self,
>> + source_file: str | PurePath,
>> + destination_file: str | PurePath,
>> + ) -> None:
>> + """Copy a file from local filesystem to the remote Node.
>> +
>> Copy source_file from local filesystem to destination_file
>> - on the remote Node associated with the remote session.
>> - If source_remote is True, reverse the direction - copy source_file from the
>> - associated remote Node to destination_file on local storage.
>> + on the remote Node associated with this remote session.
>> +
>> + Args:
>> + source_file: the file on the local filesystem.
>> + destination_file: a file or directory path on the remote Node.
>> """
>>
>> @abstractmethod
>> @@ -128,7 +161,7 @@ def extract_remote_tarball(
>> @abstractmethod
>> def build_dpdk(
>> self,
>> - env_vars: EnvVarsDict,
>> + env_vars: dict,
>> meson_args: MesonArgs,
>> remote_dpdk_dir: str | PurePath,
>> remote_dpdk_build_dir: str | PurePath,
>> diff --git a/dts/framework/remote_session/posix_session.py b/dts/framework/remote_session/posix_session.py
>> index d38062e8d6..8ca0acb429 100644
>> --- a/dts/framework/remote_session/posix_session.py
>> +++ b/dts/framework/remote_session/posix_session.py
>> @@ -9,7 +9,7 @@
>> from framework.config import Architecture
>> from framework.exception import DPDKBuildError, RemoteCommandExecutionError
>> from framework.settings import SETTINGS
>> -from framework.utils import EnvVarsDict, MesonArgs
>> +from framework.utils import MesonArgs
>>
>> from .os_session import OSSession
>>
>> @@ -34,7 +34,7 @@ def combine_short_options(**opts: bool) -> str:
>>
>> def guess_dpdk_remote_dir(self, remote_dir) -> PurePosixPath:
>> remote_guess = self.join_remote_path(remote_dir, "dpdk-*")
>> - result = self.remote_session.send_command(f"ls -d {remote_guess} | tail -1")
>> + result = self.send_command(f"ls -d {remote_guess} | tail -1")
>> return PurePosixPath(result.stdout)
>>
>> def get_remote_tmp_dir(self) -> PurePosixPath:
>> @@ -48,7 +48,7 @@ def get_dpdk_build_env_vars(self, arch: Architecture) -> dict:
>> env_vars = {}
>> if arch == Architecture.i686:
>> # find the pkg-config path and store it in PKG_CONFIG_LIBDIR
>> - out = self.remote_session.send_command("find /usr -type d -name pkgconfig")
>> + out = self.send_command("find /usr -type d -name pkgconfig")
>> pkg_path = ""
>> res_path = out.stdout.split("\r\n")
>> for cur_path in res_path:
>> @@ -65,13 +65,19 @@ def get_dpdk_build_env_vars(self, arch: Architecture) -> dict:
>> def join_remote_path(self, *args: str | PurePath) -> PurePosixPath:
>> return PurePosixPath(*args)
>>
>> - def copy_file(
>> + def copy_from(
>> self,
>> source_file: str | PurePath,
>> destination_file: str | PurePath,
>> - source_remote: bool = False,
>> ) -> None:
>> - self.remote_session.copy_file(source_file, destination_file, source_remote)
>> + self.remote_session.copy_from(source_file, destination_file)
>> +
>> + def copy_to(
>> + self,
>> + source_file: str | PurePath,
>> + destination_file: str | PurePath,
>> + ) -> None:
>> + self.remote_session.copy_to(source_file, destination_file)
>>
>> def remove_remote_dir(
>> self,
>> @@ -80,24 +86,24 @@ def remove_remote_dir(
>> force: bool = True,
>> ) -> None:
>> opts = PosixSession.combine_short_options(r=recursive, f=force)
>> - self.remote_session.send_command(f"rm{opts} {remote_dir_path}")
>> + self.send_command(f"rm{opts} {remote_dir_path}")
>>
>> def extract_remote_tarball(
>> self,
>> remote_tarball_path: str | PurePath,
>> expected_dir: str | PurePath | None = None,
>> ) -> None:
>> - self.remote_session.send_command(
>> + self.send_command(
>> f"tar xfm {remote_tarball_path} "
>> f"-C {PurePosixPath(remote_tarball_path).parent}",
>> 60,
>> )
>> if expected_dir:
>> - self.remote_session.send_command(f"ls {expected_dir}", verify=True)
>> + self.send_command(f"ls {expected_dir}", verify=True)
>>
>> def build_dpdk(
>> self,
>> - env_vars: EnvVarsDict,
>> + env_vars: dict,
>> meson_args: MesonArgs,
>> remote_dpdk_dir: str | PurePath,
>> remote_dpdk_build_dir: str | PurePath,
>> @@ -108,7 +114,7 @@ def build_dpdk(
>> if rebuild:
>> # reconfigure, then build
>> self._logger.info("Reconfiguring DPDK build.")
>> - self.remote_session.send_command(
>> + self.send_command(
>> f"meson configure {meson_args} {remote_dpdk_build_dir}",
>> timeout,
>> verify=True,
>> @@ -118,7 +124,7 @@ def build_dpdk(
>> # fresh build - remove target dir first, then build from scratch
>> self._logger.info("Configuring DPDK build from scratch.")
>> self.remove_remote_dir(remote_dpdk_build_dir)
>> - self.remote_session.send_command(
>> + self.send_command(
>> f"meson setup "
>> f"{meson_args} {remote_dpdk_dir} {remote_dpdk_build_dir}",
>> timeout,
>> @@ -127,14 +133,14 @@ def build_dpdk(
>> )
>>
>> self._logger.info("Building DPDK.")
>> - self.remote_session.send_command(
>> + self.send_command(
>> f"ninja -C {remote_dpdk_build_dir}", timeout, verify=True, env=env_vars
>> )
>> except RemoteCommandExecutionError as e:
>> raise DPDKBuildError(f"DPDK build failed when doing '{e.command}'.")
>>
>> def get_dpdk_version(self, build_dir: str | PurePath) -> str:
>> - out = self.remote_session.send_command(
>> + out = self.send_command(
>> f"cat {self.join_remote_path(build_dir, 'VERSION')}", verify=True
>> )
>> return out.stdout
>> @@ -146,7 +152,7 @@ def kill_cleanup_dpdk_apps(self, dpdk_prefix_list: Iterable[str]) -> None:
>> # kill and cleanup only if DPDK is running
>> dpdk_pids = self._get_dpdk_pids(dpdk_runtime_dirs)
>> for dpdk_pid in dpdk_pids:
>> - self.remote_session.send_command(f"kill -9 {dpdk_pid}", 20)
>> + self.send_command(f"kill -9 {dpdk_pid}", 20)
>> self._check_dpdk_hugepages(dpdk_runtime_dirs)
>> self._remove_dpdk_runtime_dirs(dpdk_runtime_dirs)
>>
>> @@ -168,7 +174,7 @@ def _list_remote_dirs(self, remote_path: str | PurePath) -> list[str] | None:
>> Return a list of directories of the remote_dir.
>> If remote_path doesn't exist, return None.
>> """
>> - out = self.remote_session.send_command(
>> + out = self.send_command(
>> f"ls -l {remote_path} | awk '/^d/ {{print $NF}}'"
>> ).stdout
>> if "No such file or directory" in out:
>> @@ -182,9 +188,7 @@ def _get_dpdk_pids(self, dpdk_runtime_dirs: Iterable[str | PurePath]) -> list[in
>> for dpdk_runtime_dir in dpdk_runtime_dirs:
>> dpdk_config_file = PurePosixPath(dpdk_runtime_dir, "config")
>> if self._remote_files_exists(dpdk_config_file):
>> - out = self.remote_session.send_command(
>> - f"lsof -Fp {dpdk_config_file}"
>> - ).stdout
>> + out = self.send_command(f"lsof -Fp {dpdk_config_file}").stdout
>> if out and "No such file or directory" not in out:
>> for out_line in out.splitlines():
>> match = re.match(pid_regex, out_line)
>> @@ -193,7 +197,7 @@ def _get_dpdk_pids(self, dpdk_runtime_dirs: Iterable[str | PurePath]) -> list[in
>> return pids
>>
>> def _remote_files_exists(self, remote_path: PurePath) -> bool:
>> - result = self.remote_session.send_command(f"test -e {remote_path}")
>> + result = self.send_command(f"test -e {remote_path}")
>> return not result.return_code
>>
>> def _check_dpdk_hugepages(
>> @@ -202,9 +206,7 @@ def _check_dpdk_hugepages(
>> for dpdk_runtime_dir in dpdk_runtime_dirs:
>> hugepage_info = PurePosixPath(dpdk_runtime_dir, "hugepage_info")
>> if self._remote_files_exists(hugepage_info):
>> - out = self.remote_session.send_command(
>> - f"lsof -Fp {hugepage_info}"
>> - ).stdout
>> + out = self.send_command(f"lsof -Fp {hugepage_info}").stdout
>> if out and "No such file or directory" not in out:
>> self._logger.warning("Some DPDK processes did not free hugepages.")
>> self._logger.warning("*******************************************")
>> diff --git a/dts/framework/remote_session/remote/remote_session.py b/dts/framework/remote_session/remote/remote_session.py
>> index 91dee3cb4f..0647d93de4 100644
>> --- a/dts/framework/remote_session/remote/remote_session.py
>> +++ b/dts/framework/remote_session/remote/remote_session.py
>> @@ -11,7 +11,6 @@
>> from framework.exception import RemoteCommandExecutionError
>> from framework.logger import DTSLOG
>> from framework.settings import SETTINGS
>> -from framework.utils import EnvVarsDict
>>
>>
>> @dataclasses.dataclass(slots=True, frozen=True)
>> @@ -89,7 +88,7 @@ def send_command(
>> command: str,
>> timeout: float = SETTINGS.timeout,
>> verify: bool = False,
>> - env: EnvVarsDict | None = None,
>> + env: dict | None = None,
>> ) -> CommandResult:
>> """
>> Send a command to the connected node using optional env vars
>> @@ -114,7 +113,7 @@ def send_command(
>>
>> @abstractmethod
>> def _send_command(
>> - self, command: str, timeout: float, env: EnvVarsDict | None
>> + self, command: str, timeout: float, env: dict | None
>> ) -> CommandResult:
>> """
>> Use the underlying protocol to execute the command using optional env vars
>> @@ -141,15 +140,33 @@ def is_alive(self) -> bool:
>> """
>>
>> @abstractmethod
>> - def copy_file(
>> + def copy_from(
>> self,
>> source_file: str | PurePath,
>> destination_file: str | PurePath,
>> - source_remote: bool = False,
>> ) -> None:
>> + """Copy a file from the remote Node to the local filesystem.
>> +
>> + Copy source_file from the remote Node associated with this remote
>> + session to destination_file on the local filesystem.
>> +
>> + Args:
>> + source_file: the file on the remote Node.
>> + destination_file: a file or directory path on the local filesystem.
>> """
>> - Copy source_file from local filesystem to destination_file on the remote Node
>> - associated with the remote session.
>> - If source_remote is True, reverse the direction - copy source_file from the
>> - associated Node to destination_file on local filesystem.
>> +
>> + @abstractmethod
>> + def copy_to(
>> + self,
>> + source_file: str | PurePath,
>> + destination_file: str | PurePath,
>> + ) -> None:
>> + """Copy a file from local filesystem to the remote Node.
>> +
>> + Copy source_file from local filesystem to destination_file
>> + on the remote Node associated with this remote session.
>> +
>> + Args:
>> + source_file: the file on the local filesystem.
>> + destination_file: a file or directory path on the remote Node.
>> """
>> diff --git a/dts/framework/remote_session/remote/ssh_session.py b/dts/framework/remote_session/remote/ssh_session.py
>> index 42ff9498a2..8d127f1601 100644
>> --- a/dts/framework/remote_session/remote/ssh_session.py
>> +++ b/dts/framework/remote_session/remote/ssh_session.py
>> @@ -1,29 +1,49 @@
>> # SPDX-License-Identifier: BSD-3-Clause
>> -# Copyright(c) 2010-2014 Intel Corporation
>> -# Copyright(c) 2022-2023 PANTHEON.tech s.r.o.
>> -# Copyright(c) 2022-2023 University of New Hampshire
>> +# Copyright(c) 2023 PANTHEON.tech s.r.o.
>>
>
> I've noticed in other patches you've simply appended the copyright for PANTHEON.tech to the existing list. Is there a reason you remove the others here as well?
>
It's a rewrite of the file. I'm the only author of the code (i.e.
neither Intel nor UNH contributed to the Fabric code) so I left only
us there. I'm not sure this is the right way to do this, but it made
sense to me. I have no problem with leaving all parties in.
>>
>> -import time
>> +import socket
>> +import traceback
>> from pathlib import PurePath
>>
>> -import pexpect # type: ignore
>> -from pexpect import pxssh # type: ignore
>> +from fabric import Connection # type: ignore[import]
>> +from invoke.exceptions import ( # type: ignore[import]
>> + CommandTimedOut,
>> + ThreadException,
>> + UnexpectedExit,
>> +)
>> +from paramiko.ssh_exception import ( # type: ignore[import]
>> + AuthenticationException,
>> + BadHostKeyException,
>> + NoValidConnectionsError,
>> + SSHException,
>> +)
>>
>> from framework.config import NodeConfiguration
>> from framework.exception import SSHConnectionError, SSHSessionDeadError, SSHTimeoutError
>> from framework.logger import DTSLOG
>> -from framework.utils import GREEN, RED, EnvVarsDict
>>
>> from .remote_session import CommandResult, RemoteSession
>>
>>
>> class SSHSession(RemoteSession):
>> - """
>> - Module for creating Pexpect SSH remote sessions.
>> + """A persistent SSH connection to a remote Node.
>> +
>> + The connection is implemented with the Fabric Python library.
>> +
>> + Args:
>> + node_config: The configuration of the Node to connect to.
>> + session_name: The name of the session.
>> + logger: The logger used for logging.
>> + This should be passed from the parent OSSession.
>> +
>> + Attributes:
>> + session: The underlying Fabric SSH connection.
>> +
>> + Raises:
>> + SSHConnectionError: The connection cannot be established.
>> """
>>
>> - session: pxssh.pxssh
>> - magic_prompt: str
>> + session: Connection
>>
>> def __init__(
>> self,
>> @@ -31,218 +51,91 @@ def __init__(
>> session_name: str,
>> logger: DTSLOG,
>> ):
>> - self.magic_prompt = "MAGIC PROMPT"
>> super(SSHSession, self).__init__(node_config, session_name, logger)
>>
>> def _connect(self) -> None:
>> - """
>> - Create connection to assigned node.
>> - """
>> + errors = []
>> retry_attempts = 10
>> login_timeout = 20 if self.port else 10
>> - password_regex = (
>> - r"(?i)(?:password:)|(?:passphrase for key)|(?i)(password for .+:)"
>> - )
>> - try:
>> - for retry_attempt in range(retry_attempts):
>> - self.session = pxssh.pxssh(encoding="utf-8")
>> - try:
>> - self.session.login(
>> - self.ip,
>> - self.username,
>> - self.password,
>> - original_prompt="[$#>]",
>> - port=self.port,
>> - login_timeout=login_timeout,
>> - password_regex=password_regex,
>> - )
>> - break
>> - except Exception as e:
>> - self._logger.warning(e)
>> - time.sleep(2)
>> - self._logger.info(
>> - f"Retrying connection: retry number {retry_attempt + 1}."
>> - )
>> - else:
>> - raise Exception(f"Connection to {self.hostname} failed")
>> -
>> - self.send_expect("stty -echo", "#")
>> - self.send_expect("stty columns 1000", "#")
>> - self.send_expect("bind 'set enable-bracketed-paste off'", "#")
>> - except Exception as e:
>> - self._logger.error(RED(str(e)))
>> - if getattr(self, "port", None):
>> - suggestion = (
>> - f"\nSuggestion: Check if the firewall on {self.hostname} is "
>> - f"stopped.\n"
>> + for retry_attempt in range(retry_attempts):
>> + try:
>> + self.session = Connection(
>> + self.ip,
>> + user=self.username,
>> + port=self.port,
>> + connect_kwargs={"password": self.password},
>> + connect_timeout=login_timeout,
>> )
>> - self._logger.info(GREEN(suggestion))
>> -
>> - raise SSHConnectionError(self.hostname)
>> + self.session.open()
>>
>> - def send_expect(
>> - self, command: str, prompt: str, timeout: float = 15, verify: bool = False
>> - ) -> str | int:
>> - try:
>> - ret = self.send_expect_base(command, prompt, timeout)
>> - if verify:
>> - ret_status = self.send_expect_base("echo $?", prompt, timeout)
>> - try:
>> - retval = int(ret_status)
>> - if retval:
>> - self._logger.error(f"Command: {command} failure!")
>> - self._logger.error(ret)
>> - return retval
>> - else:
>> - return ret
>> - except ValueError:
>> - return ret
>> - else:
>> - return ret
>> - except Exception as e:
>> - self._logger.error(
>> - f"Exception happened in [{command}] and output is "
>> - f"[{self._get_output()}]"
>> - )
>> - raise e
>> -
>> - def send_expect_base(self, command: str, prompt: str, timeout: float) -> str:
>> - self._clean_session()
>> - original_prompt = self.session.PROMPT
>> - self.session.PROMPT = prompt
>> - self._send_line(command)
>> - self._prompt(command, timeout)
>> -
>> - before = self._get_output()
>> - self.session.PROMPT = original_prompt
>> - return before
>> -
>> - def _clean_session(self) -> None:
>> - self.session.PROMPT = self.magic_prompt
>> - self.get_output(timeout=0.01)
>> - self.session.PROMPT = self.session.UNIQUE_PROMPT
>> -
>> - def _send_line(self, command: str) -> None:
>> - if not self.is_alive():
>> - raise SSHSessionDeadError(self.hostname)
>> - if len(command) == 2 and command.startswith("^"):
>> - self.session.sendcontrol(command[1])
>> - else:
>> - self.session.sendline(command)
>> + except (ValueError, BadHostKeyException, AuthenticationException) as e:
>> + self._logger.exception(e)
>> + raise SSHConnectionError(self.hostname) from e
>>
>> - def _prompt(self, command: str, timeout: float) -> None:
>> - if not self.session.prompt(timeout):
>> - raise SSHTimeoutError(command, self._get_output()) from None
>> + except (NoValidConnectionsError, socket.error, SSHException) as e:
>> + self._logger.debug(traceback.format_exc())
>> + self._logger.warning(e)
>>
>> - def get_output(self, timeout: float = 15) -> str:
>> - """
>> - Get all output before timeout
>> - """
>> - try:
>> - self.session.prompt(timeout)
>> - except Exception:
>> - pass
>> -
>> - before = self._get_output()
>> - self._flush()
>> -
>> - return before
>> + error = repr(e)
>> + if error not in errors:
>> + errors.append(error)
>>
>> - def _get_output(self) -> str:
>> - if not self.is_alive():
>> - raise SSHSessionDeadError(self.hostname)
>> - before = self.session.before.rsplit("\r\n", 1)[0]
>> - if before == "[PEXPECT]":
>> - return ""
>> - return before
>> + self._logger.info(
>> + f"Retrying connection: retry number {retry_attempt + 1}."
>> + )
>>
>> - def _flush(self) -> None:
>> - """
>> - Clear all session buffer
>> - """
>> - self.session.buffer = ""
>> - self.session.before = ""
>> + else:
>> + break
>> + else:
>> + raise SSHConnectionError(self.hostname, errors)
>>
>> def is_alive(self) -> bool:
>> - return self.session.isalive()
>> + return self.session.is_connected
>>
>> def _send_command(
>> - self, command: str, timeout: float, env: EnvVarsDict | None
>> + self, command: str, timeout: float, env: dict | None
>> ) -> CommandResult:
>> - output = self._send_command_get_output(command, timeout, env)
>> - return_code = int(self._send_command_get_output("echo $?", timeout, None))
>> + """Send a command and return the result of the execution.
>>
>> - # we're capturing only stdout
>> - return CommandResult(self.name, command, output, "", return_code)
>> + Args:
>> + command: The command to execute.
>> + timeout: Wait at most this many seconds for the execution to complete.
>> + env: Extra environment variables that will be used in command execution.
>>
>> - def _send_command_get_output(
>> - self, command: str, timeout: float, env: EnvVarsDict | None
>> - ) -> str:
>> + Raises:
>> + SSHSessionDeadError: The session died while executing the command.
>> + SSHTimeoutError: The command execution timed out.
>> + """
>> try:
>> - self._clean_session()
>> - if env:
>> - command = f"{env} {command}"
>> - self._send_line(command)
>> - except Exception as e:
>> - raise e
>> + output = self.session.run(
>> + command, env=env, warn=True, hide=True, timeout=timeout
>> + )
>>
>> - output = self.get_output(timeout=timeout)
>> - self.session.PROMPT = self.session.UNIQUE_PROMPT
>> - self.session.prompt(0.1)
>> + except (UnexpectedExit, ThreadException) as e:
>> + self._logger.exception(e)
>> + raise SSHSessionDeadError(self.hostname) from e
>>
>> - return output
>> + except CommandTimedOut as e:
>> + self._logger.exception(e)
>> + raise SSHTimeoutError(command, e.result.stderr) from e
>>
>> - def _close(self, force: bool = False) -> None:
>> - if force is True:
>> - self.session.close()
>> - else:
>> - if self.is_alive():
>> - self.session.logout()
>> + return CommandResult(
>> + self.name, command, output.stdout, output.stderr, output.return_code
>> + )
>>
>> - def copy_file(
>> + def copy_from(
>> self,
>> source_file: str | PurePath,
>> destination_file: str | PurePath,
>> - source_remote: bool = False,
>> ) -> None:
>> - """
>> - Send a local file to a remote host.
>> - """
>> - if source_remote:
>> - source_file = f"{self.username}@{self.ip}:{source_file}"
>> - else:
>> - destination_file = f"{self.username}@{self.ip}:{destination_file}"
>> + self.session.get(str(destination_file), str(source_file))
>>
>> - port = ""
>> - if self.port:
>> - port = f" -P {self.port}"
>> -
>> - command = (
>> - f"scp -v{port} -o NoHostAuthenticationForLocalhost=yes"
>> - f" {source_file} {destination_file}"
>> - )
>> -
>> - self._spawn_scp(command)
>> + def copy_to(
>> + self,
>> + source_file: str | PurePath,
>> + destination_file: str | PurePath,
>> + ) -> None:
>> + self.session.put(str(source_file), str(destination_file))
>>
>> - def _spawn_scp(self, scp_cmd: str) -> None:
>> - """
>> - Transfer a file with SCP
>> - """
>> - self._logger.info(scp_cmd)
>> - p: pexpect.spawn = pexpect.spawn(scp_cmd)
>> - time.sleep(0.5)
>> - ssh_newkey: str = "Are you sure you want to continue connecting"
>> - i: int = p.expect(
>> - [ssh_newkey, "[pP]assword", "# ", pexpect.EOF, pexpect.TIMEOUT], 120
>> - )
>> - if i == 0: # add once in trust list
>> - p.sendline("yes")
>> - i = p.expect([ssh_newkey, "[pP]assword", pexpect.EOF], 2)
>> -
>> - if i == 1:
>> - time.sleep(0.5)
>> - p.sendline(self.password)
>> - p.expect("Exit status 0", 60)
>> - if i == 4:
>> - self._logger.error("SCP TIMEOUT error %d" % i)
>> - p.close()
>> + def _close(self, force: bool = False) -> None:
>> + self.session.close()
>> diff --git a/dts/framework/testbed_model/sut_node.py b/dts/framework/testbed_model/sut_node.py
>> index 2b2b50d982..9dbc390848 100644
>> --- a/dts/framework/testbed_model/sut_node.py
>> +++ b/dts/framework/testbed_model/sut_node.py
>> @@ -10,7 +10,7 @@
>> from framework.config import BuildTargetConfiguration, NodeConfiguration
>> from framework.remote_session import CommandResult, OSSession
>> from framework.settings import SETTINGS
>> -from framework.utils import EnvVarsDict, MesonArgs
>> +from framework.utils import MesonArgs
>>
>> from .hw import LogicalCoreCount, LogicalCoreList, VirtualDevice
>> from .node import Node
>> @@ -27,7 +27,7 @@ class SutNode(Node):
>> _dpdk_prefix_list: list[str]
>> _dpdk_timestamp: str
>> _build_target_config: BuildTargetConfiguration | None
>> - _env_vars: EnvVarsDict
>> + _env_vars: dict
>> _remote_tmp_dir: PurePath
>> __remote_dpdk_dir: PurePath | None
>> _dpdk_version: str | None
>> @@ -38,7 +38,7 @@ def __init__(self, node_config: NodeConfiguration):
>> super(SutNode, self).__init__(node_config)
>> self._dpdk_prefix_list = []
>> self._build_target_config = None
>> - self._env_vars = EnvVarsDict()
>> + self._env_vars = {}
>> self._remote_tmp_dir = self.main_session.get_remote_tmp_dir()
>> self.__remote_dpdk_dir = None
>> self._dpdk_version = None
>> @@ -94,7 +94,7 @@ def _configure_build_target(
>> """
>> Populate common environment variables and set build target config.
>> """
>> - self._env_vars = EnvVarsDict()
>> + self._env_vars = {}
>> self._build_target_config = build_target_config
>> self._env_vars.update(
>> self.main_session.get_dpdk_build_env_vars(build_target_config.arch)
>> @@ -112,7 +112,7 @@ def _copy_dpdk_tarball(self) -> None:
>> Copy to and extract DPDK tarball on the SUT node.
>> """
>> self._logger.info("Copying DPDK tarball to SUT.")
>> - self.main_session.copy_file(SETTINGS.dpdk_tarball_path, self._remote_tmp_dir)
>> + self.main_session.copy_to(SETTINGS.dpdk_tarball_path, self._remote_tmp_dir)
>>
>> # construct remote tarball path
>> # the basename is the same on local host and on remote Node
>> @@ -259,7 +259,7 @@ def run_dpdk_app(
>> Run DPDK application on the remote node.
>> """
>> return self.main_session.send_command(
>> - f"{app_path} {eal_args}", timeout, verify=True
>> + f"{app_path} {eal_args}", timeout, privileged=True, verify=True
>> )
>>
>>
>> diff --git a/dts/framework/utils.py b/dts/framework/utils.py
>> index 55e0b0ef0e..8cfbc6a29d 100644
>> --- a/dts/framework/utils.py
>> +++ b/dts/framework/utils.py
>> @@ -42,19 +42,10 @@ def expand_range(range_str: str) -> list[int]:
>> return expanded_range
>>
>>
>> -def GREEN(text: str) -> str:
>> - return f"\u001B[32;1m{str(text)}\u001B[0m"
>> -
>> -
>> def RED(text: str) -> str:
>> return f"\u001B[31;1m{str(text)}\u001B[0m"
>>
>>
>> -class EnvVarsDict(dict):
>> - def __str__(self) -> str:
>> - return " ".join(["=".join(item) for item in self.items()])
>> -
>> -
>> class MesonArgs(object):
>> """
>> Aggregate the arguments needed to build DPDK:
>> diff --git a/dts/poetry.lock b/dts/poetry.lock
>> index 0b2a007d4d..2438f337cd 100644
>> --- a/dts/poetry.lock
>> +++ b/dts/poetry.lock
>> @@ -12,6 +12,18 @@ docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
>> tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
>> tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"]
>>
>> +[[package]]
>> +name = "bcrypt"
>> +version = "4.0.1"
>> +description = "Modern password hashing for your software and your servers"
>> +category = "main"
>> +optional = false
>> +python-versions = ">=3.6"
>> +
>> +[package.extras]
>> +tests = ["pytest (>=3.2.1,!=3.3.0)"]
>> +typecheck = ["mypy"]
>> +
>> [[package]]
>> name = "black"
>> version = "22.10.0"
>> @@ -33,6 +45,17 @@ d = ["aiohttp (>=3.7.4)"]
>> jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
>> uvloop = ["uvloop (>=0.15.2)"]
>>
>> +[[package]]
>> +name = "cffi"
>> +version = "1.15.1"
>> +description = "Foreign Function Interface for Python calling C code."
>> +category = "main"
>> +optional = false
>> +python-versions = "*"
>> +
>> +[package.dependencies]
>> +pycparser = "*"
>> +
>> [[package]]
>> name = "click"
>> version = "8.1.3"
>> @@ -52,6 +75,52 @@ category = "dev"
>> optional = false
>> python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
>>
>> +[[package]]
>> +name = "cryptography"
>> +version = "40.0.2"
>> +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
>> +category = "main"
>> +optional = false
>> +python-versions = ">=3.6"
>> +
>> +[package.dependencies]
>> +cffi = ">=1.12"
>> +
>> +[package.extras]
>> +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
>> +docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"]
>> +pep8test = ["black", "ruff", "mypy", "check-manifest"]
>> +sdist = ["setuptools-rust (>=0.11.4)"]
>> +ssh = ["bcrypt (>=3.1.5)"]
>> +test = ["pytest (>=6.2.0)", "pytest-shard (>=0.1.2)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601"]
>> +test-randomorder = ["pytest-randomly"]
>> +tox = ["tox"]
>> +
>> +[[package]]
>> +name = "fabric"
>> +version = "2.7.1"
>> +description = "High level SSH command execution"
>> +category = "main"
>> +optional = false
>> +python-versions = "*"
>> +
>> +[package.dependencies]
>> +invoke = ">=1.3,<2.0"
>> +paramiko = ">=2.4"
>> +pathlib2 = "*"
>> +
>> +[package.extras]
>> +pytest = ["mock (>=2.0.0,<3.0)", "pytest (>=3.2.5,<4.0)"]
>> +testing = ["mock (>=2.0.0,<3.0)"]
>> +
>> +[[package]]
>> +name = "invoke"
>> +version = "1.7.3"
>> +description = "Pythonic task execution"
>> +category = "main"
>> +optional = false
>> +python-versions = "*"
>> +
>> [[package]]
>> name = "isort"
>> version = "5.10.1"
>> @@ -136,23 +205,41 @@ optional = false
>> python-versions = "*"
>>
>> [[package]]
>> -name = "pathspec"
>> -version = "0.10.1"
>> -description = "Utility library for gitignore style pattern matching of file paths."
>> -category = "dev"
>> +name = "paramiko"
>> +version = "3.1.0"
>> +description = "SSH2 protocol library"
>> +category = "main"
>> optional = false
>> -python-versions = ">=3.7"
>> +python-versions = ">=3.6"
>> +
>> +[package.dependencies]
>> +bcrypt = ">=3.2"
>> +cryptography = ">=3.3"
>> +pynacl = ">=1.5"
>> +
>> +[package.extras]
>> +all = ["pyasn1 (>=0.1.7)", "invoke (>=2.0)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"]
>> +gssapi = ["pyasn1 (>=0.1.7)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"]
>> +invoke = ["invoke (>=2.0)"]
>>
>> [[package]]
>> -name = "pexpect"
>> -version = "4.8.0"
>> -description = "Pexpect allows easy control of interactive console applications."
>> +name = "pathlib2"
>> +version = "2.3.7.post1"
>> +description = "Object-oriented filesystem paths"
>> category = "main"
>> optional = false
>> python-versions = "*"
>>
>> [package.dependencies]
>> -ptyprocess = ">=0.5"
>> +six = "*"
>> +
>> +[[package]]
>> +name = "pathspec"
>> +version = "0.10.1"
>> +description = "Utility library for gitignore style pattern matching of file paths."
>> +category = "dev"
>> +optional = false
>> +python-versions = ">=3.7"
>>
>> [[package]]
>> name = "platformdirs"
>> @@ -166,14 +253,6 @@ python-versions = ">=3.7"
>> docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"]
>> test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"]
>>
>> -[[package]]
>> -name = "ptyprocess"
>> -version = "0.7.0"
>> -description = "Run a subprocess in a pseudo terminal"
>> -category = "main"
>> -optional = false
>> -python-versions = "*"
>> -
>> [[package]]
>> name = "pycodestyle"
>> version = "2.9.1"
>> @@ -182,6 +261,14 @@ category = "dev"
>> optional = false
>> python-versions = ">=3.6"
>>
>> +[[package]]
>> +name = "pycparser"
>> +version = "2.21"
>> +description = "C parser in Python"
>> +category = "main"
>> +optional = false
>> +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
>> +
>> [[package]]
>> name = "pydocstyle"
>> version = "6.1.1"
>> @@ -228,6 +315,21 @@ tests = ["pytest (>=7.1.2)", "pytest-mypy", "eradicate (>=2.0.0)", "radon (>=5.1
>> toml = ["toml (>=0.10.2)"]
>> vulture = ["vulture"]
>>
>> +[[package]]
>> +name = "pynacl"
>> +version = "1.5.0"
>> +description = "Python binding to the Networking and Cryptography (NaCl) library"
>> +category = "main"
>> +optional = false
>> +python-versions = ">=3.6"
>> +
>> +[package.dependencies]
>> +cffi = ">=1.4.1"
>> +
>> +[package.extras]
>> +docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"]
>> +tests = ["pytest (>=3.2.1,!=3.3.0)", "hypothesis (>=3.27.0)"]
>> +
>> [[package]]
>> name = "pyrsistent"
>> version = "0.19.1"
>> @@ -244,6 +346,14 @@ category = "main"
>> optional = false
>> python-versions = ">=3.6"
>>
>> +[[package]]
>> +name = "six"
>> +version = "1.16.0"
>> +description = "Python 2 and 3 compatibility utilities"
>> +category = "main"
>> +optional = false
>> +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
>> +
>> [[package]]
>> name = "snowballstemmer"
>> version = "2.2.0"
>> @@ -299,13 +409,18 @@ jsonschema = ">=4,<5"
>> [metadata]
>> lock-version = "1.1"
>> python-versions = "^3.10"
>> -content-hash = "a0f040b07fc6ce4deb0be078b9a88c2a465cb6bccb9e260a67e92c2403e2319f"
>> +content-hash = "719c43bcaa5d181921debda884f8f714063df0b2336d61e9f64ecab034e8b139"
>>
>> [metadata.files]
>> attrs = []
>> +bcrypt = []
>> black = []
>> +cffi = []
>> click = []
>> colorama = []
>> +cryptography = []
>> +fabric = []
>> +invoke = []
>> isort = []
>> jsonpatch = []
>> jsonpointer = []
>> @@ -313,22 +428,22 @@ jsonschema = []
>> mccabe = []
>> mypy = []
>> mypy-extensions = []
>> +paramiko = []
>> +pathlib2 = []
>> pathspec = []
>> -pexpect = [
>> - {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"},
>> - {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"},
>> -]
>> platformdirs = [
>> {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
>> {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},
>> ]
>> -ptyprocess = []
>> pycodestyle = []
>> +pycparser = []
>> pydocstyle = []
>> pyflakes = []
>> pylama = []
>> +pynacl = []
>> pyrsistent = []
>> pyyaml = []
>> +six = []
>> snowballstemmer = []
>> toml = []
>> tomli = []
>> diff --git a/dts/pyproject.toml b/dts/pyproject.toml
>> index a136c91e5e..50bcdb327a 100644
>> --- a/dts/pyproject.toml
>> +++ b/dts/pyproject.toml
>> @@ -9,10 +9,10 @@ authors = ["Owen Hilyard <ohilyard@iol.unh.edu>", "dts@dpdk.org"]
>>
>> [tool.poetry.dependencies]
>> python = "^3.10"
>> -pexpect = "^4.8.0"
>> warlock = "^2.0.1"
>> PyYAML = "^6.0"
>> types-PyYAML = "^6.0.8"
>> +fabric = "^2.7.1"
>>
>> [tool.poetry.dev-dependencies]
>> mypy = "^0.961"
>> --
>> 2.30.2
>>
^ permalink raw reply [flat|nested] 21+ messages in thread
* Re: [PATCH v2] dts: replace pexpect with fabric
2023-05-02 13:00 ` Juraj Linkeš
@ 2023-05-03 17:54 ` Jeremy Spewock
0 siblings, 0 replies; 21+ messages in thread
From: Jeremy Spewock @ 2023-05-03 17:54 UTC (permalink / raw)
To: Juraj Linkeš
Cc: thomas, Honnappa.Nagarahalli, lijuan.tu, wathsala.vithanage, probb, dev
[-- Attachment #1: Type: text/plain, Size: 53245 bytes --]
On Tue, May 2, 2023 at 9:00 AM Juraj Linkeš <juraj.linkes@pantheon.tech>
wrote:
> On Fri, Apr 28, 2023 at 9:04 PM Jeremy Spewock <jspewock@iol.unh.edu>
> wrote:
> >
> >
> >
> > On Mon, Apr 24, 2023 at 9:35 AM Juraj Linkeš <juraj.linkes@pantheon.tech>
> wrote:
> >>
> >> Pexpect is not a dedicated SSH connection library while Fabric is. With
> >> Fabric, all SSH-related logic is provided and we can just focus on
> >> what's DTS specific.
> >>
> >> Signed-off-by: Juraj Linkeš <juraj.linkes@pantheon.tech>
> >> ---
> >> doc/guides/tools/dts.rst | 29 +-
> >> dts/conf.yaml | 2 +-
> >> dts/framework/exception.py | 10 +-
> >> dts/framework/remote_session/linux_session.py | 31 +-
> >> dts/framework/remote_session/os_session.py | 51 +++-
> >> dts/framework/remote_session/posix_session.py | 48 +--
> >> .../remote_session/remote/remote_session.py | 35 ++-
> >> .../remote_session/remote/ssh_session.py | 287 ++++++------------
> >> dts/framework/testbed_model/sut_node.py | 12 +-
> >> dts/framework/utils.py | 9 -
> >> dts/poetry.lock | 161 ++++++++--
> >> dts/pyproject.toml | 2 +-
> >> 12 files changed, 376 insertions(+), 301 deletions(-)
> >>
> >> diff --git a/doc/guides/tools/dts.rst b/doc/guides/tools/dts.rst
> >> index ebd6dceb6a..d15826c098 100644
> >> --- a/doc/guides/tools/dts.rst
> >> +++ b/doc/guides/tools/dts.rst
> >> @@ -95,9 +95,14 @@ Setting up DTS environment
> >>
> >> #. **SSH Connection**
> >>
> >> - DTS uses Python pexpect for SSH connections between DTS environment
> and the other hosts.
> >> - The pexpect implementation is a wrapper around the ssh command in
> the DTS environment.
> >> - This means it'll use the SSH agent providing the ssh command and
> its keys.
> >> + DTS uses the Fabric Python library for SSH connections between DTS
> environment
> >> + and the other hosts.
> >> + The authentication method used is pubkey authentication.
> >> + Fabric tries to use a passed key/certificate,
> >> + then any key it can find through an SSH agent,
> >> + then any "id_rsa", "id_dsa" or "id_ecdsa" key discoverable in
> ``~/.ssh/``
> >> + (with any matching OpenSSH-style certificates).
> >> + DTS doesn't pass any keys, so Fabric tries to use the other two
> methods.
> >>
> >>
> >> Setting up System Under Test
> >> @@ -132,6 +137,21 @@ There are two areas that need to be set up on a
> System Under Test:
> >> It's possible to use the hugepage configuration already present
> on the SUT.
> >> If you wish to do so, don't specify the hugepage configuration in
> the DTS config file.
> >>
> >> +#. **User with administrator privileges**
> >> +
> >> +.. _sut_admin_user:
> >> +
> >> + DTS needs administrator privileges to run DPDK applications (such
> as testpmd) on the SUT.
> >> + The SUT user must be able to run commands in privileged mode without
> asking for a password.
> >> + On most Linux distributions, it's a matter of setting up
> passwordless sudo:
> >> +
> >> + #. Run ``sudo visudo`` and check that it contains ``%sudo
> ALL=(ALL:ALL) ALL``.
> >> +
> >> + #. Add the SUT user to the sudo group with:
> >> +
> >> + .. code-block:: console
> >> +
> >> + sudo usermod -aG sudo <sut_user>
> >>
> >> Running DTS
> >> -----------
> >> @@ -151,7 +171,8 @@ which is a template that illustrates what can be
> configured in DTS:
> >> :start-at: executions:
> >>
> >>
> >> -The user must be root or any other user with prompt starting with
> ``#``.
> >> +The user must have :ref:`administrator privileges <sut_admin_user>`
> >> +which don't require password authentication.
> >> The other fields are mostly self-explanatory
> >> and documented in more detail in
> ``dts/framework/config/conf_yaml_schema.json``.
> >>
> >> diff --git a/dts/conf.yaml b/dts/conf.yaml
> >> index a9bd8a3ecf..129801d87c 100644
> >> --- a/dts/conf.yaml
> >> +++ b/dts/conf.yaml
> >> @@ -16,7 +16,7 @@ executions:
> >> nodes:
> >> - name: "SUT 1"
> >> hostname: sut1.change.me.localhost
> >> - user: root
> >> + user: dtsuser
> >> arch: x86_64
> >> os: linux
> >> lcores: ""
> >> diff --git a/dts/framework/exception.py b/dts/framework/exception.py
> >> index ca353d98fc..44ff4e979a 100644
> >> --- a/dts/framework/exception.py
> >> +++ b/dts/framework/exception.py
> >> @@ -62,13 +62,19 @@ class SSHConnectionError(DTSError):
> >> """
> >>
> >> host: str
> >> + errors: list[str]
> >> severity: ClassVar[ErrorSeverity] = ErrorSeverity.SSH_ERR
> >>
> >> - def __init__(self, host: str):
> >> + def __init__(self, host: str, errors: list[str] | None = None):
> >> self.host = host
> >> + self.errors = [] if errors is None else errors
> >>
> >> def __str__(self) -> str:
> >> - return f"Error trying to connect with {self.host}"
> >> + message = f"Error trying to connect with {self.host}."
> >> + if self.errors:
> >> + message += f" Errors encountered while retrying: {',
> '.join(self.errors)}"
> >> +
> >> + return message
> >>
> >>
> >> class SSHSessionDeadError(DTSError):
> >> diff --git a/dts/framework/remote_session/linux_session.py
> b/dts/framework/remote_session/linux_session.py
> >> index a1e3bc3a92..f13f399121 100644
> >> --- a/dts/framework/remote_session/linux_session.py
> >> +++ b/dts/framework/remote_session/linux_session.py
> >> @@ -14,10 +14,11 @@ class LinuxSession(PosixSession):
> >> The implementation of non-Posix compliant parts of Linux remote
> sessions.
> >> """
> >>
> >> + def _get_privileged_command(self, command: str) -> str:
> >> + return f"sudo -- sh -c '{command}'"
> >> +
> >> def get_remote_cpus(self, use_first_core: bool) ->
> list[LogicalCore]:
> >> - cpu_info = self.remote_session.send_command(
> >> - "lscpu -p=CPU,CORE,SOCKET,NODE|grep -v \\#"
> >> - ).stdout
> >> + cpu_info = self.send_command("lscpu
> -p=CPU,CORE,SOCKET,NODE|grep -v \\#").stdout
> >> lcores = []
> >> for cpu_line in cpu_info.splitlines():
> >> lcore, core, socket, node = map(int, cpu_line.split(","))
> >> @@ -45,20 +46,20 @@ def setup_hugepages(self, hugepage_amount: int,
> force_first_numa: bool) -> None:
> >> self._mount_huge_pages()
> >>
> >> def _get_hugepage_size(self) -> int:
> >> - hugepage_size = self.remote_session.send_command(
> >> + hugepage_size = self.send_command(
> >> "awk '/Hugepagesize/ {print $2}' /proc/meminfo"
> >> ).stdout
> >> return int(hugepage_size)
> >>
> >> def _get_hugepages_total(self) -> int:
> >> - hugepages_total = self.remote_session.send_command(
> >> + hugepages_total = self.send_command(
> >> "awk '/HugePages_Total/ { print $2 }' /proc/meminfo"
> >> ).stdout
> >> return int(hugepages_total)
> >>
> >> def _get_numa_nodes(self) -> list[int]:
> >> try:
> >> - numa_count = self.remote_session.send_command(
> >> + numa_count = self.send_command(
> >> "cat /sys/devices/system/node/online", verify=True
> >> ).stdout
> >> numa_range = expand_range(numa_count)
> >> @@ -70,14 +71,12 @@ def _get_numa_nodes(self) -> list[int]:
> >> def _mount_huge_pages(self) -> None:
> >> self._logger.info("Re-mounting Hugepages.")
> >> hugapge_fs_cmd = "awk '/hugetlbfs/ { print $2 }' /proc/mounts"
> >> - self.remote_session.send_command(f"umount $({hugapge_fs_cmd})")
> >> - result = self.remote_session.send_command(hugapge_fs_cmd)
> >> + self.send_command(f"umount $({hugapge_fs_cmd})")
> >> + result = self.send_command(hugapge_fs_cmd)
> >> if result.stdout == "":
> >> remote_mount_path = "/mnt/huge"
> >> - self.remote_session.send_command(f"mkdir -p
> {remote_mount_path}")
> >> - self.remote_session.send_command(
> >> - f"mount -t hugetlbfs nodev {remote_mount_path}"
> >> - )
> >> + self.send_command(f"mkdir -p {remote_mount_path}")
> >> + self.send_command(f"mount -t hugetlbfs nodev
> {remote_mount_path}")
> >>
> >> def _supports_numa(self) -> bool:
> >> # the system supports numa if self._numa_nodes is non-empty
> and there are more
> >> @@ -94,14 +93,12 @@ def _configure_huge_pages(
> >> )
> >> if force_first_numa and self._supports_numa():
> >> # clear non-numa hugepages
> >> - self.remote_session.send_command(
> >> - f"echo 0 | sudo tee {hugepage_config_path}"
> >> - )
> >> + self.send_command(f"echo 0 | tee {hugepage_config_path}",
> privileged=True)
> >> hugepage_config_path = (
> >>
> f"/sys/devices/system/node/node{self._numa_nodes[0]}/hugepages"
> >> f"/hugepages-{size}kB/nr_hugepages"
> >> )
> >>
> >> - self.remote_session.send_command(
> >> - f"echo {amount} | sudo tee {hugepage_config_path}"
> >> + self.send_command(
> >> + f"echo {amount} | tee {hugepage_config_path}",
> privileged=True
> >> )
> >> diff --git a/dts/framework/remote_session/os_session.py
> b/dts/framework/remote_session/os_session.py
> >> index 4c48ae2567..bfd70bd480 100644
> >> --- a/dts/framework/remote_session/os_session.py
> >> +++ b/dts/framework/remote_session/os_session.py
> >> @@ -10,7 +10,7 @@
> >> from framework.logger import DTSLOG
> >> from framework.settings import SETTINGS
> >> from framework.testbed_model import LogicalCore
> >> -from framework.utils import EnvVarsDict, MesonArgs
> >> +from framework.utils import MesonArgs
> >>
> >> from .remote import CommandResult, RemoteSession, create_remote_session
> >>
> >> @@ -53,17 +53,32 @@ def is_alive(self) -> bool:
> >> def send_command(
> >> self,
> >> command: str,
> >> - timeout: float,
> >> + timeout: float = SETTINGS.timeout,
> >> + privileged: bool = False,
> >> verify: bool = False,
> >> - env: EnvVarsDict | None = None,
> >> + env: dict | None = None,
> >> ) -> CommandResult:
> >> """
> >> An all-purpose API in case the command to be executed is
> already
> >> OS-agnostic, such as when the path to the executed command has
> been
> >> constructed beforehand.
> >> """
> >> + if privileged:
> >> + command = self._get_privileged_command(command)
> >> +
> >> return self.remote_session.send_command(command, timeout,
> verify, env)
> >>
> >> + @abstractmethod
> >> + def _get_privileged_command(self, command: str) -> str:
> >> + """Modify the command so that it executes with administrative
> privileges.
> >> +
> >> + Args:
> >> + command: The command to modify.
> >> +
> >> + Returns:
> >> + The modified command that executes with administrative
> privileges.
> >> + """
> >> +
> >> @abstractmethod
> >> def guess_dpdk_remote_dir(self, remote_dir) -> PurePath:
> >> """
> >> @@ -90,17 +105,35 @@ def join_remote_path(self, *args: str | PurePath)
> -> PurePath:
> >> """
> >>
> >> @abstractmethod
> >> - def copy_file(
> >> + def copy_from(
> >> self,
> >> source_file: str | PurePath,
> >> destination_file: str | PurePath,
> >> - source_remote: bool = False,
> >> ) -> None:
> >> + """Copy a file from the remote Node to the local filesystem.
> >> +
> >> + Copy source_file from the remote Node associated with this
> remote
> >> + session to destination_file on the local filesystem.
> >> +
> >> + Args:
> >> + source_file: the file on the remote Node.
> >> + destination_file: a file or directory path on the local
> filesystem.
> >> """
> >> +
> >> + @abstractmethod
> >> + def copy_to(
> >> + self,
> >> + source_file: str | PurePath,
> >> + destination_file: str | PurePath,
> >> + ) -> None:
> >> + """Copy a file from local filesystem to the remote Node.
> >> +
> >> Copy source_file from local filesystem to destination_file
> >> - on the remote Node associated with the remote session.
> >> - If source_remote is True, reverse the direction - copy
> source_file from the
> >> - associated remote Node to destination_file on local storage.
> >> + on the remote Node associated with this remote session.
> >> +
> >> + Args:
> >> + source_file: the file on the local filesystem.
> >> + destination_file: a file or directory path on the remote
> Node.
> >> """
> >>
> >> @abstractmethod
> >> @@ -128,7 +161,7 @@ def extract_remote_tarball(
> >> @abstractmethod
> >> def build_dpdk(
> >> self,
> >> - env_vars: EnvVarsDict,
> >> + env_vars: dict,
> >> meson_args: MesonArgs,
> >> remote_dpdk_dir: str | PurePath,
> >> remote_dpdk_build_dir: str | PurePath,
> >> diff --git a/dts/framework/remote_session/posix_session.py
> b/dts/framework/remote_session/posix_session.py
> >> index d38062e8d6..8ca0acb429 100644
> >> --- a/dts/framework/remote_session/posix_session.py
> >> +++ b/dts/framework/remote_session/posix_session.py
> >> @@ -9,7 +9,7 @@
> >> from framework.config import Architecture
> >> from framework.exception import DPDKBuildError,
> RemoteCommandExecutionError
> >> from framework.settings import SETTINGS
> >> -from framework.utils import EnvVarsDict, MesonArgs
> >> +from framework.utils import MesonArgs
> >>
> >> from .os_session import OSSession
> >>
> >> @@ -34,7 +34,7 @@ def combine_short_options(**opts: bool) -> str:
> >>
> >> def guess_dpdk_remote_dir(self, remote_dir) -> PurePosixPath:
> >> remote_guess = self.join_remote_path(remote_dir, "dpdk-*")
> >> - result = self.remote_session.send_command(f"ls -d
> {remote_guess} | tail -1")
> >> + result = self.send_command(f"ls -d {remote_guess} | tail -1")
> >> return PurePosixPath(result.stdout)
> >>
> >> def get_remote_tmp_dir(self) -> PurePosixPath:
> >> @@ -48,7 +48,7 @@ def get_dpdk_build_env_vars(self, arch: Architecture)
> -> dict:
> >> env_vars = {}
> >> if arch == Architecture.i686:
> >> # find the pkg-config path and store it in
> PKG_CONFIG_LIBDIR
> >> - out = self.remote_session.send_command("find /usr -type d
> -name pkgconfig")
> >> + out = self.send_command("find /usr -type d -name
> pkgconfig")
> >> pkg_path = ""
> >> res_path = out.stdout.split("\r\n")
> >> for cur_path in res_path:
> >> @@ -65,13 +65,19 @@ def get_dpdk_build_env_vars(self, arch:
> Architecture) -> dict:
> >> def join_remote_path(self, *args: str | PurePath) -> PurePosixPath:
> >> return PurePosixPath(*args)
> >>
> >> - def copy_file(
> >> + def copy_from(
> >> self,
> >> source_file: str | PurePath,
> >> destination_file: str | PurePath,
> >> - source_remote: bool = False,
> >> ) -> None:
> >> - self.remote_session.copy_file(source_file, destination_file,
> source_remote)
> >> + self.remote_session.copy_from(source_file, destination_file)
> >> +
> >> + def copy_to(
> >> + self,
> >> + source_file: str | PurePath,
> >> + destination_file: str | PurePath,
> >> + ) -> None:
> >> + self.remote_session.copy_to(source_file, destination_file)
> >>
> >> def remove_remote_dir(
> >> self,
> >> @@ -80,24 +86,24 @@ def remove_remote_dir(
> >> force: bool = True,
> >> ) -> None:
> >> opts = PosixSession.combine_short_options(r=recursive, f=force)
> >> - self.remote_session.send_command(f"rm{opts} {remote_dir_path}")
> >> + self.send_command(f"rm{opts} {remote_dir_path}")
> >>
> >> def extract_remote_tarball(
> >> self,
> >> remote_tarball_path: str | PurePath,
> >> expected_dir: str | PurePath | None = None,
> >> ) -> None:
> >> - self.remote_session.send_command(
> >> + self.send_command(
> >> f"tar xfm {remote_tarball_path} "
> >> f"-C {PurePosixPath(remote_tarball_path).parent}",
> >> 60,
> >> )
> >> if expected_dir:
> >> - self.remote_session.send_command(f"ls {expected_dir}",
> verify=True)
> >> + self.send_command(f"ls {expected_dir}", verify=True)
> >>
> >> def build_dpdk(
> >> self,
> >> - env_vars: EnvVarsDict,
> >> + env_vars: dict,
> >> meson_args: MesonArgs,
> >> remote_dpdk_dir: str | PurePath,
> >> remote_dpdk_build_dir: str | PurePath,
> >> @@ -108,7 +114,7 @@ def build_dpdk(
> >> if rebuild:
> >> # reconfigure, then build
> >> self._logger.info("Reconfiguring DPDK build.")
> >> - self.remote_session.send_command(
> >> + self.send_command(
> >> f"meson configure {meson_args}
> {remote_dpdk_build_dir}",
> >> timeout,
> >> verify=True,
> >> @@ -118,7 +124,7 @@ def build_dpdk(
> >> # fresh build - remove target dir first, then build
> from scratch
> >> self._logger.info("Configuring DPDK build from
> scratch.")
> >> self.remove_remote_dir(remote_dpdk_build_dir)
> >> - self.remote_session.send_command(
> >> + self.send_command(
> >> f"meson setup "
> >> f"{meson_args} {remote_dpdk_dir}
> {remote_dpdk_build_dir}",
> >> timeout,
> >> @@ -127,14 +133,14 @@ def build_dpdk(
> >> )
> >>
> >> self._logger.info("Building DPDK.")
> >> - self.remote_session.send_command(
> >> + self.send_command(
> >> f"ninja -C {remote_dpdk_build_dir}", timeout,
> verify=True, env=env_vars
> >> )
> >> except RemoteCommandExecutionError as e:
> >> raise DPDKBuildError(f"DPDK build failed when doing
> '{e.command}'.")
> >>
> >> def get_dpdk_version(self, build_dir: str | PurePath) -> str:
> >> - out = self.remote_session.send_command(
> >> + out = self.send_command(
> >> f"cat {self.join_remote_path(build_dir, 'VERSION')}",
> verify=True
> >> )
> >> return out.stdout
> >> @@ -146,7 +152,7 @@ def kill_cleanup_dpdk_apps(self, dpdk_prefix_list:
> Iterable[str]) -> None:
> >> # kill and cleanup only if DPDK is running
> >> dpdk_pids = self._get_dpdk_pids(dpdk_runtime_dirs)
> >> for dpdk_pid in dpdk_pids:
> >> - self.remote_session.send_command(f"kill -9
> {dpdk_pid}", 20)
> >> + self.send_command(f"kill -9 {dpdk_pid}", 20)
> >> self._check_dpdk_hugepages(dpdk_runtime_dirs)
> >> self._remove_dpdk_runtime_dirs(dpdk_runtime_dirs)
> >>
> >> @@ -168,7 +174,7 @@ def _list_remote_dirs(self, remote_path: str |
> PurePath) -> list[str] | None:
> >> Return a list of directories of the remote_dir.
> >> If remote_path doesn't exist, return None.
> >> """
> >> - out = self.remote_session.send_command(
> >> + out = self.send_command(
> >> f"ls -l {remote_path} | awk '/^d/ {{print $NF}}'"
> >> ).stdout
> >> if "No such file or directory" in out:
> >> @@ -182,9 +188,7 @@ def _get_dpdk_pids(self, dpdk_runtime_dirs:
> Iterable[str | PurePath]) -> list[in
> >> for dpdk_runtime_dir in dpdk_runtime_dirs:
> >> dpdk_config_file = PurePosixPath(dpdk_runtime_dir,
> "config")
> >> if self._remote_files_exists(dpdk_config_file):
> >> - out = self.remote_session.send_command(
> >> - f"lsof -Fp {dpdk_config_file}"
> >> - ).stdout
> >> + out = self.send_command(f"lsof -Fp
> {dpdk_config_file}").stdout
> >> if out and "No such file or directory" not in out:
> >> for out_line in out.splitlines():
> >> match = re.match(pid_regex, out_line)
> >> @@ -193,7 +197,7 @@ def _get_dpdk_pids(self, dpdk_runtime_dirs:
> Iterable[str | PurePath]) -> list[in
> >> return pids
> >>
> >> def _remote_files_exists(self, remote_path: PurePath) -> bool:
> >> - result = self.remote_session.send_command(f"test -e
> {remote_path}")
> >> + result = self.send_command(f"test -e {remote_path}")
> >> return not result.return_code
> >>
> >> def _check_dpdk_hugepages(
> >> @@ -202,9 +206,7 @@ def _check_dpdk_hugepages(
> >> for dpdk_runtime_dir in dpdk_runtime_dirs:
> >> hugepage_info = PurePosixPath(dpdk_runtime_dir,
> "hugepage_info")
> >> if self._remote_files_exists(hugepage_info):
> >> - out = self.remote_session.send_command(
> >> - f"lsof -Fp {hugepage_info}"
> >> - ).stdout
> >> + out = self.send_command(f"lsof -Fp
> {hugepage_info}").stdout
> >> if out and "No such file or directory" not in out:
> >> self._logger.warning("Some DPDK processes did not
> free hugepages.")
> >>
> self._logger.warning("*******************************************")
> >> diff --git a/dts/framework/remote_session/remote/remote_session.py
> b/dts/framework/remote_session/remote/remote_session.py
> >> index 91dee3cb4f..0647d93de4 100644
> >> --- a/dts/framework/remote_session/remote/remote_session.py
> >> +++ b/dts/framework/remote_session/remote/remote_session.py
> >> @@ -11,7 +11,6 @@
> >> from framework.exception import RemoteCommandExecutionError
> >> from framework.logger import DTSLOG
> >> from framework.settings import SETTINGS
> >> -from framework.utils import EnvVarsDict
> >>
> >>
> >> @dataclasses.dataclass(slots=True, frozen=True)
> >> @@ -89,7 +88,7 @@ def send_command(
> >> command: str,
> >> timeout: float = SETTINGS.timeout,
> >> verify: bool = False,
> >> - env: EnvVarsDict | None = None,
> >> + env: dict | None = None,
> >> ) -> CommandResult:
> >> """
> >> Send a command to the connected node using optional env vars
> >> @@ -114,7 +113,7 @@ def send_command(
> >>
> >> @abstractmethod
> >> def _send_command(
> >> - self, command: str, timeout: float, env: EnvVarsDict | None
> >> + self, command: str, timeout: float, env: dict | None
> >> ) -> CommandResult:
> >> """
> >> Use the underlying protocol to execute the command using
> optional env vars
> >> @@ -141,15 +140,33 @@ def is_alive(self) -> bool:
> >> """
> >>
> >> @abstractmethod
> >> - def copy_file(
> >> + def copy_from(
> >> self,
> >> source_file: str | PurePath,
> >> destination_file: str | PurePath,
> >> - source_remote: bool = False,
> >> ) -> None:
> >> + """Copy a file from the remote Node to the local filesystem.
> >> +
> >> + Copy source_file from the remote Node associated with this
> remote
> >> + session to destination_file on the local filesystem.
> >> +
> >> + Args:
> >> + source_file: the file on the remote Node.
> >> + destination_file: a file or directory path on the local
> filesystem.
> >> """
> >> - Copy source_file from local filesystem to destination_file on
> the remote Node
> >> - associated with the remote session.
> >> - If source_remote is True, reverse the direction - copy
> source_file from the
> >> - associated Node to destination_file on local filesystem.
> >> +
> >> + @abstractmethod
> >> + def copy_to(
> >> + self,
> >> + source_file: str | PurePath,
> >> + destination_file: str | PurePath,
> >> + ) -> None:
> >> + """Copy a file from local filesystem to the remote Node.
> >> +
> >> + Copy source_file from local filesystem to destination_file
> >> + on the remote Node associated with this remote session.
> >> +
> >> + Args:
> >> + source_file: the file on the local filesystem.
> >> + destination_file: a file or directory path on the remote
> Node.
> >> """
> >> diff --git a/dts/framework/remote_session/remote/ssh_session.py
> b/dts/framework/remote_session/remote/ssh_session.py
> >> index 42ff9498a2..8d127f1601 100644
> >> --- a/dts/framework/remote_session/remote/ssh_session.py
> >> +++ b/dts/framework/remote_session/remote/ssh_session.py
> >> @@ -1,29 +1,49 @@
> >> # SPDX-License-Identifier: BSD-3-Clause
> >> -# Copyright(c) 2010-2014 Intel Corporation
> >> -# Copyright(c) 2022-2023 PANTHEON.tech s.r.o.
> >> -# Copyright(c) 2022-2023 University of New Hampshire
> >> +# Copyright(c) 2023 PANTHEON.tech s.r.o.
> >>
> >
> > I've noticed in other patches you've simply appended the copyright for
> PANTHEON.tech to the existing list. Is there a reason you remove the others
> here as well?
> >
>
> It's a rewrite of the file. I'm the only author of the code (i.e.
> neither Intel nor UNH contributed to the Fabric code) so I left only
> us there. I'm not sure this is the right way to do this, but it made
> sense to me. I have no problem with leaving all parties in.
>
>
It also makes sense to me. I'm also not completely sure it is the right
way to handle it, but the way I see it, because the copyrights exist in
every file, it makes sense that they would be in the scope of that file.
> >>
> >> -import time
> >> +import socket
> >> +import traceback
> >> from pathlib import PurePath
> >>
> >> -import pexpect # type: ignore
> >> -from pexpect import pxssh # type: ignore
> >> +from fabric import Connection # type: ignore[import]
> >> +from invoke.exceptions import ( # type: ignore[import]
> >> + CommandTimedOut,
> >> + ThreadException,
> >> + UnexpectedExit,
> >> +)
> >> +from paramiko.ssh_exception import ( # type: ignore[import]
> >> + AuthenticationException,
> >> + BadHostKeyException,
> >> + NoValidConnectionsError,
> >> + SSHException,
> >> +)
> >>
> >> from framework.config import NodeConfiguration
> >> from framework.exception import SSHConnectionError,
> SSHSessionDeadError, SSHTimeoutError
> >> from framework.logger import DTSLOG
> >> -from framework.utils import GREEN, RED, EnvVarsDict
> >>
> >> from .remote_session import CommandResult, RemoteSession
> >>
> >>
> >> class SSHSession(RemoteSession):
> >> - """
> >> - Module for creating Pexpect SSH remote sessions.
> >> + """A persistent SSH connection to a remote Node.
> >> +
> >> + The connection is implemented with the Fabric Python library.
> >> +
> >> + Args:
> >> + node_config: The configuration of the Node to connect to.
> >> + session_name: The name of the session.
> >> + logger: The logger used for logging.
> >> + This should be passed from the parent OSSession.
> >> +
> >> + Attributes:
> >> + session: The underlying Fabric SSH connection.
> >> +
> >> + Raises:
> >> + SSHConnectionError: The connection cannot be established.
> >> """
> >>
> >> - session: pxssh.pxssh
> >> - magic_prompt: str
> >> + session: Connection
> >>
> >> def __init__(
> >> self,
> >> @@ -31,218 +51,91 @@ def __init__(
> >> session_name: str,
> >> logger: DTSLOG,
> >> ):
> >> - self.magic_prompt = "MAGIC PROMPT"
> >> super(SSHSession, self).__init__(node_config, session_name,
> logger)
> >>
> >> def _connect(self) -> None:
> >> - """
> >> - Create connection to assigned node.
> >> - """
> >> + errors = []
> >> retry_attempts = 10
> >> login_timeout = 20 if self.port else 10
> >> - password_regex = (
> >> - r"(?i)(?:password:)|(?:passphrase for key)|(?i)(password
> for .+:)"
> >> - )
> >> - try:
> >> - for retry_attempt in range(retry_attempts):
> >> - self.session = pxssh.pxssh(encoding="utf-8")
> >> - try:
> >> - self.session.login(
> >> - self.ip,
> >> - self.username,
> >> - self.password,
> >> - original_prompt="[$#>]",
> >> - port=self.port,
> >> - login_timeout=login_timeout,
> >> - password_regex=password_regex,
> >> - )
> >> - break
> >> - except Exception as e:
> >> - self._logger.warning(e)
> >> - time.sleep(2)
> >> - self._logger.info(
> >> - f"Retrying connection: retry number
> {retry_attempt + 1}."
> >> - )
> >> - else:
> >> - raise Exception(f"Connection to {self.hostname}
> failed")
> >> -
> >> - self.send_expect("stty -echo", "#")
> >> - self.send_expect("stty columns 1000", "#")
> >> - self.send_expect("bind 'set enable-bracketed-paste off'",
> "#")
> >> - except Exception as e:
> >> - self._logger.error(RED(str(e)))
> >> - if getattr(self, "port", None):
> >> - suggestion = (
> >> - f"\nSuggestion: Check if the firewall on
> {self.hostname} is "
> >> - f"stopped.\n"
> >> + for retry_attempt in range(retry_attempts):
> >> + try:
> >> + self.session = Connection(
> >> + self.ip,
> >> + user=self.username,
> >> + port=self.port,
> >> + connect_kwargs={"password": self.password},
> >> + connect_timeout=login_timeout,
> >> )
> >> - self._logger.info(GREEN(suggestion))
> >> -
> >> - raise SSHConnectionError(self.hostname)
> >> + self.session.open()
> >>
> >> - def send_expect(
> >> - self, command: str, prompt: str, timeout: float = 15, verify:
> bool = False
> >> - ) -> str | int:
> >> - try:
> >> - ret = self.send_expect_base(command, prompt, timeout)
> >> - if verify:
> >> - ret_status = self.send_expect_base("echo $?", prompt,
> timeout)
> >> - try:
> >> - retval = int(ret_status)
> >> - if retval:
> >> - self._logger.error(f"Command: {command}
> failure!")
> >> - self._logger.error(ret)
> >> - return retval
> >> - else:
> >> - return ret
> >> - except ValueError:
> >> - return ret
> >> - else:
> >> - return ret
> >> - except Exception as e:
> >> - self._logger.error(
> >> - f"Exception happened in [{command}] and output is "
> >> - f"[{self._get_output()}]"
> >> - )
> >> - raise e
> >> -
> >> - def send_expect_base(self, command: str, prompt: str, timeout:
> float) -> str:
> >> - self._clean_session()
> >> - original_prompt = self.session.PROMPT
> >> - self.session.PROMPT = prompt
> >> - self._send_line(command)
> >> - self._prompt(command, timeout)
> >> -
> >> - before = self._get_output()
> >> - self.session.PROMPT = original_prompt
> >> - return before
> >> -
> >> - def _clean_session(self) -> None:
> >> - self.session.PROMPT = self.magic_prompt
> >> - self.get_output(timeout=0.01)
> >> - self.session.PROMPT = self.session.UNIQUE_PROMPT
> >> -
> >> - def _send_line(self, command: str) -> None:
> >> - if not self.is_alive():
> >> - raise SSHSessionDeadError(self.hostname)
> >> - if len(command) == 2 and command.startswith("^"):
> >> - self.session.sendcontrol(command[1])
> >> - else:
> >> - self.session.sendline(command)
> >> + except (ValueError, BadHostKeyException,
> AuthenticationException) as e:
> >> + self._logger.exception(e)
> >> + raise SSHConnectionError(self.hostname) from e
> >>
> >> - def _prompt(self, command: str, timeout: float) -> None:
> >> - if not self.session.prompt(timeout):
> >> - raise SSHTimeoutError(command, self._get_output()) from
> None
> >> + except (NoValidConnectionsError, socket.error,
> SSHException) as e:
> >> + self._logger.debug(traceback.format_exc())
> >> + self._logger.warning(e)
> >>
> >> - def get_output(self, timeout: float = 15) -> str:
> >> - """
> >> - Get all output before timeout
> >> - """
> >> - try:
> >> - self.session.prompt(timeout)
> >> - except Exception:
> >> - pass
> >> -
> >> - before = self._get_output()
> >> - self._flush()
> >> -
> >> - return before
> >> + error = repr(e)
> >> + if error not in errors:
> >> + errors.append(error)
> >>
> >> - def _get_output(self) -> str:
> >> - if not self.is_alive():
> >> - raise SSHSessionDeadError(self.hostname)
> >> - before = self.session.before.rsplit("\r\n", 1)[0]
> >> - if before == "[PEXPECT]":
> >> - return ""
> >> - return before
> >> + self._logger.info(
> >> + f"Retrying connection: retry number {retry_attempt
> + 1}."
> >> + )
> >>
> >> - def _flush(self) -> None:
> >> - """
> >> - Clear all session buffer
> >> - """
> >> - self.session.buffer = ""
> >> - self.session.before = ""
> >> + else:
> >> + break
> >> + else:
> >> + raise SSHConnectionError(self.hostname, errors)
> >>
> >> def is_alive(self) -> bool:
> >> - return self.session.isalive()
> >> + return self.session.is_connected
> >>
> >> def _send_command(
> >> - self, command: str, timeout: float, env: EnvVarsDict | None
> >> + self, command: str, timeout: float, env: dict | None
> >> ) -> CommandResult:
> >> - output = self._send_command_get_output(command, timeout, env)
> >> - return_code = int(self._send_command_get_output("echo $?",
> timeout, None))
> >> + """Send a command and return the result of the execution.
> >>
> >> - # we're capturing only stdout
> >> - return CommandResult(self.name, command, output, "",
> return_code)
> >> + Args:
> >> + command: The command to execute.
> >> + timeout: Wait at most this many seconds for the execution
> to complete.
> >> + env: Extra environment variables that will be used in
> command execution.
> >>
> >> - def _send_command_get_output(
> >> - self, command: str, timeout: float, env: EnvVarsDict | None
> >> - ) -> str:
> >> + Raises:
> >> + SSHSessionDeadError: The session died while executing the
> command.
> >> + SSHTimeoutError: The command execution timed out.
> >> + """
> >> try:
> >> - self._clean_session()
> >> - if env:
> >> - command = f"{env} {command}"
> >> - self._send_line(command)
> >> - except Exception as e:
> >> - raise e
> >> + output = self.session.run(
> >> + command, env=env, warn=True, hide=True, timeout=timeout
> >> + )
> >>
> >> - output = self.get_output(timeout=timeout)
> >> - self.session.PROMPT = self.session.UNIQUE_PROMPT
> >> - self.session.prompt(0.1)
> >> + except (UnexpectedExit, ThreadException) as e:
> >> + self._logger.exception(e)
> >> + raise SSHSessionDeadError(self.hostname) from e
> >>
> >> - return output
> >> + except CommandTimedOut as e:
> >> + self._logger.exception(e)
> >> + raise SSHTimeoutError(command, e.result.stderr) from e
> >>
> >> - def _close(self, force: bool = False) -> None:
> >> - if force is True:
> >> - self.session.close()
> >> - else:
> >> - if self.is_alive():
> >> - self.session.logout()
> >> + return CommandResult(
> >> + self.name, command, output.stdout, output.stderr,
> output.return_code
> >> + )
> >>
> >> - def copy_file(
> >> + def copy_from(
> >> self,
> >> source_file: str | PurePath,
> >> destination_file: str | PurePath,
> >> - source_remote: bool = False,
> >> ) -> None:
> >> - """
> >> - Send a local file to a remote host.
> >> - """
> >> - if source_remote:
> >> - source_file = f"{self.username}@{self.ip}:{source_file}"
> >> - else:
> >> - destination_file = f"{self.username}@
> {self.ip}:{destination_file}"
> >> + self.session.get(str(destination_file), str(source_file))
> >>
> >> - port = ""
> >> - if self.port:
> >> - port = f" -P {self.port}"
> >> -
> >> - command = (
> >> - f"scp -v{port} -o NoHostAuthenticationForLocalhost=yes"
> >> - f" {source_file} {destination_file}"
> >> - )
> >> -
> >> - self._spawn_scp(command)
> >> + def copy_to(
> >> + self,
> >> + source_file: str | PurePath,
> >> + destination_file: str | PurePath,
> >> + ) -> None:
> >> + self.session.put(str(source_file), str(destination_file))
> >>
> >> - def _spawn_scp(self, scp_cmd: str) -> None:
> >> - """
> >> - Transfer a file with SCP
> >> - """
> >> - self._logger.info(scp_cmd)
> >> - p: pexpect.spawn = pexpect.spawn(scp_cmd)
> >> - time.sleep(0.5)
> >> - ssh_newkey: str = "Are you sure you want to continue
> connecting"
> >> - i: int = p.expect(
> >> - [ssh_newkey, "[pP]assword", "# ", pexpect.EOF,
> pexpect.TIMEOUT], 120
> >> - )
> >> - if i == 0: # add once in trust list
> >> - p.sendline("yes")
> >> - i = p.expect([ssh_newkey, "[pP]assword", pexpect.EOF], 2)
> >> -
> >> - if i == 1:
> >> - time.sleep(0.5)
> >> - p.sendline(self.password)
> >> - p.expect("Exit status 0", 60)
> >> - if i == 4:
> >> - self._logger.error("SCP TIMEOUT error %d" % i)
> >> - p.close()
> >> + def _close(self, force: bool = False) -> None:
> >> + self.session.close()
> >> diff --git a/dts/framework/testbed_model/sut_node.py
> b/dts/framework/testbed_model/sut_node.py
> >> index 2b2b50d982..9dbc390848 100644
> >> --- a/dts/framework/testbed_model/sut_node.py
> >> +++ b/dts/framework/testbed_model/sut_node.py
> >> @@ -10,7 +10,7 @@
> >> from framework.config import BuildTargetConfiguration,
> NodeConfiguration
> >> from framework.remote_session import CommandResult, OSSession
> >> from framework.settings import SETTINGS
> >> -from framework.utils import EnvVarsDict, MesonArgs
> >> +from framework.utils import MesonArgs
> >>
> >> from .hw import LogicalCoreCount, LogicalCoreList, VirtualDevice
> >> from .node import Node
> >> @@ -27,7 +27,7 @@ class SutNode(Node):
> >> _dpdk_prefix_list: list[str]
> >> _dpdk_timestamp: str
> >> _build_target_config: BuildTargetConfiguration | None
> >> - _env_vars: EnvVarsDict
> >> + _env_vars: dict
> >> _remote_tmp_dir: PurePath
> >> __remote_dpdk_dir: PurePath | None
> >> _dpdk_version: str | None
> >> @@ -38,7 +38,7 @@ def __init__(self, node_config: NodeConfiguration):
> >> super(SutNode, self).__init__(node_config)
> >> self._dpdk_prefix_list = []
> >> self._build_target_config = None
> >> - self._env_vars = EnvVarsDict()
> >> + self._env_vars = {}
> >> self._remote_tmp_dir = self.main_session.get_remote_tmp_dir()
> >> self.__remote_dpdk_dir = None
> >> self._dpdk_version = None
> >> @@ -94,7 +94,7 @@ def _configure_build_target(
> >> """
> >> Populate common environment variables and set build target
> config.
> >> """
> >> - self._env_vars = EnvVarsDict()
> >> + self._env_vars = {}
> >> self._build_target_config = build_target_config
> >> self._env_vars.update(
> >>
> self.main_session.get_dpdk_build_env_vars(build_target_config.arch)
> >> @@ -112,7 +112,7 @@ def _copy_dpdk_tarball(self) -> None:
> >> Copy to and extract DPDK tarball on the SUT node.
> >> """
> >> self._logger.info("Copying DPDK tarball to SUT.")
> >> - self.main_session.copy_file(SETTINGS.dpdk_tarball_path,
> self._remote_tmp_dir)
> >> + self.main_session.copy_to(SETTINGS.dpdk_tarball_path,
> self._remote_tmp_dir)
> >>
> >> # construct remote tarball path
> >> # the basename is the same on local host and on remote Node
> >> @@ -259,7 +259,7 @@ def run_dpdk_app(
> >> Run DPDK application on the remote node.
> >> """
> >> return self.main_session.send_command(
> >> - f"{app_path} {eal_args}", timeout, verify=True
> >> + f"{app_path} {eal_args}", timeout, privileged=True,
> verify=True
> >> )
> >>
> >>
> >> diff --git a/dts/framework/utils.py b/dts/framework/utils.py
> >> index 55e0b0ef0e..8cfbc6a29d 100644
> >> --- a/dts/framework/utils.py
> >> +++ b/dts/framework/utils.py
> >> @@ -42,19 +42,10 @@ def expand_range(range_str: str) -> list[int]:
> >> return expanded_range
> >>
> >>
> >> -def GREEN(text: str) -> str:
> >> - return f"\u001B[32;1m{str(text)}\u001B[0m"
> >> -
> >> -
> >> def RED(text: str) -> str:
> >> return f"\u001B[31;1m{str(text)}\u001B[0m"
> >>
> >>
> >> -class EnvVarsDict(dict):
> >> - def __str__(self) -> str:
> >> - return " ".join(["=".join(item) for item in self.items()])
> >> -
> >> -
> >> class MesonArgs(object):
> >> """
> >> Aggregate the arguments needed to build DPDK:
> >> diff --git a/dts/poetry.lock b/dts/poetry.lock
> >> index 0b2a007d4d..2438f337cd 100644
> >> --- a/dts/poetry.lock
> >> +++ b/dts/poetry.lock
> >> @@ -12,6 +12,18 @@ docs = ["furo", "sphinx", "zope.interface",
> "sphinx-notfound-page"]
> >> tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest
> (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins",
> "zope.interface", "cloudpickle"]
> >> tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler",
> "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins",
> "cloudpickle"]
> >>
> >> +[[package]]
> >> +name = "bcrypt"
> >> +version = "4.0.1"
> >> +description = "Modern password hashing for your software and your
> servers"
> >> +category = "main"
> >> +optional = false
> >> +python-versions = ">=3.6"
> >> +
> >> +[package.extras]
> >> +tests = ["pytest (>=3.2.1,!=3.3.0)"]
> >> +typecheck = ["mypy"]
> >> +
> >> [[package]]
> >> name = "black"
> >> version = "22.10.0"
> >> @@ -33,6 +45,17 @@ d = ["aiohttp (>=3.7.4)"]
> >> jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
> >> uvloop = ["uvloop (>=0.15.2)"]
> >>
> >> +[[package]]
> >> +name = "cffi"
> >> +version = "1.15.1"
> >> +description = "Foreign Function Interface for Python calling C code."
> >> +category = "main"
> >> +optional = false
> >> +python-versions = "*"
> >> +
> >> +[package.dependencies]
> >> +pycparser = "*"
> >> +
> >> [[package]]
> >> name = "click"
> >> version = "8.1.3"
> >> @@ -52,6 +75,52 @@ category = "dev"
> >> optional = false
> >> python-versions =
> "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
> >>
> >> +[[package]]
> >> +name = "cryptography"
> >> +version = "40.0.2"
> >> +description = "cryptography is a package which provides cryptographic
> recipes and primitives to Python developers."
> >> +category = "main"
> >> +optional = false
> >> +python-versions = ">=3.6"
> >> +
> >> +[package.dependencies]
> >> +cffi = ">=1.12"
> >> +
> >> +[package.extras]
> >> +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
> >> +docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)",
> "sphinxcontrib-spelling (>=4.0.1)"]
> >> +pep8test = ["black", "ruff", "mypy", "check-manifest"]
> >> +sdist = ["setuptools-rust (>=0.11.4)"]
> >> +ssh = ["bcrypt (>=3.1.5)"]
> >> +test = ["pytest (>=6.2.0)", "pytest-shard (>=0.1.2)",
> "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist",
> "pretend", "iso8601"]
> >> +test-randomorder = ["pytest-randomly"]
> >> +tox = ["tox"]
> >> +
> >> +[[package]]
> >> +name = "fabric"
> >> +version = "2.7.1"
> >> +description = "High level SSH command execution"
> >> +category = "main"
> >> +optional = false
> >> +python-versions = "*"
> >> +
> >> +[package.dependencies]
> >> +invoke = ">=1.3,<2.0"
> >> +paramiko = ">=2.4"
> >> +pathlib2 = "*"
> >> +
> >> +[package.extras]
> >> +pytest = ["mock (>=2.0.0,<3.0)", "pytest (>=3.2.5,<4.0)"]
> >> +testing = ["mock (>=2.0.0,<3.0)"]
> >> +
> >> +[[package]]
> >> +name = "invoke"
> >> +version = "1.7.3"
> >> +description = "Pythonic task execution"
> >> +category = "main"
> >> +optional = false
> >> +python-versions = "*"
> >> +
> >> [[package]]
> >> name = "isort"
> >> version = "5.10.1"
> >> @@ -136,23 +205,41 @@ optional = false
> >> python-versions = "*"
> >>
> >> [[package]]
> >> -name = "pathspec"
> >> -version = "0.10.1"
> >> -description = "Utility library for gitignore style pattern matching of
> file paths."
> >> -category = "dev"
> >> +name = "paramiko"
> >> +version = "3.1.0"
> >> +description = "SSH2 protocol library"
> >> +category = "main"
> >> optional = false
> >> -python-versions = ">=3.7"
> >> +python-versions = ">=3.6"
> >> +
> >> +[package.dependencies]
> >> +bcrypt = ">=3.2"
> >> +cryptography = ">=3.3"
> >> +pynacl = ">=1.5"
> >> +
> >> +[package.extras]
> >> +all = ["pyasn1 (>=0.1.7)", "invoke (>=2.0)", "gssapi (>=1.4.1)",
> "pywin32 (>=2.1.8)"]
> >> +gssapi = ["pyasn1 (>=0.1.7)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"]
> >> +invoke = ["invoke (>=2.0)"]
> >>
> >> [[package]]
> >> -name = "pexpect"
> >> -version = "4.8.0"
> >> -description = "Pexpect allows easy control of interactive console
> applications."
> >> +name = "pathlib2"
> >> +version = "2.3.7.post1"
> >> +description = "Object-oriented filesystem paths"
> >> category = "main"
> >> optional = false
> >> python-versions = "*"
> >>
> >> [package.dependencies]
> >> -ptyprocess = ">=0.5"
> >> +six = "*"
> >> +
> >> +[[package]]
> >> +name = "pathspec"
> >> +version = "0.10.1"
> >> +description = "Utility library for gitignore style pattern matching of
> file paths."
> >> +category = "dev"
> >> +optional = false
> >> +python-versions = ">=3.7"
> >>
> >> [[package]]
> >> name = "platformdirs"
> >> @@ -166,14 +253,6 @@ python-versions = ">=3.7"
> >> docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)",
> "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"]
> >> test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock
> (>=3.6)", "pytest (>=6)"]
> >>
> >> -[[package]]
> >> -name = "ptyprocess"
> >> -version = "0.7.0"
> >> -description = "Run a subprocess in a pseudo terminal"
> >> -category = "main"
> >> -optional = false
> >> -python-versions = "*"
> >> -
> >> [[package]]
> >> name = "pycodestyle"
> >> version = "2.9.1"
> >> @@ -182,6 +261,14 @@ category = "dev"
> >> optional = false
> >> python-versions = ">=3.6"
> >>
> >> +[[package]]
> >> +name = "pycparser"
> >> +version = "2.21"
> >> +description = "C parser in Python"
> >> +category = "main"
> >> +optional = false
> >> +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
> >> +
> >> [[package]]
> >> name = "pydocstyle"
> >> version = "6.1.1"
> >> @@ -228,6 +315,21 @@ tests = ["pytest (>=7.1.2)", "pytest-mypy",
> "eradicate (>=2.0.0)", "radon (>=5.1
> >> toml = ["toml (>=0.10.2)"]
> >> vulture = ["vulture"]
> >>
> >> +[[package]]
> >> +name = "pynacl"
> >> +version = "1.5.0"
> >> +description = "Python binding to the Networking and Cryptography
> (NaCl) library"
> >> +category = "main"
> >> +optional = false
> >> +python-versions = ">=3.6"
> >> +
> >> +[package.dependencies]
> >> +cffi = ">=1.4.1"
> >> +
> >> +[package.extras]
> >> +docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"]
> >> +tests = ["pytest (>=3.2.1,!=3.3.0)", "hypothesis (>=3.27.0)"]
> >> +
> >> [[package]]
> >> name = "pyrsistent"
> >> version = "0.19.1"
> >> @@ -244,6 +346,14 @@ category = "main"
> >> optional = false
> >> python-versions = ">=3.6"
> >>
> >> +[[package]]
> >> +name = "six"
> >> +version = "1.16.0"
> >> +description = "Python 2 and 3 compatibility utilities"
> >> +category = "main"
> >> +optional = false
> >> +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
> >> +
> >> [[package]]
> >> name = "snowballstemmer"
> >> version = "2.2.0"
> >> @@ -299,13 +409,18 @@ jsonschema = ">=4,<5"
> >> [metadata]
> >> lock-version = "1.1"
> >> python-versions = "^3.10"
> >> -content-hash =
> "a0f040b07fc6ce4deb0be078b9a88c2a465cb6bccb9e260a67e92c2403e2319f"
> >> +content-hash =
> "719c43bcaa5d181921debda884f8f714063df0b2336d61e9f64ecab034e8b139"
> >>
> >> [metadata.files]
> >> attrs = []
> >> +bcrypt = []
> >> black = []
> >> +cffi = []
> >> click = []
> >> colorama = []
> >> +cryptography = []
> >> +fabric = []
> >> +invoke = []
> >> isort = []
> >> jsonpatch = []
> >> jsonpointer = []
> >> @@ -313,22 +428,22 @@ jsonschema = []
> >> mccabe = []
> >> mypy = []
> >> mypy-extensions = []
> >> +paramiko = []
> >> +pathlib2 = []
> >> pathspec = []
> >> -pexpect = [
> >> - {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash =
> "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"},
> >> - {file = "pexpect-4.8.0.tar.gz", hash =
> "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"},
> >> -]
> >> platformdirs = [
> >> {file = "platformdirs-2.5.2-py3-none-any.whl", hash =
> "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
> >> {file = "platformdirs-2.5.2.tar.gz", hash =
> "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},
> >> ]
> >> -ptyprocess = []
> >> pycodestyle = []
> >> +pycparser = []
> >> pydocstyle = []
> >> pyflakes = []
> >> pylama = []
> >> +pynacl = []
> >> pyrsistent = []
> >> pyyaml = []
> >> +six = []
> >> snowballstemmer = []
> >> toml = []
> >> tomli = []
> >> diff --git a/dts/pyproject.toml b/dts/pyproject.toml
> >> index a136c91e5e..50bcdb327a 100644
> >> --- a/dts/pyproject.toml
> >> +++ b/dts/pyproject.toml
> >> @@ -9,10 +9,10 @@ authors = ["Owen Hilyard <ohilyard@iol.unh.edu>", "
> dts@dpdk.org"]
> >>
> >> [tool.poetry.dependencies]
> >> python = "^3.10"
> >> -pexpect = "^4.8.0"
> >> warlock = "^2.0.1"
> >> PyYAML = "^6.0"
> >> types-PyYAML = "^6.0.8"
> >> +fabric = "^2.7.1"
> >>
> >> [tool.poetry.dev-dependencies]
> >> mypy = "^0.961"
> >> --
> >> 2.30.2
> >>
>
Acked-by: Jeremy Spewock <jspewock@iol.unh.edu>
[-- Attachment #2: Type: text/html, Size: 72165 bytes --]
^ permalink raw reply [flat|nested] 21+ messages in thread
* [PATCH v3] dts: replace pexpect with fabric
2023-04-24 13:35 ` [PATCH v2] " Juraj Linkeš
2023-04-28 19:03 ` Jeremy Spewock
@ 2023-06-09 9:46 ` Juraj Linkeš
2023-06-21 18:33 ` Jeremy Spewock
2023-07-09 1:45 ` Patrick Robb
1 sibling, 2 replies; 21+ messages in thread
From: Juraj Linkeš @ 2023-06-09 9:46 UTC (permalink / raw)
To: thomas, Honnappa.Nagarahalli, lijuan.tu, wathsala.vithanage,
jspewock, probb
Cc: dev, Juraj Linkeš
Pexpect is not a dedicated SSH connection library while Fabric is. With
Fabric, all SSH-related logic is provided and we can just focus on
what's DTS specific.
Signed-off-by: Juraj Linkeš <juraj.linkes@pantheon.tech>
---
Notes:
v3: updated passwordless sudo setup on Linux
doc/guides/tools/dts.rst | 29 +-
dts/conf.yaml | 2 +-
dts/framework/exception.py | 10 +-
dts/framework/remote_session/linux_session.py | 31 +-
dts/framework/remote_session/os_session.py | 51 +++-
dts/framework/remote_session/posix_session.py | 48 +--
.../remote_session/remote/remote_session.py | 35 ++-
.../remote_session/remote/ssh_session.py | 287 ++++++------------
dts/framework/testbed_model/sut_node.py | 12 +-
dts/framework/utils.py | 9 -
dts/poetry.lock | 161 ++++++++--
dts/pyproject.toml | 2 +-
12 files changed, 376 insertions(+), 301 deletions(-)
diff --git a/doc/guides/tools/dts.rst b/doc/guides/tools/dts.rst
index ebd6dceb6a..c7b31623e4 100644
--- a/doc/guides/tools/dts.rst
+++ b/doc/guides/tools/dts.rst
@@ -95,9 +95,14 @@ Setting up DTS environment
#. **SSH Connection**
- DTS uses Python pexpect for SSH connections between DTS environment and the other hosts.
- The pexpect implementation is a wrapper around the ssh command in the DTS environment.
- This means it'll use the SSH agent providing the ssh command and its keys.
+ DTS uses the Fabric Python library for SSH connections between DTS environment
+ and the other hosts.
+ The authentication method used is pubkey authentication.
+ Fabric tries to use a passed key/certificate,
+ then any key it can acquire through an SSH agent,
+ then any "id_rsa", "id_dsa" or "id_ecdsa" key discoverable in ``~/.ssh/``
+ (with any matching OpenSSH-style certificates).
+ DTS doesn't pass any keys, so Fabric tries to use the other two methods.
Setting up System Under Test
@@ -132,6 +137,21 @@ There are two areas that need to be set up on a System Under Test:
It's possible to use the hugepage configuration already present on the SUT.
If you wish to do so, don't specify the hugepage configuration in the DTS config file.
+#. **User with administrator privileges**
+
+.. _sut_admin_user:
+
+ DTS needs administrator privileges to run DPDK applications (such as testpmd) on the SUT.
+ The SUT user must be able to run commands in privileged mode without asking for a password.
+ On most Linux distributions, it's a matter of setting up passwordless sudo:
+
+ #. Run ``sudo visudo`` and check that it contains ``%sudo ALL=(ALL:ALL) NOPASSWD:ALL``.
+
+ #. Add the SUT user to the sudo group with:
+
+ .. code-block:: console
+
+ sudo usermod -aG sudo <sut_user>
Running DTS
-----------
@@ -151,7 +171,8 @@ which is a template that illustrates what can be configured in DTS:
:start-at: executions:
-The user must be root or any other user with prompt starting with ``#``.
+The user must have :ref:`administrator privileges <sut_admin_user>`
+which don't require password authentication.
The other fields are mostly self-explanatory
and documented in more detail in ``dts/framework/config/conf_yaml_schema.json``.
diff --git a/dts/conf.yaml b/dts/conf.yaml
index a9bd8a3ecf..129801d87c 100644
--- a/dts/conf.yaml
+++ b/dts/conf.yaml
@@ -16,7 +16,7 @@ executions:
nodes:
- name: "SUT 1"
hostname: sut1.change.me.localhost
- user: root
+ user: dtsuser
arch: x86_64
os: linux
lcores: ""
diff --git a/dts/framework/exception.py b/dts/framework/exception.py
index ca353d98fc..44ff4e979a 100644
--- a/dts/framework/exception.py
+++ b/dts/framework/exception.py
@@ -62,13 +62,19 @@ class SSHConnectionError(DTSError):
"""
host: str
+ errors: list[str]
severity: ClassVar[ErrorSeverity] = ErrorSeverity.SSH_ERR
- def __init__(self, host: str):
+ def __init__(self, host: str, errors: list[str] | None = None):
self.host = host
+ self.errors = [] if errors is None else errors
def __str__(self) -> str:
- return f"Error trying to connect with {self.host}"
+ message = f"Error trying to connect with {self.host}."
+ if self.errors:
+ message += f" Errors encountered while retrying: {', '.join(self.errors)}"
+
+ return message
class SSHSessionDeadError(DTSError):
diff --git a/dts/framework/remote_session/linux_session.py b/dts/framework/remote_session/linux_session.py
index a1e3bc3a92..f13f399121 100644
--- a/dts/framework/remote_session/linux_session.py
+++ b/dts/framework/remote_session/linux_session.py
@@ -14,10 +14,11 @@ class LinuxSession(PosixSession):
The implementation of non-Posix compliant parts of Linux remote sessions.
"""
+ def _get_privileged_command(self, command: str) -> str:
+ return f"sudo -- sh -c '{command}'"
+
def get_remote_cpus(self, use_first_core: bool) -> list[LogicalCore]:
- cpu_info = self.remote_session.send_command(
- "lscpu -p=CPU,CORE,SOCKET,NODE|grep -v \\#"
- ).stdout
+ cpu_info = self.send_command("lscpu -p=CPU,CORE,SOCKET,NODE|grep -v \\#").stdout
lcores = []
for cpu_line in cpu_info.splitlines():
lcore, core, socket, node = map(int, cpu_line.split(","))
@@ -45,20 +46,20 @@ def setup_hugepages(self, hugepage_amount: int, force_first_numa: bool) -> None:
self._mount_huge_pages()
def _get_hugepage_size(self) -> int:
- hugepage_size = self.remote_session.send_command(
+ hugepage_size = self.send_command(
"awk '/Hugepagesize/ {print $2}' /proc/meminfo"
).stdout
return int(hugepage_size)
def _get_hugepages_total(self) -> int:
- hugepages_total = self.remote_session.send_command(
+ hugepages_total = self.send_command(
"awk '/HugePages_Total/ { print $2 }' /proc/meminfo"
).stdout
return int(hugepages_total)
def _get_numa_nodes(self) -> list[int]:
try:
- numa_count = self.remote_session.send_command(
+ numa_count = self.send_command(
"cat /sys/devices/system/node/online", verify=True
).stdout
numa_range = expand_range(numa_count)
@@ -70,14 +71,12 @@ def _get_numa_nodes(self) -> list[int]:
def _mount_huge_pages(self) -> None:
self._logger.info("Re-mounting Hugepages.")
hugapge_fs_cmd = "awk '/hugetlbfs/ { print $2 }' /proc/mounts"
- self.remote_session.send_command(f"umount $({hugapge_fs_cmd})")
- result = self.remote_session.send_command(hugapge_fs_cmd)
+ self.send_command(f"umount $({hugapge_fs_cmd})")
+ result = self.send_command(hugapge_fs_cmd)
if result.stdout == "":
remote_mount_path = "/mnt/huge"
- self.remote_session.send_command(f"mkdir -p {remote_mount_path}")
- self.remote_session.send_command(
- f"mount -t hugetlbfs nodev {remote_mount_path}"
- )
+ self.send_command(f"mkdir -p {remote_mount_path}")
+ self.send_command(f"mount -t hugetlbfs nodev {remote_mount_path}")
def _supports_numa(self) -> bool:
# the system supports numa if self._numa_nodes is non-empty and there are more
@@ -94,14 +93,12 @@ def _configure_huge_pages(
)
if force_first_numa and self._supports_numa():
# clear non-numa hugepages
- self.remote_session.send_command(
- f"echo 0 | sudo tee {hugepage_config_path}"
- )
+ self.send_command(f"echo 0 | tee {hugepage_config_path}", privileged=True)
hugepage_config_path = (
f"/sys/devices/system/node/node{self._numa_nodes[0]}/hugepages"
f"/hugepages-{size}kB/nr_hugepages"
)
- self.remote_session.send_command(
- f"echo {amount} | sudo tee {hugepage_config_path}"
+ self.send_command(
+ f"echo {amount} | tee {hugepage_config_path}", privileged=True
)
diff --git a/dts/framework/remote_session/os_session.py b/dts/framework/remote_session/os_session.py
index 4c48ae2567..bfd70bd480 100644
--- a/dts/framework/remote_session/os_session.py
+++ b/dts/framework/remote_session/os_session.py
@@ -10,7 +10,7 @@
from framework.logger import DTSLOG
from framework.settings import SETTINGS
from framework.testbed_model import LogicalCore
-from framework.utils import EnvVarsDict, MesonArgs
+from framework.utils import MesonArgs
from .remote import CommandResult, RemoteSession, create_remote_session
@@ -53,17 +53,32 @@ def is_alive(self) -> bool:
def send_command(
self,
command: str,
- timeout: float,
+ timeout: float = SETTINGS.timeout,
+ privileged: bool = False,
verify: bool = False,
- env: EnvVarsDict | None = None,
+ env: dict | None = None,
) -> CommandResult:
"""
An all-purpose API in case the command to be executed is already
OS-agnostic, such as when the path to the executed command has been
constructed beforehand.
"""
+ if privileged:
+ command = self._get_privileged_command(command)
+
return self.remote_session.send_command(command, timeout, verify, env)
+ @abstractmethod
+ def _get_privileged_command(self, command: str) -> str:
+ """Modify the command so that it executes with administrative privileges.
+
+ Args:
+ command: The command to modify.
+
+ Returns:
+ The modified command that executes with administrative privileges.
+ """
+
@abstractmethod
def guess_dpdk_remote_dir(self, remote_dir) -> PurePath:
"""
@@ -90,17 +105,35 @@ def join_remote_path(self, *args: str | PurePath) -> PurePath:
"""
@abstractmethod
- def copy_file(
+ def copy_from(
self,
source_file: str | PurePath,
destination_file: str | PurePath,
- source_remote: bool = False,
) -> None:
+ """Copy a file from the remote Node to the local filesystem.
+
+ Copy source_file from the remote Node associated with this remote
+ session to destination_file on the local filesystem.
+
+ Args:
+ source_file: the file on the remote Node.
+ destination_file: a file or directory path on the local filesystem.
"""
+
+ @abstractmethod
+ def copy_to(
+ self,
+ source_file: str | PurePath,
+ destination_file: str | PurePath,
+ ) -> None:
+ """Copy a file from local filesystem to the remote Node.
+
Copy source_file from local filesystem to destination_file
- on the remote Node associated with the remote session.
- If source_remote is True, reverse the direction - copy source_file from the
- associated remote Node to destination_file on local storage.
+ on the remote Node associated with this remote session.
+
+ Args:
+ source_file: the file on the local filesystem.
+ destination_file: a file or directory path on the remote Node.
"""
@abstractmethod
@@ -128,7 +161,7 @@ def extract_remote_tarball(
@abstractmethod
def build_dpdk(
self,
- env_vars: EnvVarsDict,
+ env_vars: dict,
meson_args: MesonArgs,
remote_dpdk_dir: str | PurePath,
remote_dpdk_build_dir: str | PurePath,
diff --git a/dts/framework/remote_session/posix_session.py b/dts/framework/remote_session/posix_session.py
index d38062e8d6..8ca0acb429 100644
--- a/dts/framework/remote_session/posix_session.py
+++ b/dts/framework/remote_session/posix_session.py
@@ -9,7 +9,7 @@
from framework.config import Architecture
from framework.exception import DPDKBuildError, RemoteCommandExecutionError
from framework.settings import SETTINGS
-from framework.utils import EnvVarsDict, MesonArgs
+from framework.utils import MesonArgs
from .os_session import OSSession
@@ -34,7 +34,7 @@ def combine_short_options(**opts: bool) -> str:
def guess_dpdk_remote_dir(self, remote_dir) -> PurePosixPath:
remote_guess = self.join_remote_path(remote_dir, "dpdk-*")
- result = self.remote_session.send_command(f"ls -d {remote_guess} | tail -1")
+ result = self.send_command(f"ls -d {remote_guess} | tail -1")
return PurePosixPath(result.stdout)
def get_remote_tmp_dir(self) -> PurePosixPath:
@@ -48,7 +48,7 @@ def get_dpdk_build_env_vars(self, arch: Architecture) -> dict:
env_vars = {}
if arch == Architecture.i686:
# find the pkg-config path and store it in PKG_CONFIG_LIBDIR
- out = self.remote_session.send_command("find /usr -type d -name pkgconfig")
+ out = self.send_command("find /usr -type d -name pkgconfig")
pkg_path = ""
res_path = out.stdout.split("\r\n")
for cur_path in res_path:
@@ -65,13 +65,19 @@ def get_dpdk_build_env_vars(self, arch: Architecture) -> dict:
def join_remote_path(self, *args: str | PurePath) -> PurePosixPath:
return PurePosixPath(*args)
- def copy_file(
+ def copy_from(
self,
source_file: str | PurePath,
destination_file: str | PurePath,
- source_remote: bool = False,
) -> None:
- self.remote_session.copy_file(source_file, destination_file, source_remote)
+ self.remote_session.copy_from(source_file, destination_file)
+
+ def copy_to(
+ self,
+ source_file: str | PurePath,
+ destination_file: str | PurePath,
+ ) -> None:
+ self.remote_session.copy_to(source_file, destination_file)
def remove_remote_dir(
self,
@@ -80,24 +86,24 @@ def remove_remote_dir(
force: bool = True,
) -> None:
opts = PosixSession.combine_short_options(r=recursive, f=force)
- self.remote_session.send_command(f"rm{opts} {remote_dir_path}")
+ self.send_command(f"rm{opts} {remote_dir_path}")
def extract_remote_tarball(
self,
remote_tarball_path: str | PurePath,
expected_dir: str | PurePath | None = None,
) -> None:
- self.remote_session.send_command(
+ self.send_command(
f"tar xfm {remote_tarball_path} "
f"-C {PurePosixPath(remote_tarball_path).parent}",
60,
)
if expected_dir:
- self.remote_session.send_command(f"ls {expected_dir}", verify=True)
+ self.send_command(f"ls {expected_dir}", verify=True)
def build_dpdk(
self,
- env_vars: EnvVarsDict,
+ env_vars: dict,
meson_args: MesonArgs,
remote_dpdk_dir: str | PurePath,
remote_dpdk_build_dir: str | PurePath,
@@ -108,7 +114,7 @@ def build_dpdk(
if rebuild:
# reconfigure, then build
self._logger.info("Reconfiguring DPDK build.")
- self.remote_session.send_command(
+ self.send_command(
f"meson configure {meson_args} {remote_dpdk_build_dir}",
timeout,
verify=True,
@@ -118,7 +124,7 @@ def build_dpdk(
# fresh build - remove target dir first, then build from scratch
self._logger.info("Configuring DPDK build from scratch.")
self.remove_remote_dir(remote_dpdk_build_dir)
- self.remote_session.send_command(
+ self.send_command(
f"meson setup "
f"{meson_args} {remote_dpdk_dir} {remote_dpdk_build_dir}",
timeout,
@@ -127,14 +133,14 @@ def build_dpdk(
)
self._logger.info("Building DPDK.")
- self.remote_session.send_command(
+ self.send_command(
f"ninja -C {remote_dpdk_build_dir}", timeout, verify=True, env=env_vars
)
except RemoteCommandExecutionError as e:
raise DPDKBuildError(f"DPDK build failed when doing '{e.command}'.")
def get_dpdk_version(self, build_dir: str | PurePath) -> str:
- out = self.remote_session.send_command(
+ out = self.send_command(
f"cat {self.join_remote_path(build_dir, 'VERSION')}", verify=True
)
return out.stdout
@@ -146,7 +152,7 @@ def kill_cleanup_dpdk_apps(self, dpdk_prefix_list: Iterable[str]) -> None:
# kill and cleanup only if DPDK is running
dpdk_pids = self._get_dpdk_pids(dpdk_runtime_dirs)
for dpdk_pid in dpdk_pids:
- self.remote_session.send_command(f"kill -9 {dpdk_pid}", 20)
+ self.send_command(f"kill -9 {dpdk_pid}", 20)
self._check_dpdk_hugepages(dpdk_runtime_dirs)
self._remove_dpdk_runtime_dirs(dpdk_runtime_dirs)
@@ -168,7 +174,7 @@ def _list_remote_dirs(self, remote_path: str | PurePath) -> list[str] | None:
Return a list of directories of the remote_dir.
If remote_path doesn't exist, return None.
"""
- out = self.remote_session.send_command(
+ out = self.send_command(
f"ls -l {remote_path} | awk '/^d/ {{print $NF}}'"
).stdout
if "No such file or directory" in out:
@@ -182,9 +188,7 @@ def _get_dpdk_pids(self, dpdk_runtime_dirs: Iterable[str | PurePath]) -> list[in
for dpdk_runtime_dir in dpdk_runtime_dirs:
dpdk_config_file = PurePosixPath(dpdk_runtime_dir, "config")
if self._remote_files_exists(dpdk_config_file):
- out = self.remote_session.send_command(
- f"lsof -Fp {dpdk_config_file}"
- ).stdout
+ out = self.send_command(f"lsof -Fp {dpdk_config_file}").stdout
if out and "No such file or directory" not in out:
for out_line in out.splitlines():
match = re.match(pid_regex, out_line)
@@ -193,7 +197,7 @@ def _get_dpdk_pids(self, dpdk_runtime_dirs: Iterable[str | PurePath]) -> list[in
return pids
def _remote_files_exists(self, remote_path: PurePath) -> bool:
- result = self.remote_session.send_command(f"test -e {remote_path}")
+ result = self.send_command(f"test -e {remote_path}")
return not result.return_code
def _check_dpdk_hugepages(
@@ -202,9 +206,7 @@ def _check_dpdk_hugepages(
for dpdk_runtime_dir in dpdk_runtime_dirs:
hugepage_info = PurePosixPath(dpdk_runtime_dir, "hugepage_info")
if self._remote_files_exists(hugepage_info):
- out = self.remote_session.send_command(
- f"lsof -Fp {hugepage_info}"
- ).stdout
+ out = self.send_command(f"lsof -Fp {hugepage_info}").stdout
if out and "No such file or directory" not in out:
self._logger.warning("Some DPDK processes did not free hugepages.")
self._logger.warning("*******************************************")
diff --git a/dts/framework/remote_session/remote/remote_session.py b/dts/framework/remote_session/remote/remote_session.py
index 91dee3cb4f..0647d93de4 100644
--- a/dts/framework/remote_session/remote/remote_session.py
+++ b/dts/framework/remote_session/remote/remote_session.py
@@ -11,7 +11,6 @@
from framework.exception import RemoteCommandExecutionError
from framework.logger import DTSLOG
from framework.settings import SETTINGS
-from framework.utils import EnvVarsDict
@dataclasses.dataclass(slots=True, frozen=True)
@@ -89,7 +88,7 @@ def send_command(
command: str,
timeout: float = SETTINGS.timeout,
verify: bool = False,
- env: EnvVarsDict | None = None,
+ env: dict | None = None,
) -> CommandResult:
"""
Send a command to the connected node using optional env vars
@@ -114,7 +113,7 @@ def send_command(
@abstractmethod
def _send_command(
- self, command: str, timeout: float, env: EnvVarsDict | None
+ self, command: str, timeout: float, env: dict | None
) -> CommandResult:
"""
Use the underlying protocol to execute the command using optional env vars
@@ -141,15 +140,33 @@ def is_alive(self) -> bool:
"""
@abstractmethod
- def copy_file(
+ def copy_from(
self,
source_file: str | PurePath,
destination_file: str | PurePath,
- source_remote: bool = False,
) -> None:
+ """Copy a file from the remote Node to the local filesystem.
+
+ Copy source_file from the remote Node associated with this remote
+ session to destination_file on the local filesystem.
+
+ Args:
+ source_file: the file on the remote Node.
+ destination_file: a file or directory path on the local filesystem.
"""
- Copy source_file from local filesystem to destination_file on the remote Node
- associated with the remote session.
- If source_remote is True, reverse the direction - copy source_file from the
- associated Node to destination_file on local filesystem.
+
+ @abstractmethod
+ def copy_to(
+ self,
+ source_file: str | PurePath,
+ destination_file: str | PurePath,
+ ) -> None:
+ """Copy a file from local filesystem to the remote Node.
+
+ Copy source_file from local filesystem to destination_file
+ on the remote Node associated with this remote session.
+
+ Args:
+ source_file: the file on the local filesystem.
+ destination_file: a file or directory path on the remote Node.
"""
diff --git a/dts/framework/remote_session/remote/ssh_session.py b/dts/framework/remote_session/remote/ssh_session.py
index 42ff9498a2..8d127f1601 100644
--- a/dts/framework/remote_session/remote/ssh_session.py
+++ b/dts/framework/remote_session/remote/ssh_session.py
@@ -1,29 +1,49 @@
# SPDX-License-Identifier: BSD-3-Clause
-# Copyright(c) 2010-2014 Intel Corporation
-# Copyright(c) 2022-2023 PANTHEON.tech s.r.o.
-# Copyright(c) 2022-2023 University of New Hampshire
+# Copyright(c) 2023 PANTHEON.tech s.r.o.
-import time
+import socket
+import traceback
from pathlib import PurePath
-import pexpect # type: ignore
-from pexpect import pxssh # type: ignore
+from fabric import Connection # type: ignore[import]
+from invoke.exceptions import ( # type: ignore[import]
+ CommandTimedOut,
+ ThreadException,
+ UnexpectedExit,
+)
+from paramiko.ssh_exception import ( # type: ignore[import]
+ AuthenticationException,
+ BadHostKeyException,
+ NoValidConnectionsError,
+ SSHException,
+)
from framework.config import NodeConfiguration
from framework.exception import SSHConnectionError, SSHSessionDeadError, SSHTimeoutError
from framework.logger import DTSLOG
-from framework.utils import GREEN, RED, EnvVarsDict
from .remote_session import CommandResult, RemoteSession
class SSHSession(RemoteSession):
- """
- Module for creating Pexpect SSH remote sessions.
+ """A persistent SSH connection to a remote Node.
+
+ The connection is implemented with the Fabric Python library.
+
+ Args:
+ node_config: The configuration of the Node to connect to.
+ session_name: The name of the session.
+ logger: The logger used for logging.
+ This should be passed from the parent OSSession.
+
+ Attributes:
+ session: The underlying Fabric SSH connection.
+
+ Raises:
+ SSHConnectionError: The connection cannot be established.
"""
- session: pxssh.pxssh
- magic_prompt: str
+ session: Connection
def __init__(
self,
@@ -31,218 +51,91 @@ def __init__(
session_name: str,
logger: DTSLOG,
):
- self.magic_prompt = "MAGIC PROMPT"
super(SSHSession, self).__init__(node_config, session_name, logger)
def _connect(self) -> None:
- """
- Create connection to assigned node.
- """
+ errors = []
retry_attempts = 10
login_timeout = 20 if self.port else 10
- password_regex = (
- r"(?i)(?:password:)|(?:passphrase for key)|(?i)(password for .+:)"
- )
- try:
- for retry_attempt in range(retry_attempts):
- self.session = pxssh.pxssh(encoding="utf-8")
- try:
- self.session.login(
- self.ip,
- self.username,
- self.password,
- original_prompt="[$#>]",
- port=self.port,
- login_timeout=login_timeout,
- password_regex=password_regex,
- )
- break
- except Exception as e:
- self._logger.warning(e)
- time.sleep(2)
- self._logger.info(
- f"Retrying connection: retry number {retry_attempt + 1}."
- )
- else:
- raise Exception(f"Connection to {self.hostname} failed")
-
- self.send_expect("stty -echo", "#")
- self.send_expect("stty columns 1000", "#")
- self.send_expect("bind 'set enable-bracketed-paste off'", "#")
- except Exception as e:
- self._logger.error(RED(str(e)))
- if getattr(self, "port", None):
- suggestion = (
- f"\nSuggestion: Check if the firewall on {self.hostname} is "
- f"stopped.\n"
+ for retry_attempt in range(retry_attempts):
+ try:
+ self.session = Connection(
+ self.ip,
+ user=self.username,
+ port=self.port,
+ connect_kwargs={"password": self.password},
+ connect_timeout=login_timeout,
)
- self._logger.info(GREEN(suggestion))
-
- raise SSHConnectionError(self.hostname)
+ self.session.open()
- def send_expect(
- self, command: str, prompt: str, timeout: float = 15, verify: bool = False
- ) -> str | int:
- try:
- ret = self.send_expect_base(command, prompt, timeout)
- if verify:
- ret_status = self.send_expect_base("echo $?", prompt, timeout)
- try:
- retval = int(ret_status)
- if retval:
- self._logger.error(f"Command: {command} failure!")
- self._logger.error(ret)
- return retval
- else:
- return ret
- except ValueError:
- return ret
- else:
- return ret
- except Exception as e:
- self._logger.error(
- f"Exception happened in [{command}] and output is "
- f"[{self._get_output()}]"
- )
- raise e
-
- def send_expect_base(self, command: str, prompt: str, timeout: float) -> str:
- self._clean_session()
- original_prompt = self.session.PROMPT
- self.session.PROMPT = prompt
- self._send_line(command)
- self._prompt(command, timeout)
-
- before = self._get_output()
- self.session.PROMPT = original_prompt
- return before
-
- def _clean_session(self) -> None:
- self.session.PROMPT = self.magic_prompt
- self.get_output(timeout=0.01)
- self.session.PROMPT = self.session.UNIQUE_PROMPT
-
- def _send_line(self, command: str) -> None:
- if not self.is_alive():
- raise SSHSessionDeadError(self.hostname)
- if len(command) == 2 and command.startswith("^"):
- self.session.sendcontrol(command[1])
- else:
- self.session.sendline(command)
+ except (ValueError, BadHostKeyException, AuthenticationException) as e:
+ self._logger.exception(e)
+ raise SSHConnectionError(self.hostname) from e
- def _prompt(self, command: str, timeout: float) -> None:
- if not self.session.prompt(timeout):
- raise SSHTimeoutError(command, self._get_output()) from None
+ except (NoValidConnectionsError, socket.error, SSHException) as e:
+ self._logger.debug(traceback.format_exc())
+ self._logger.warning(e)
- def get_output(self, timeout: float = 15) -> str:
- """
- Get all output before timeout
- """
- try:
- self.session.prompt(timeout)
- except Exception:
- pass
-
- before = self._get_output()
- self._flush()
-
- return before
+ error = repr(e)
+ if error not in errors:
+ errors.append(error)
- def _get_output(self) -> str:
- if not self.is_alive():
- raise SSHSessionDeadError(self.hostname)
- before = self.session.before.rsplit("\r\n", 1)[0]
- if before == "[PEXPECT]":
- return ""
- return before
+ self._logger.info(
+ f"Retrying connection: retry number {retry_attempt + 1}."
+ )
- def _flush(self) -> None:
- """
- Clear all session buffer
- """
- self.session.buffer = ""
- self.session.before = ""
+ else:
+ break
+ else:
+ raise SSHConnectionError(self.hostname, errors)
def is_alive(self) -> bool:
- return self.session.isalive()
+ return self.session.is_connected
def _send_command(
- self, command: str, timeout: float, env: EnvVarsDict | None
+ self, command: str, timeout: float, env: dict | None
) -> CommandResult:
- output = self._send_command_get_output(command, timeout, env)
- return_code = int(self._send_command_get_output("echo $?", timeout, None))
+ """Send a command and return the result of the execution.
- # we're capturing only stdout
- return CommandResult(self.name, command, output, "", return_code)
+ Args:
+ command: The command to execute.
+ timeout: Wait at most this many seconds for the execution to complete.
+ env: Extra environment variables that will be used in command execution.
- def _send_command_get_output(
- self, command: str, timeout: float, env: EnvVarsDict | None
- ) -> str:
+ Raises:
+ SSHSessionDeadError: The session died while executing the command.
+ SSHTimeoutError: The command execution timed out.
+ """
try:
- self._clean_session()
- if env:
- command = f"{env} {command}"
- self._send_line(command)
- except Exception as e:
- raise e
+ output = self.session.run(
+ command, env=env, warn=True, hide=True, timeout=timeout
+ )
- output = self.get_output(timeout=timeout)
- self.session.PROMPT = self.session.UNIQUE_PROMPT
- self.session.prompt(0.1)
+ except (UnexpectedExit, ThreadException) as e:
+ self._logger.exception(e)
+ raise SSHSessionDeadError(self.hostname) from e
- return output
+ except CommandTimedOut as e:
+ self._logger.exception(e)
+ raise SSHTimeoutError(command, e.result.stderr) from e
- def _close(self, force: bool = False) -> None:
- if force is True:
- self.session.close()
- else:
- if self.is_alive():
- self.session.logout()
+ return CommandResult(
+ self.name, command, output.stdout, output.stderr, output.return_code
+ )
- def copy_file(
+ def copy_from(
self,
source_file: str | PurePath,
destination_file: str | PurePath,
- source_remote: bool = False,
) -> None:
- """
- Send a local file to a remote host.
- """
- if source_remote:
- source_file = f"{self.username}@{self.ip}:{source_file}"
- else:
- destination_file = f"{self.username}@{self.ip}:{destination_file}"
+ self.session.get(str(destination_file), str(source_file))
- port = ""
- if self.port:
- port = f" -P {self.port}"
-
- command = (
- f"scp -v{port} -o NoHostAuthenticationForLocalhost=yes"
- f" {source_file} {destination_file}"
- )
-
- self._spawn_scp(command)
+ def copy_to(
+ self,
+ source_file: str | PurePath,
+ destination_file: str | PurePath,
+ ) -> None:
+ self.session.put(str(source_file), str(destination_file))
- def _spawn_scp(self, scp_cmd: str) -> None:
- """
- Transfer a file with SCP
- """
- self._logger.info(scp_cmd)
- p: pexpect.spawn = pexpect.spawn(scp_cmd)
- time.sleep(0.5)
- ssh_newkey: str = "Are you sure you want to continue connecting"
- i: int = p.expect(
- [ssh_newkey, "[pP]assword", "# ", pexpect.EOF, pexpect.TIMEOUT], 120
- )
- if i == 0: # add once in trust list
- p.sendline("yes")
- i = p.expect([ssh_newkey, "[pP]assword", pexpect.EOF], 2)
-
- if i == 1:
- time.sleep(0.5)
- p.sendline(self.password)
- p.expect("Exit status 0", 60)
- if i == 4:
- self._logger.error("SCP TIMEOUT error %d" % i)
- p.close()
+ def _close(self, force: bool = False) -> None:
+ self.session.close()
diff --git a/dts/framework/testbed_model/sut_node.py b/dts/framework/testbed_model/sut_node.py
index 2b2b50d982..9dbc390848 100644
--- a/dts/framework/testbed_model/sut_node.py
+++ b/dts/framework/testbed_model/sut_node.py
@@ -10,7 +10,7 @@
from framework.config import BuildTargetConfiguration, NodeConfiguration
from framework.remote_session import CommandResult, OSSession
from framework.settings import SETTINGS
-from framework.utils import EnvVarsDict, MesonArgs
+from framework.utils import MesonArgs
from .hw import LogicalCoreCount, LogicalCoreList, VirtualDevice
from .node import Node
@@ -27,7 +27,7 @@ class SutNode(Node):
_dpdk_prefix_list: list[str]
_dpdk_timestamp: str
_build_target_config: BuildTargetConfiguration | None
- _env_vars: EnvVarsDict
+ _env_vars: dict
_remote_tmp_dir: PurePath
__remote_dpdk_dir: PurePath | None
_dpdk_version: str | None
@@ -38,7 +38,7 @@ def __init__(self, node_config: NodeConfiguration):
super(SutNode, self).__init__(node_config)
self._dpdk_prefix_list = []
self._build_target_config = None
- self._env_vars = EnvVarsDict()
+ self._env_vars = {}
self._remote_tmp_dir = self.main_session.get_remote_tmp_dir()
self.__remote_dpdk_dir = None
self._dpdk_version = None
@@ -94,7 +94,7 @@ def _configure_build_target(
"""
Populate common environment variables and set build target config.
"""
- self._env_vars = EnvVarsDict()
+ self._env_vars = {}
self._build_target_config = build_target_config
self._env_vars.update(
self.main_session.get_dpdk_build_env_vars(build_target_config.arch)
@@ -112,7 +112,7 @@ def _copy_dpdk_tarball(self) -> None:
Copy to and extract DPDK tarball on the SUT node.
"""
self._logger.info("Copying DPDK tarball to SUT.")
- self.main_session.copy_file(SETTINGS.dpdk_tarball_path, self._remote_tmp_dir)
+ self.main_session.copy_to(SETTINGS.dpdk_tarball_path, self._remote_tmp_dir)
# construct remote tarball path
# the basename is the same on local host and on remote Node
@@ -259,7 +259,7 @@ def run_dpdk_app(
Run DPDK application on the remote node.
"""
return self.main_session.send_command(
- f"{app_path} {eal_args}", timeout, verify=True
+ f"{app_path} {eal_args}", timeout, privileged=True, verify=True
)
diff --git a/dts/framework/utils.py b/dts/framework/utils.py
index 55e0b0ef0e..8cfbc6a29d 100644
--- a/dts/framework/utils.py
+++ b/dts/framework/utils.py
@@ -42,19 +42,10 @@ def expand_range(range_str: str) -> list[int]:
return expanded_range
-def GREEN(text: str) -> str:
- return f"\u001B[32;1m{str(text)}\u001B[0m"
-
-
def RED(text: str) -> str:
return f"\u001B[31;1m{str(text)}\u001B[0m"
-class EnvVarsDict(dict):
- def __str__(self) -> str:
- return " ".join(["=".join(item) for item in self.items()])
-
-
class MesonArgs(object):
"""
Aggregate the arguments needed to build DPDK:
diff --git a/dts/poetry.lock b/dts/poetry.lock
index 0b2a007d4d..2438f337cd 100644
--- a/dts/poetry.lock
+++ b/dts/poetry.lock
@@ -12,6 +12,18 @@ docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"]
+[[package]]
+name = "bcrypt"
+version = "4.0.1"
+description = "Modern password hashing for your software and your servers"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[package.extras]
+tests = ["pytest (>=3.2.1,!=3.3.0)"]
+typecheck = ["mypy"]
+
[[package]]
name = "black"
version = "22.10.0"
@@ -33,6 +45,17 @@ d = ["aiohttp (>=3.7.4)"]
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
uvloop = ["uvloop (>=0.15.2)"]
+[[package]]
+name = "cffi"
+version = "1.15.1"
+description = "Foreign Function Interface for Python calling C code."
+category = "main"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+pycparser = "*"
+
[[package]]
name = "click"
version = "8.1.3"
@@ -52,6 +75,52 @@ category = "dev"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+[[package]]
+name = "cryptography"
+version = "40.0.2"
+description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+cffi = ">=1.12"
+
+[package.extras]
+docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
+docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"]
+pep8test = ["black", "ruff", "mypy", "check-manifest"]
+sdist = ["setuptools-rust (>=0.11.4)"]
+ssh = ["bcrypt (>=3.1.5)"]
+test = ["pytest (>=6.2.0)", "pytest-shard (>=0.1.2)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601"]
+test-randomorder = ["pytest-randomly"]
+tox = ["tox"]
+
+[[package]]
+name = "fabric"
+version = "2.7.1"
+description = "High level SSH command execution"
+category = "main"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+invoke = ">=1.3,<2.0"
+paramiko = ">=2.4"
+pathlib2 = "*"
+
+[package.extras]
+pytest = ["mock (>=2.0.0,<3.0)", "pytest (>=3.2.5,<4.0)"]
+testing = ["mock (>=2.0.0,<3.0)"]
+
+[[package]]
+name = "invoke"
+version = "1.7.3"
+description = "Pythonic task execution"
+category = "main"
+optional = false
+python-versions = "*"
+
[[package]]
name = "isort"
version = "5.10.1"
@@ -136,23 +205,41 @@ optional = false
python-versions = "*"
[[package]]
-name = "pathspec"
-version = "0.10.1"
-description = "Utility library for gitignore style pattern matching of file paths."
-category = "dev"
+name = "paramiko"
+version = "3.1.0"
+description = "SSH2 protocol library"
+category = "main"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.6"
+
+[package.dependencies]
+bcrypt = ">=3.2"
+cryptography = ">=3.3"
+pynacl = ">=1.5"
+
+[package.extras]
+all = ["pyasn1 (>=0.1.7)", "invoke (>=2.0)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"]
+gssapi = ["pyasn1 (>=0.1.7)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"]
+invoke = ["invoke (>=2.0)"]
[[package]]
-name = "pexpect"
-version = "4.8.0"
-description = "Pexpect allows easy control of interactive console applications."
+name = "pathlib2"
+version = "2.3.7.post1"
+description = "Object-oriented filesystem paths"
category = "main"
optional = false
python-versions = "*"
[package.dependencies]
-ptyprocess = ">=0.5"
+six = "*"
+
+[[package]]
+name = "pathspec"
+version = "0.10.1"
+description = "Utility library for gitignore style pattern matching of file paths."
+category = "dev"
+optional = false
+python-versions = ">=3.7"
[[package]]
name = "platformdirs"
@@ -166,14 +253,6 @@ python-versions = ">=3.7"
docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"]
test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"]
-[[package]]
-name = "ptyprocess"
-version = "0.7.0"
-description = "Run a subprocess in a pseudo terminal"
-category = "main"
-optional = false
-python-versions = "*"
-
[[package]]
name = "pycodestyle"
version = "2.9.1"
@@ -182,6 +261,14 @@ category = "dev"
optional = false
python-versions = ">=3.6"
+[[package]]
+name = "pycparser"
+version = "2.21"
+description = "C parser in Python"
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+
[[package]]
name = "pydocstyle"
version = "6.1.1"
@@ -228,6 +315,21 @@ tests = ["pytest (>=7.1.2)", "pytest-mypy", "eradicate (>=2.0.0)", "radon (>=5.1
toml = ["toml (>=0.10.2)"]
vulture = ["vulture"]
+[[package]]
+name = "pynacl"
+version = "1.5.0"
+description = "Python binding to the Networking and Cryptography (NaCl) library"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+cffi = ">=1.4.1"
+
+[package.extras]
+docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"]
+tests = ["pytest (>=3.2.1,!=3.3.0)", "hypothesis (>=3.27.0)"]
+
[[package]]
name = "pyrsistent"
version = "0.19.1"
@@ -244,6 +346,14 @@ category = "main"
optional = false
python-versions = ">=3.6"
+[[package]]
+name = "six"
+version = "1.16.0"
+description = "Python 2 and 3 compatibility utilities"
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+
[[package]]
name = "snowballstemmer"
version = "2.2.0"
@@ -299,13 +409,18 @@ jsonschema = ">=4,<5"
[metadata]
lock-version = "1.1"
python-versions = "^3.10"
-content-hash = "a0f040b07fc6ce4deb0be078b9a88c2a465cb6bccb9e260a67e92c2403e2319f"
+content-hash = "719c43bcaa5d181921debda884f8f714063df0b2336d61e9f64ecab034e8b139"
[metadata.files]
attrs = []
+bcrypt = []
black = []
+cffi = []
click = []
colorama = []
+cryptography = []
+fabric = []
+invoke = []
isort = []
jsonpatch = []
jsonpointer = []
@@ -313,22 +428,22 @@ jsonschema = []
mccabe = []
mypy = []
mypy-extensions = []
+paramiko = []
+pathlib2 = []
pathspec = []
-pexpect = [
- {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"},
- {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"},
-]
platformdirs = [
{file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
{file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},
]
-ptyprocess = []
pycodestyle = []
+pycparser = []
pydocstyle = []
pyflakes = []
pylama = []
+pynacl = []
pyrsistent = []
pyyaml = []
+six = []
snowballstemmer = []
toml = []
tomli = []
diff --git a/dts/pyproject.toml b/dts/pyproject.toml
index a136c91e5e..50bcdb327a 100644
--- a/dts/pyproject.toml
+++ b/dts/pyproject.toml
@@ -9,10 +9,10 @@ authors = ["Owen Hilyard <ohilyard@iol.unh.edu>", "dts@dpdk.org"]
[tool.poetry.dependencies]
python = "^3.10"
-pexpect = "^4.8.0"
warlock = "^2.0.1"
PyYAML = "^6.0"
types-PyYAML = "^6.0.8"
+fabric = "^2.7.1"
[tool.poetry.dev-dependencies]
mypy = "^0.961"
--
2.34.1
^ permalink raw reply [flat|nested] 21+ messages in thread
* Re: [PATCH v3] dts: replace pexpect with fabric
2023-06-09 9:46 ` [PATCH v3] " Juraj Linkeš
@ 2023-06-21 18:33 ` Jeremy Spewock
2023-07-05 19:59 ` Jeremy Spewock
2023-07-12 16:34 ` Thomas Monjalon
2023-07-09 1:45 ` Patrick Robb
1 sibling, 2 replies; 21+ messages in thread
From: Jeremy Spewock @ 2023-06-21 18:33 UTC (permalink / raw)
To: Juraj Linkeš
Cc: thomas, Honnappa.Nagarahalli, lijuan.tu, wathsala.vithanage, probb, dev
[-- Attachment #1: Type: text/plain, Size: 48667 bytes --]
Acked-by: Jeremy Spewock <jspewock@iol.unh.edu>
On Fri, Jun 9, 2023 at 5:46 AM Juraj Linkeš <juraj.linkes@pantheon.tech>
wrote:
> Pexpect is not a dedicated SSH connection library while Fabric is. With
> Fabric, all SSH-related logic is provided and we can just focus on
> what's DTS specific.
>
> Signed-off-by: Juraj Linkeš <juraj.linkes@pantheon.tech>
> ---
>
> Notes:
> v3: updated passwordless sudo setup on Linux
>
> doc/guides/tools/dts.rst | 29 +-
> dts/conf.yaml | 2 +-
> dts/framework/exception.py | 10 +-
> dts/framework/remote_session/linux_session.py | 31 +-
> dts/framework/remote_session/os_session.py | 51 +++-
> dts/framework/remote_session/posix_session.py | 48 +--
> .../remote_session/remote/remote_session.py | 35 ++-
> .../remote_session/remote/ssh_session.py | 287 ++++++------------
> dts/framework/testbed_model/sut_node.py | 12 +-
> dts/framework/utils.py | 9 -
> dts/poetry.lock | 161 ++++++++--
> dts/pyproject.toml | 2 +-
> 12 files changed, 376 insertions(+), 301 deletions(-)
>
> diff --git a/doc/guides/tools/dts.rst b/doc/guides/tools/dts.rst
> index ebd6dceb6a..c7b31623e4 100644
> --- a/doc/guides/tools/dts.rst
> +++ b/doc/guides/tools/dts.rst
> @@ -95,9 +95,14 @@ Setting up DTS environment
>
> #. **SSH Connection**
>
> - DTS uses Python pexpect for SSH connections between DTS environment
> and the other hosts.
> - The pexpect implementation is a wrapper around the ssh command in the
> DTS environment.
> - This means it'll use the SSH agent providing the ssh command and its
> keys.
> + DTS uses the Fabric Python library for SSH connections between DTS
> environment
> + and the other hosts.
> + The authentication method used is pubkey authentication.
> + Fabric tries to use a passed key/certificate,
> + then any key it can find through an SSH agent,
> + then any "id_rsa", "id_dsa" or "id_ecdsa" key discoverable in
> ``~/.ssh/``
> + (with any matching OpenSSH-style certificates).
> + DTS doesn't pass any keys, so Fabric tries to use the other two
> methods.
>
>
> Setting up System Under Test
> @@ -132,6 +137,21 @@ There are two areas that need to be set up on a
> System Under Test:
> It's possible to use the hugepage configuration already present on
> the SUT.
> If you wish to do so, don't specify the hugepage configuration in
> the DTS config file.
>
> +#. **User with administrator privileges**
> +
> +.. _sut_admin_user:
> +
> + DTS needs administrator privileges to run DPDK applications (such as
> testpmd) on the SUT.
> + The SUT user must be able to run commands in privileged mode without
> asking for password.
> + On most Linux distributions, it's a matter of setting up passwordless
> sudo:
> +
> + #. Run ``sudo visudo`` and check that it contains ``%sudo
> ALL=(ALL:ALL) NOPASSWD:ALL``.
> +
> + #. Add the SUT user to the sudo group with:
> +
> + .. code-block:: console
> +
> + sudo usermod -aG sudo <sut_user>
>
> Running DTS
> -----------
> @@ -151,7 +171,8 @@ which is a template that illustrates what can be
> configured in DTS:
> :start-at: executions:
>
>
> -The user must be root or any other user with prompt starting with ``#``.
> +The user must have :ref:`administrator privileges <sut_admin_user>`
> +which don't require password authentication.
> The other fields are mostly self-explanatory
> and documented in more detail in
> ``dts/framework/config/conf_yaml_schema.json``.
>
> diff --git a/dts/conf.yaml b/dts/conf.yaml
> index a9bd8a3ecf..129801d87c 100644
> --- a/dts/conf.yaml
> +++ b/dts/conf.yaml
> @@ -16,7 +16,7 @@ executions:
> nodes:
> - name: "SUT 1"
> hostname: sut1.change.me.localhost
> - user: root
> + user: dtsuser
> arch: x86_64
> os: linux
> lcores: ""
> diff --git a/dts/framework/exception.py b/dts/framework/exception.py
> index ca353d98fc..44ff4e979a 100644
> --- a/dts/framework/exception.py
> +++ b/dts/framework/exception.py
> @@ -62,13 +62,19 @@ class SSHConnectionError(DTSError):
> """
>
> host: str
> + errors: list[str]
> severity: ClassVar[ErrorSeverity] = ErrorSeverity.SSH_ERR
>
> - def __init__(self, host: str):
> + def __init__(self, host: str, errors: list[str] | None = None):
> self.host = host
> + self.errors = [] if errors is None else errors
>
> def __str__(self) -> str:
> - return f"Error trying to connect with {self.host}"
> + message = f"Error trying to connect with {self.host}."
> + if self.errors:
> + message += f" Errors encountered while retrying: {',
> '.join(self.errors)}"
> +
> + return message
>
>
> class SSHSessionDeadError(DTSError):
> diff --git a/dts/framework/remote_session/linux_session.py
> b/dts/framework/remote_session/linux_session.py
> index a1e3bc3a92..f13f399121 100644
> --- a/dts/framework/remote_session/linux_session.py
> +++ b/dts/framework/remote_session/linux_session.py
> @@ -14,10 +14,11 @@ class LinuxSession(PosixSession):
> The implementation of non-Posix compliant parts of Linux remote
> sessions.
> """
>
> + def _get_privileged_command(self, command: str) -> str:
> + return f"sudo -- sh -c '{command}'"
> +
> def get_remote_cpus(self, use_first_core: bool) -> list[LogicalCore]:
> - cpu_info = self.remote_session.send_command(
> - "lscpu -p=CPU,CORE,SOCKET,NODE|grep -v \\#"
> - ).stdout
> + cpu_info = self.send_command("lscpu -p=CPU,CORE,SOCKET,NODE|grep
> -v \\#").stdout
> lcores = []
> for cpu_line in cpu_info.splitlines():
> lcore, core, socket, node = map(int, cpu_line.split(","))
> @@ -45,20 +46,20 @@ def setup_hugepages(self, hugepage_amount: int,
> force_first_numa: bool) -> None:
> self._mount_huge_pages()
>
> def _get_hugepage_size(self) -> int:
> - hugepage_size = self.remote_session.send_command(
> + hugepage_size = self.send_command(
> "awk '/Hugepagesize/ {print $2}' /proc/meminfo"
> ).stdout
> return int(hugepage_size)
>
> def _get_hugepages_total(self) -> int:
> - hugepages_total = self.remote_session.send_command(
> + hugepages_total = self.send_command(
> "awk '/HugePages_Total/ { print $2 }' /proc/meminfo"
> ).stdout
> return int(hugepages_total)
>
> def _get_numa_nodes(self) -> list[int]:
> try:
> - numa_count = self.remote_session.send_command(
> + numa_count = self.send_command(
> "cat /sys/devices/system/node/online", verify=True
> ).stdout
> numa_range = expand_range(numa_count)
> @@ -70,14 +71,12 @@ def _get_numa_nodes(self) -> list[int]:
> def _mount_huge_pages(self) -> None:
> self._logger.info("Re-mounting Hugepages.")
> hugapge_fs_cmd = "awk '/hugetlbfs/ { print $2 }' /proc/mounts"
> - self.remote_session.send_command(f"umount $({hugapge_fs_cmd})")
> - result = self.remote_session.send_command(hugapge_fs_cmd)
> + self.send_command(f"umount $({hugapge_fs_cmd})")
> + result = self.send_command(hugapge_fs_cmd)
> if result.stdout == "":
> remote_mount_path = "/mnt/huge"
> - self.remote_session.send_command(f"mkdir -p
> {remote_mount_path}")
> - self.remote_session.send_command(
> - f"mount -t hugetlbfs nodev {remote_mount_path}"
> - )
> + self.send_command(f"mkdir -p {remote_mount_path}")
> + self.send_command(f"mount -t hugetlbfs nodev
> {remote_mount_path}")
>
> def _supports_numa(self) -> bool:
> # the system supports numa if self._numa_nodes is non-empty and
> there are more
> @@ -94,14 +93,12 @@ def _configure_huge_pages(
> )
> if force_first_numa and self._supports_numa():
> # clear non-numa hugepages
> - self.remote_session.send_command(
> - f"echo 0 | sudo tee {hugepage_config_path}"
> - )
> + self.send_command(f"echo 0 | tee {hugepage_config_path}",
> privileged=True)
> hugepage_config_path = (
>
> f"/sys/devices/system/node/node{self._numa_nodes[0]}/hugepages"
> f"/hugepages-{size}kB/nr_hugepages"
> )
>
> - self.remote_session.send_command(
> - f"echo {amount} | sudo tee {hugepage_config_path}"
> + self.send_command(
> + f"echo {amount} | tee {hugepage_config_path}", privileged=True
> )
> diff --git a/dts/framework/remote_session/os_session.py
> b/dts/framework/remote_session/os_session.py
> index 4c48ae2567..bfd70bd480 100644
> --- a/dts/framework/remote_session/os_session.py
> +++ b/dts/framework/remote_session/os_session.py
> @@ -10,7 +10,7 @@
> from framework.logger import DTSLOG
> from framework.settings import SETTINGS
> from framework.testbed_model import LogicalCore
> -from framework.utils import EnvVarsDict, MesonArgs
> +from framework.utils import MesonArgs
>
> from .remote import CommandResult, RemoteSession, create_remote_session
>
> @@ -53,17 +53,32 @@ def is_alive(self) -> bool:
> def send_command(
> self,
> command: str,
> - timeout: float,
> + timeout: float = SETTINGS.timeout,
> + privileged: bool = False,
> verify: bool = False,
> - env: EnvVarsDict | None = None,
> + env: dict | None = None,
> ) -> CommandResult:
> """
> An all-purpose API in case the command to be executed is already
> OS-agnostic, such as when the path to the executed command has
> been
> constructed beforehand.
> """
> + if privileged:
> + command = self._get_privileged_command(command)
> +
> return self.remote_session.send_command(command, timeout, verify,
> env)
>
> + @abstractmethod
> + def _get_privileged_command(self, command: str) -> str:
> + """Modify the command so that it executes with administrative
> privileges.
> +
> + Args:
> + command: The command to modify.
> +
> + Returns:
> + The modified command that executes with administrative
> privileges.
> + """
> +
> @abstractmethod
> def guess_dpdk_remote_dir(self, remote_dir) -> PurePath:
> """
> @@ -90,17 +105,35 @@ def join_remote_path(self, *args: str | PurePath) ->
> PurePath:
> """
>
> @abstractmethod
> - def copy_file(
> + def copy_from(
> self,
> source_file: str | PurePath,
> destination_file: str | PurePath,
> - source_remote: bool = False,
> ) -> None:
> + """Copy a file from the remote Node to the local filesystem.
> +
> + Copy source_file from the remote Node associated with this remote
> + session to destination_file on the local filesystem.
> +
> + Args:
> + source_file: the file on the remote Node.
> + destination_file: a file or directory path on the local
> filesystem.
> """
> +
> + @abstractmethod
> + def copy_to(
> + self,
> + source_file: str | PurePath,
> + destination_file: str | PurePath,
> + ) -> None:
> + """Copy a file from local filesystem to the remote Node.
> +
> Copy source_file from local filesystem to destination_file
> - on the remote Node associated with the remote session.
> - If source_remote is True, reverse the direction - copy
> source_file from the
> - associated remote Node to destination_file on local storage.
> + on the remote Node associated with this remote session.
> +
> + Args:
> + source_file: the file on the local filesystem.
> + destination_file: a file or directory path on the remote Node.
> """
>
> @abstractmethod
> @@ -128,7 +161,7 @@ def extract_remote_tarball(
> @abstractmethod
> def build_dpdk(
> self,
> - env_vars: EnvVarsDict,
> + env_vars: dict,
> meson_args: MesonArgs,
> remote_dpdk_dir: str | PurePath,
> remote_dpdk_build_dir: str | PurePath,
> diff --git a/dts/framework/remote_session/posix_session.py
> b/dts/framework/remote_session/posix_session.py
> index d38062e8d6..8ca0acb429 100644
> --- a/dts/framework/remote_session/posix_session.py
> +++ b/dts/framework/remote_session/posix_session.py
> @@ -9,7 +9,7 @@
> from framework.config import Architecture
> from framework.exception import DPDKBuildError,
> RemoteCommandExecutionError
> from framework.settings import SETTINGS
> -from framework.utils import EnvVarsDict, MesonArgs
> +from framework.utils import MesonArgs
>
> from .os_session import OSSession
>
> @@ -34,7 +34,7 @@ def combine_short_options(**opts: bool) -> str:
>
> def guess_dpdk_remote_dir(self, remote_dir) -> PurePosixPath:
> remote_guess = self.join_remote_path(remote_dir, "dpdk-*")
> - result = self.remote_session.send_command(f"ls -d {remote_guess}
> | tail -1")
> + result = self.send_command(f"ls -d {remote_guess} | tail -1")
> return PurePosixPath(result.stdout)
>
> def get_remote_tmp_dir(self) -> PurePosixPath:
> @@ -48,7 +48,7 @@ def get_dpdk_build_env_vars(self, arch: Architecture) ->
> dict:
> env_vars = {}
> if arch == Architecture.i686:
> # find the pkg-config path and store it in PKG_CONFIG_LIBDIR
> - out = self.remote_session.send_command("find /usr -type d
> -name pkgconfig")
> + out = self.send_command("find /usr -type d -name pkgconfig")
> pkg_path = ""
> res_path = out.stdout.split("\r\n")
> for cur_path in res_path:
> @@ -65,13 +65,19 @@ def get_dpdk_build_env_vars(self, arch: Architecture)
> -> dict:
> def join_remote_path(self, *args: str | PurePath) -> PurePosixPath:
> return PurePosixPath(*args)
>
> - def copy_file(
> + def copy_from(
> self,
> source_file: str | PurePath,
> destination_file: str | PurePath,
> - source_remote: bool = False,
> ) -> None:
> - self.remote_session.copy_file(source_file, destination_file,
> source_remote)
> + self.remote_session.copy_from(source_file, destination_file)
> +
> + def copy_to(
> + self,
> + source_file: str | PurePath,
> + destination_file: str | PurePath,
> + ) -> None:
> + self.remote_session.copy_to(source_file, destination_file)
>
> def remove_remote_dir(
> self,
> @@ -80,24 +86,24 @@ def remove_remote_dir(
> force: bool = True,
> ) -> None:
> opts = PosixSession.combine_short_options(r=recursive, f=force)
> - self.remote_session.send_command(f"rm{opts} {remote_dir_path}")
> + self.send_command(f"rm{opts} {remote_dir_path}")
>
> def extract_remote_tarball(
> self,
> remote_tarball_path: str | PurePath,
> expected_dir: str | PurePath | None = None,
> ) -> None:
> - self.remote_session.send_command(
> + self.send_command(
> f"tar xfm {remote_tarball_path} "
> f"-C {PurePosixPath(remote_tarball_path).parent}",
> 60,
> )
> if expected_dir:
> - self.remote_session.send_command(f"ls {expected_dir}",
> verify=True)
> + self.send_command(f"ls {expected_dir}", verify=True)
>
> def build_dpdk(
> self,
> - env_vars: EnvVarsDict,
> + env_vars: dict,
> meson_args: MesonArgs,
> remote_dpdk_dir: str | PurePath,
> remote_dpdk_build_dir: str | PurePath,
> @@ -108,7 +114,7 @@ def build_dpdk(
> if rebuild:
> # reconfigure, then build
> self._logger.info("Reconfiguring DPDK build.")
> - self.remote_session.send_command(
> + self.send_command(
> f"meson configure {meson_args}
> {remote_dpdk_build_dir}",
> timeout,
> verify=True,
> @@ -118,7 +124,7 @@ def build_dpdk(
> # fresh build - remove target dir first, then build from
> scratch
> self._logger.info("Configuring DPDK build from scratch.")
> self.remove_remote_dir(remote_dpdk_build_dir)
> - self.remote_session.send_command(
> + self.send_command(
> f"meson setup "
> f"{meson_args} {remote_dpdk_dir}
> {remote_dpdk_build_dir}",
> timeout,
> @@ -127,14 +133,14 @@ def build_dpdk(
> )
>
> self._logger.info("Building DPDK.")
> - self.remote_session.send_command(
> + self.send_command(
> f"ninja -C {remote_dpdk_build_dir}", timeout,
> verify=True, env=env_vars
> )
> except RemoteCommandExecutionError as e:
> raise DPDKBuildError(f"DPDK build failed when doing
> '{e.command}'.")
>
> def get_dpdk_version(self, build_dir: str | PurePath) -> str:
> - out = self.remote_session.send_command(
> + out = self.send_command(
> f"cat {self.join_remote_path(build_dir, 'VERSION')}",
> verify=True
> )
> return out.stdout
> @@ -146,7 +152,7 @@ def kill_cleanup_dpdk_apps(self, dpdk_prefix_list:
> Iterable[str]) -> None:
> # kill and cleanup only if DPDK is running
> dpdk_pids = self._get_dpdk_pids(dpdk_runtime_dirs)
> for dpdk_pid in dpdk_pids:
> - self.remote_session.send_command(f"kill -9 {dpdk_pid}",
> 20)
> + self.send_command(f"kill -9 {dpdk_pid}", 20)
> self._check_dpdk_hugepages(dpdk_runtime_dirs)
> self._remove_dpdk_runtime_dirs(dpdk_runtime_dirs)
>
> @@ -168,7 +174,7 @@ def _list_remote_dirs(self, remote_path: str |
> PurePath) -> list[str] | None:
> Return a list of directories of the remote_dir.
> If remote_path doesn't exist, return None.
> """
> - out = self.remote_session.send_command(
> + out = self.send_command(
> f"ls -l {remote_path} | awk '/^d/ {{print $NF}}'"
> ).stdout
> if "No such file or directory" in out:
> @@ -182,9 +188,7 @@ def _get_dpdk_pids(self, dpdk_runtime_dirs:
> Iterable[str | PurePath]) -> list[in
> for dpdk_runtime_dir in dpdk_runtime_dirs:
> dpdk_config_file = PurePosixPath(dpdk_runtime_dir, "config")
> if self._remote_files_exists(dpdk_config_file):
> - out = self.remote_session.send_command(
> - f"lsof -Fp {dpdk_config_file}"
> - ).stdout
> + out = self.send_command(f"lsof -Fp
> {dpdk_config_file}").stdout
> if out and "No such file or directory" not in out:
> for out_line in out.splitlines():
> match = re.match(pid_regex, out_line)
> @@ -193,7 +197,7 @@ def _get_dpdk_pids(self, dpdk_runtime_dirs:
> Iterable[str | PurePath]) -> list[in
> return pids
>
> def _remote_files_exists(self, remote_path: PurePath) -> bool:
> - result = self.remote_session.send_command(f"test -e
> {remote_path}")
> + result = self.send_command(f"test -e {remote_path}")
> return not result.return_code
>
> def _check_dpdk_hugepages(
> @@ -202,9 +206,7 @@ def _check_dpdk_hugepages(
> for dpdk_runtime_dir in dpdk_runtime_dirs:
> hugepage_info = PurePosixPath(dpdk_runtime_dir,
> "hugepage_info")
> if self._remote_files_exists(hugepage_info):
> - out = self.remote_session.send_command(
> - f"lsof -Fp {hugepage_info}"
> - ).stdout
> + out = self.send_command(f"lsof -Fp
> {hugepage_info}").stdout
> if out and "No such file or directory" not in out:
> self._logger.warning("Some DPDK processes did not
> free hugepages.")
>
> self._logger.warning("*******************************************")
> diff --git a/dts/framework/remote_session/remote/remote_session.py
> b/dts/framework/remote_session/remote/remote_session.py
> index 91dee3cb4f..0647d93de4 100644
> --- a/dts/framework/remote_session/remote/remote_session.py
> +++ b/dts/framework/remote_session/remote/remote_session.py
> @@ -11,7 +11,6 @@
> from framework.exception import RemoteCommandExecutionError
> from framework.logger import DTSLOG
> from framework.settings import SETTINGS
> -from framework.utils import EnvVarsDict
>
>
> @dataclasses.dataclass(slots=True, frozen=True)
> @@ -89,7 +88,7 @@ def send_command(
> command: str,
> timeout: float = SETTINGS.timeout,
> verify: bool = False,
> - env: EnvVarsDict | None = None,
> + env: dict | None = None,
> ) -> CommandResult:
> """
> Send a command to the connected node using optional env vars
> @@ -114,7 +113,7 @@ def send_command(
>
> @abstractmethod
> def _send_command(
> - self, command: str, timeout: float, env: EnvVarsDict | None
> + self, command: str, timeout: float, env: dict | None
> ) -> CommandResult:
> """
> Use the underlying protocol to execute the command using optional
> env vars
> @@ -141,15 +140,33 @@ def is_alive(self) -> bool:
> """
>
> @abstractmethod
> - def copy_file(
> + def copy_from(
> self,
> source_file: str | PurePath,
> destination_file: str | PurePath,
> - source_remote: bool = False,
> ) -> None:
> + """Copy a file from the remote Node to the local filesystem.
> +
> + Copy source_file from the remote Node associated with this remote
> + session to destination_file on the local filesystem.
> +
> + Args:
> + source_file: the file on the remote Node.
> + destination_file: a file or directory path on the local
> filesystem.
> """
> - Copy source_file from local filesystem to destination_file on the
> remote Node
> - associated with the remote session.
> - If source_remote is True, reverse the direction - copy
> source_file from the
> - associated Node to destination_file on local filesystem.
> +
> + @abstractmethod
> + def copy_to(
> + self,
> + source_file: str | PurePath,
> + destination_file: str | PurePath,
> + ) -> None:
> + """Copy a file from local filesystem to the remote Node.
> +
> + Copy source_file from local filesystem to destination_file
> + on the remote Node associated with this remote session.
> +
> + Args:
> + source_file: the file on the local filesystem.
> + destination_file: a file or directory path on the remote Node.
> """
> diff --git a/dts/framework/remote_session/remote/ssh_session.py
> b/dts/framework/remote_session/remote/ssh_session.py
> index 42ff9498a2..8d127f1601 100644
> --- a/dts/framework/remote_session/remote/ssh_session.py
> +++ b/dts/framework/remote_session/remote/ssh_session.py
> @@ -1,29 +1,49 @@
> # SPDX-License-Identifier: BSD-3-Clause
> -# Copyright(c) 2010-2014 Intel Corporation
> -# Copyright(c) 2022-2023 PANTHEON.tech s.r.o.
> -# Copyright(c) 2022-2023 University of New Hampshire
> +# Copyright(c) 2023 PANTHEON.tech s.r.o.
>
> -import time
> +import socket
> +import traceback
> from pathlib import PurePath
>
> -import pexpect # type: ignore
> -from pexpect import pxssh # type: ignore
> +from fabric import Connection # type: ignore[import]
> +from invoke.exceptions import ( # type: ignore[import]
> + CommandTimedOut,
> + ThreadException,
> + UnexpectedExit,
> +)
> +from paramiko.ssh_exception import ( # type: ignore[import]
> + AuthenticationException,
> + BadHostKeyException,
> + NoValidConnectionsError,
> + SSHException,
> +)
>
> from framework.config import NodeConfiguration
> from framework.exception import SSHConnectionError, SSHSessionDeadError,
> SSHTimeoutError
> from framework.logger import DTSLOG
> -from framework.utils import GREEN, RED, EnvVarsDict
>
> from .remote_session import CommandResult, RemoteSession
>
>
> class SSHSession(RemoteSession):
> - """
> - Module for creating Pexpect SSH remote sessions.
> + """A persistent SSH connection to a remote Node.
> +
> + The connection is implemented with the Fabric Python library.
> +
> + Args:
> + node_config: The configuration of the Node to connect to.
> + session_name: The name of the session.
> + logger: The logger used for logging.
> + This should be passed from the parent OSSession.
> +
> + Attributes:
> + session: The underlying Fabric SSH connection.
> +
> + Raises:
> + SSHConnectionError: The connection cannot be established.
> """
>
> - session: pxssh.pxssh
> - magic_prompt: str
> + session: Connection
>
> def __init__(
> self,
> @@ -31,218 +51,91 @@ def __init__(
> session_name: str,
> logger: DTSLOG,
> ):
> - self.magic_prompt = "MAGIC PROMPT"
> super(SSHSession, self).__init__(node_config, session_name,
> logger)
>
> def _connect(self) -> None:
> - """
> - Create connection to assigned node.
> - """
> + errors = []
> retry_attempts = 10
> login_timeout = 20 if self.port else 10
> - password_regex = (
> - r"(?i)(?:password:)|(?:passphrase for key)|(?i)(password for
> .+:)"
> - )
> - try:
> - for retry_attempt in range(retry_attempts):
> - self.session = pxssh.pxssh(encoding="utf-8")
> - try:
> - self.session.login(
> - self.ip,
> - self.username,
> - self.password,
> - original_prompt="[$#>]",
> - port=self.port,
> - login_timeout=login_timeout,
> - password_regex=password_regex,
> - )
> - break
> - except Exception as e:
> - self._logger.warning(e)
> - time.sleep(2)
> - self._logger.info(
> - f"Retrying connection: retry number
> {retry_attempt + 1}."
> - )
> - else:
> - raise Exception(f"Connection to {self.hostname} failed")
> -
> - self.send_expect("stty -echo", "#")
> - self.send_expect("stty columns 1000", "#")
> - self.send_expect("bind 'set enable-bracketed-paste off'", "#")
> - except Exception as e:
> - self._logger.error(RED(str(e)))
> - if getattr(self, "port", None):
> - suggestion = (
> - f"\nSuggestion: Check if the firewall on
> {self.hostname} is "
> - f"stopped.\n"
> + for retry_attempt in range(retry_attempts):
> + try:
> + self.session = Connection(
> + self.ip,
> + user=self.username,
> + port=self.port,
> + connect_kwargs={"password": self.password},
> + connect_timeout=login_timeout,
> )
> - self._logger.info(GREEN(suggestion))
> -
> - raise SSHConnectionError(self.hostname)
> + self.session.open()
>
> - def send_expect(
> - self, command: str, prompt: str, timeout: float = 15, verify:
> bool = False
> - ) -> str | int:
> - try:
> - ret = self.send_expect_base(command, prompt, timeout)
> - if verify:
> - ret_status = self.send_expect_base("echo $?", prompt,
> timeout)
> - try:
> - retval = int(ret_status)
> - if retval:
> - self._logger.error(f"Command: {command} failure!")
> - self._logger.error(ret)
> - return retval
> - else:
> - return ret
> - except ValueError:
> - return ret
> - else:
> - return ret
> - except Exception as e:
> - self._logger.error(
> - f"Exception happened in [{command}] and output is "
> - f"[{self._get_output()}]"
> - )
> - raise e
> -
> - def send_expect_base(self, command: str, prompt: str, timeout: float)
> -> str:
> - self._clean_session()
> - original_prompt = self.session.PROMPT
> - self.session.PROMPT = prompt
> - self._send_line(command)
> - self._prompt(command, timeout)
> -
> - before = self._get_output()
> - self.session.PROMPT = original_prompt
> - return before
> -
> - def _clean_session(self) -> None:
> - self.session.PROMPT = self.magic_prompt
> - self.get_output(timeout=0.01)
> - self.session.PROMPT = self.session.UNIQUE_PROMPT
> -
> - def _send_line(self, command: str) -> None:
> - if not self.is_alive():
> - raise SSHSessionDeadError(self.hostname)
> - if len(command) == 2 and command.startswith("^"):
> - self.session.sendcontrol(command[1])
> - else:
> - self.session.sendline(command)
> + except (ValueError, BadHostKeyException,
> AuthenticationException) as e:
> + self._logger.exception(e)
> + raise SSHConnectionError(self.hostname) from e
>
> - def _prompt(self, command: str, timeout: float) -> None:
> - if not self.session.prompt(timeout):
> - raise SSHTimeoutError(command, self._get_output()) from None
> + except (NoValidConnectionsError, socket.error, SSHException)
> as e:
> + self._logger.debug(traceback.format_exc())
> + self._logger.warning(e)
>
> - def get_output(self, timeout: float = 15) -> str:
> - """
> - Get all output before timeout
> - """
> - try:
> - self.session.prompt(timeout)
> - except Exception:
> - pass
> -
> - before = self._get_output()
> - self._flush()
> -
> - return before
> + error = repr(e)
> + if error not in errors:
> + errors.append(error)
>
> - def _get_output(self) -> str:
> - if not self.is_alive():
> - raise SSHSessionDeadError(self.hostname)
> - before = self.session.before.rsplit("\r\n", 1)[0]
> - if before == "[PEXPECT]":
> - return ""
> - return before
> + self._logger.info(
> + f"Retrying connection: retry number {retry_attempt +
> 1}."
> + )
>
> - def _flush(self) -> None:
> - """
> - Clear all session buffer
> - """
> - self.session.buffer = ""
> - self.session.before = ""
> + else:
> + break
> + else:
> + raise SSHConnectionError(self.hostname, errors)
>
> def is_alive(self) -> bool:
> - return self.session.isalive()
> + return self.session.is_connected
>
> def _send_command(
> - self, command: str, timeout: float, env: EnvVarsDict | None
> + self, command: str, timeout: float, env: dict | None
> ) -> CommandResult:
> - output = self._send_command_get_output(command, timeout, env)
> - return_code = int(self._send_command_get_output("echo $?",
> timeout, None))
> + """Send a command and return the result of the execution.
>
> - # we're capturing only stdout
> - return CommandResult(self.name, command, output, "", return_code)
> + Args:
> + command: The command to execute.
> + timeout: Wait at most this many seconds for the execution to
> complete.
> + env: Extra environment variables that will be used in command
> execution.
>
> - def _send_command_get_output(
> - self, command: str, timeout: float, env: EnvVarsDict | None
> - ) -> str:
> + Raises:
> + SSHSessionDeadError: The session died while executing the
> command.
> + SSHTimeoutError: The command execution timed out.
> + """
> try:
> - self._clean_session()
> - if env:
> - command = f"{env} {command}"
> - self._send_line(command)
> - except Exception as e:
> - raise e
> + output = self.session.run(
> + command, env=env, warn=True, hide=True, timeout=timeout
> + )
>
> - output = self.get_output(timeout=timeout)
> - self.session.PROMPT = self.session.UNIQUE_PROMPT
> - self.session.prompt(0.1)
> + except (UnexpectedExit, ThreadException) as e:
> + self._logger.exception(e)
> + raise SSHSessionDeadError(self.hostname) from e
>
> - return output
> + except CommandTimedOut as e:
> + self._logger.exception(e)
> + raise SSHTimeoutError(command, e.result.stderr) from e
>
> - def _close(self, force: bool = False) -> None:
> - if force is True:
> - self.session.close()
> - else:
> - if self.is_alive():
> - self.session.logout()
> + return CommandResult(
> + self.name, command, output.stdout, output.stderr,
> output.return_code
> + )
>
> - def copy_file(
> + def copy_from(
> self,
> source_file: str | PurePath,
> destination_file: str | PurePath,
> - source_remote: bool = False,
> ) -> None:
> - """
> - Send a local file to a remote host.
> - """
> - if source_remote:
> - source_file = f"{self.username}@{self.ip}:{source_file}"
> - else:
> - destination_file = f"{self.username}@
> {self.ip}:{destination_file}"
> + self.session.get(str(destination_file), str(source_file))
>
> - port = ""
> - if self.port:
> - port = f" -P {self.port}"
> -
> - command = (
> - f"scp -v{port} -o NoHostAuthenticationForLocalhost=yes"
> - f" {source_file} {destination_file}"
> - )
> -
> - self._spawn_scp(command)
> + def copy_to(
> + self,
> + source_file: str | PurePath,
> + destination_file: str | PurePath,
> + ) -> None:
> + self.session.put(str(source_file), str(destination_file))
>
> - def _spawn_scp(self, scp_cmd: str) -> None:
> - """
> - Transfer a file with SCP
> - """
> - self._logger.info(scp_cmd)
> - p: pexpect.spawn = pexpect.spawn(scp_cmd)
> - time.sleep(0.5)
> - ssh_newkey: str = "Are you sure you want to continue connecting"
> - i: int = p.expect(
> - [ssh_newkey, "[pP]assword", "# ", pexpect.EOF,
> pexpect.TIMEOUT], 120
> - )
> - if i == 0: # add once in trust list
> - p.sendline("yes")
> - i = p.expect([ssh_newkey, "[pP]assword", pexpect.EOF], 2)
> -
> - if i == 1:
> - time.sleep(0.5)
> - p.sendline(self.password)
> - p.expect("Exit status 0", 60)
> - if i == 4:
> - self._logger.error("SCP TIMEOUT error %d" % i)
> - p.close()
> + def _close(self, force: bool = False) -> None:
> + self.session.close()
> diff --git a/dts/framework/testbed_model/sut_node.py
> b/dts/framework/testbed_model/sut_node.py
> index 2b2b50d982..9dbc390848 100644
> --- a/dts/framework/testbed_model/sut_node.py
> +++ b/dts/framework/testbed_model/sut_node.py
> @@ -10,7 +10,7 @@
> from framework.config import BuildTargetConfiguration, NodeConfiguration
> from framework.remote_session import CommandResult, OSSession
> from framework.settings import SETTINGS
> -from framework.utils import EnvVarsDict, MesonArgs
> +from framework.utils import MesonArgs
>
> from .hw import LogicalCoreCount, LogicalCoreList, VirtualDevice
> from .node import Node
> @@ -27,7 +27,7 @@ class SutNode(Node):
> _dpdk_prefix_list: list[str]
> _dpdk_timestamp: str
> _build_target_config: BuildTargetConfiguration | None
> - _env_vars: EnvVarsDict
> + _env_vars: dict
> _remote_tmp_dir: PurePath
> __remote_dpdk_dir: PurePath | None
> _dpdk_version: str | None
> @@ -38,7 +38,7 @@ def __init__(self, node_config: NodeConfiguration):
> super(SutNode, self).__init__(node_config)
> self._dpdk_prefix_list = []
> self._build_target_config = None
> - self._env_vars = EnvVarsDict()
> + self._env_vars = {}
> self._remote_tmp_dir = self.main_session.get_remote_tmp_dir()
> self.__remote_dpdk_dir = None
> self._dpdk_version = None
> @@ -94,7 +94,7 @@ def _configure_build_target(
> """
> Populate common environment variables and set build target config.
> """
> - self._env_vars = EnvVarsDict()
> + self._env_vars = {}
> self._build_target_config = build_target_config
> self._env_vars.update(
>
> self.main_session.get_dpdk_build_env_vars(build_target_config.arch)
> @@ -112,7 +112,7 @@ def _copy_dpdk_tarball(self) -> None:
> Copy to and extract DPDK tarball on the SUT node.
> """
> self._logger.info("Copying DPDK tarball to SUT.")
> - self.main_session.copy_file(SETTINGS.dpdk_tarball_path,
> self._remote_tmp_dir)
> + self.main_session.copy_to(SETTINGS.dpdk_tarball_path,
> self._remote_tmp_dir)
>
> # construct remote tarball path
> # the basename is the same on local host and on remote Node
> @@ -259,7 +259,7 @@ def run_dpdk_app(
> Run DPDK application on the remote node.
> """
> return self.main_session.send_command(
> - f"{app_path} {eal_args}", timeout, verify=True
> + f"{app_path} {eal_args}", timeout, privileged=True,
> verify=True
> )
>
>
> diff --git a/dts/framework/utils.py b/dts/framework/utils.py
> index 55e0b0ef0e..8cfbc6a29d 100644
> --- a/dts/framework/utils.py
> +++ b/dts/framework/utils.py
> @@ -42,19 +42,10 @@ def expand_range(range_str: str) -> list[int]:
> return expanded_range
>
>
> -def GREEN(text: str) -> str:
> - return f"\u001B[32;1m{str(text)}\u001B[0m"
> -
> -
> def RED(text: str) -> str:
> return f"\u001B[31;1m{str(text)}\u001B[0m"
>
>
> -class EnvVarsDict(dict):
> - def __str__(self) -> str:
> - return " ".join(["=".join(item) for item in self.items()])
> -
> -
> class MesonArgs(object):
> """
> Aggregate the arguments needed to build DPDK:
> diff --git a/dts/poetry.lock b/dts/poetry.lock
> index 0b2a007d4d..2438f337cd 100644
> --- a/dts/poetry.lock
> +++ b/dts/poetry.lock
> @@ -12,6 +12,18 @@ docs = ["furo", "sphinx", "zope.interface",
> "sphinx-notfound-page"]
> tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest
> (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins",
> "zope.interface", "cloudpickle"]
> tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler",
> "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins",
> "cloudpickle"]
>
> +[[package]]
> +name = "bcrypt"
> +version = "4.0.1"
> +description = "Modern password hashing for your software and your servers"
> +category = "main"
> +optional = false
> +python-versions = ">=3.6"
> +
> +[package.extras]
> +tests = ["pytest (>=3.2.1,!=3.3.0)"]
> +typecheck = ["mypy"]
> +
> [[package]]
> name = "black"
> version = "22.10.0"
> @@ -33,6 +45,17 @@ d = ["aiohttp (>=3.7.4)"]
> jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
> uvloop = ["uvloop (>=0.15.2)"]
>
> +[[package]]
> +name = "cffi"
> +version = "1.15.1"
> +description = "Foreign Function Interface for Python calling C code."
> +category = "main"
> +optional = false
> +python-versions = "*"
> +
> +[package.dependencies]
> +pycparser = "*"
> +
> [[package]]
> name = "click"
> version = "8.1.3"
> @@ -52,6 +75,52 @@ category = "dev"
> optional = false
> python-versions =
> "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
>
> +[[package]]
> +name = "cryptography"
> +version = "40.0.2"
> +description = "cryptography is a package which provides cryptographic
> recipes and primitives to Python developers."
> +category = "main"
> +optional = false
> +python-versions = ">=3.6"
> +
> +[package.dependencies]
> +cffi = ">=1.12"
> +
> +[package.extras]
> +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
> +docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)",
> "sphinxcontrib-spelling (>=4.0.1)"]
> +pep8test = ["black", "ruff", "mypy", "check-manifest"]
> +sdist = ["setuptools-rust (>=0.11.4)"]
> +ssh = ["bcrypt (>=3.1.5)"]
> +test = ["pytest (>=6.2.0)", "pytest-shard (>=0.1.2)", "pytest-benchmark",
> "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601"]
> +test-randomorder = ["pytest-randomly"]
> +tox = ["tox"]
> +
> +[[package]]
> +name = "fabric"
> +version = "2.7.1"
> +description = "High level SSH command execution"
> +category = "main"
> +optional = false
> +python-versions = "*"
> +
> +[package.dependencies]
> +invoke = ">=1.3,<2.0"
> +paramiko = ">=2.4"
> +pathlib2 = "*"
> +
> +[package.extras]
> +pytest = ["mock (>=2.0.0,<3.0)", "pytest (>=3.2.5,<4.0)"]
> +testing = ["mock (>=2.0.0,<3.0)"]
> +
> +[[package]]
> +name = "invoke"
> +version = "1.7.3"
> +description = "Pythonic task execution"
> +category = "main"
> +optional = false
> +python-versions = "*"
> +
> [[package]]
> name = "isort"
> version = "5.10.1"
> @@ -136,23 +205,41 @@ optional = false
> python-versions = "*"
>
> [[package]]
> -name = "pathspec"
> -version = "0.10.1"
> -description = "Utility library for gitignore style pattern matching of
> file paths."
> -category = "dev"
> +name = "paramiko"
> +version = "3.1.0"
> +description = "SSH2 protocol library"
> +category = "main"
> optional = false
> -python-versions = ">=3.7"
> +python-versions = ">=3.6"
> +
> +[package.dependencies]
> +bcrypt = ">=3.2"
> +cryptography = ">=3.3"
> +pynacl = ">=1.5"
> +
> +[package.extras]
> +all = ["pyasn1 (>=0.1.7)", "invoke (>=2.0)", "gssapi (>=1.4.1)", "pywin32
> (>=2.1.8)"]
> +gssapi = ["pyasn1 (>=0.1.7)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"]
> +invoke = ["invoke (>=2.0)"]
>
> [[package]]
> -name = "pexpect"
> -version = "4.8.0"
> -description = "Pexpect allows easy control of interactive console
> applications."
> +name = "pathlib2"
> +version = "2.3.7.post1"
> +description = "Object-oriented filesystem paths"
> category = "main"
> optional = false
> python-versions = "*"
>
> [package.dependencies]
> -ptyprocess = ">=0.5"
> +six = "*"
> +
> +[[package]]
> +name = "pathspec"
> +version = "0.10.1"
> +description = "Utility library for gitignore style pattern matching of
> file paths."
> +category = "dev"
> +optional = false
> +python-versions = ">=3.7"
>
> [[package]]
> name = "platformdirs"
> @@ -166,14 +253,6 @@ python-versions = ">=3.7"
> docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)",
> "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"]
> test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)",
> "pytest (>=6)"]
>
> -[[package]]
> -name = "ptyprocess"
> -version = "0.7.0"
> -description = "Run a subprocess in a pseudo terminal"
> -category = "main"
> -optional = false
> -python-versions = "*"
> -
> [[package]]
> name = "pycodestyle"
> version = "2.9.1"
> @@ -182,6 +261,14 @@ category = "dev"
> optional = false
> python-versions = ">=3.6"
>
> +[[package]]
> +name = "pycparser"
> +version = "2.21"
> +description = "C parser in Python"
> +category = "main"
> +optional = false
> +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
> +
> [[package]]
> name = "pydocstyle"
> version = "6.1.1"
> @@ -228,6 +315,21 @@ tests = ["pytest (>=7.1.2)", "pytest-mypy",
> "eradicate (>=2.0.0)", "radon (>=5.1
> toml = ["toml (>=0.10.2)"]
> vulture = ["vulture"]
>
> +[[package]]
> +name = "pynacl"
> +version = "1.5.0"
> +description = "Python binding to the Networking and Cryptography (NaCl)
> library"
> +category = "main"
> +optional = false
> +python-versions = ">=3.6"
> +
> +[package.dependencies]
> +cffi = ">=1.4.1"
> +
> +[package.extras]
> +docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"]
> +tests = ["pytest (>=3.2.1,!=3.3.0)", "hypothesis (>=3.27.0)"]
> +
> [[package]]
> name = "pyrsistent"
> version = "0.19.1"
> @@ -244,6 +346,14 @@ category = "main"
> optional = false
> python-versions = ">=3.6"
>
> +[[package]]
> +name = "six"
> +version = "1.16.0"
> +description = "Python 2 and 3 compatibility utilities"
> +category = "main"
> +optional = false
> +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
> +
> [[package]]
> name = "snowballstemmer"
> version = "2.2.0"
> @@ -299,13 +409,18 @@ jsonschema = ">=4,<5"
> [metadata]
> lock-version = "1.1"
> python-versions = "^3.10"
> -content-hash =
> "a0f040b07fc6ce4deb0be078b9a88c2a465cb6bccb9e260a67e92c2403e2319f"
> +content-hash =
> "719c43bcaa5d181921debda884f8f714063df0b2336d61e9f64ecab034e8b139"
>
> [metadata.files]
> attrs = []
> +bcrypt = []
> black = []
> +cffi = []
> click = []
> colorama = []
> +cryptography = []
> +fabric = []
> +invoke = []
> isort = []
> jsonpatch = []
> jsonpointer = []
> @@ -313,22 +428,22 @@ jsonschema = []
> mccabe = []
> mypy = []
> mypy-extensions = []
> +paramiko = []
> +pathlib2 = []
> pathspec = []
> -pexpect = [
> - {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash =
> "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"},
> - {file = "pexpect-4.8.0.tar.gz", hash =
> "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"},
> -]
> platformdirs = [
> {file = "platformdirs-2.5.2-py3-none-any.whl", hash =
> "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
> {file = "platformdirs-2.5.2.tar.gz", hash =
> "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},
> ]
> -ptyprocess = []
> pycodestyle = []
> +pycparser = []
> pydocstyle = []
> pyflakes = []
> pylama = []
> +pynacl = []
> pyrsistent = []
> pyyaml = []
> +six = []
> snowballstemmer = []
> toml = []
> tomli = []
> diff --git a/dts/pyproject.toml b/dts/pyproject.toml
> index a136c91e5e..50bcdb327a 100644
> --- a/dts/pyproject.toml
> +++ b/dts/pyproject.toml
> @@ -9,10 +9,10 @@ authors = ["Owen Hilyard <ohilyard@iol.unh.edu>", "
> dts@dpdk.org"]
>
> [tool.poetry.dependencies]
> python = "^3.10"
> -pexpect = "^4.8.0"
> warlock = "^2.0.1"
> PyYAML = "^6.0"
> types-PyYAML = "^6.0.8"
> +fabric = "^2.7.1"
>
> [tool.poetry.dev-dependencies]
> mypy = "^0.961"
> --
> 2.34.1
>
>
[-- Attachment #2: Type: text/html, Size: 60092 bytes --]
^ permalink raw reply [flat|nested] 21+ messages in thread
* Re: [PATCH v3] dts: replace pexpect with fabric
2023-06-21 18:33 ` Jeremy Spewock
@ 2023-07-05 19:59 ` Jeremy Spewock
2023-07-12 16:34 ` Thomas Monjalon
1 sibling, 0 replies; 21+ messages in thread
From: Jeremy Spewock @ 2023-07-05 19:59 UTC (permalink / raw)
To: Juraj Linkeš
Cc: thomas, Honnappa.Nagarahalli, lijuan.tu, wathsala.vithanage, probb, dev
[-- Attachment #1: Type: text/plain, Size: 50197 bytes --]
Tested-by: Jeremy Spewock <jspewock@iol.unh.edu>
On Wed, Jun 21, 2023 at 2:33 PM Jeremy Spewock <jspewock@iol.unh.edu> wrote:
> Acked-by: Jeremy Spewock <jspewock@iol.unh.edu>
>
> On Fri, Jun 9, 2023 at 5:46 AM Juraj Linkeš <juraj.linkes@pantheon.tech>
> wrote:
>
>> Pexpect is not a dedicated SSH connection library while Fabric is. With
>> Fabric, all SSH-related logic is provided and we can just focus on
>> what's DTS specific.
>>
>> Signed-off-by: Juraj Linkeš <juraj.linkes@pantheon.tech>
>> ---
>>
>> Notes:
>> v3: updated passwordless sudo setup on Linux
>>
>> doc/guides/tools/dts.rst | 29 +-
>> dts/conf.yaml | 2 +-
>> dts/framework/exception.py | 10 +-
>> dts/framework/remote_session/linux_session.py | 31 +-
>> dts/framework/remote_session/os_session.py | 51 +++-
>> dts/framework/remote_session/posix_session.py | 48 +--
>> .../remote_session/remote/remote_session.py | 35 ++-
>> .../remote_session/remote/ssh_session.py | 287 ++++++------------
>> dts/framework/testbed_model/sut_node.py | 12 +-
>> dts/framework/utils.py | 9 -
>> dts/poetry.lock | 161 ++++++++--
>> dts/pyproject.toml | 2 +-
>> 12 files changed, 376 insertions(+), 301 deletions(-)
>>
>> diff --git a/doc/guides/tools/dts.rst b/doc/guides/tools/dts.rst
>> index ebd6dceb6a..c7b31623e4 100644
>> --- a/doc/guides/tools/dts.rst
>> +++ b/doc/guides/tools/dts.rst
>> @@ -95,9 +95,14 @@ Setting up DTS environment
>>
>> #. **SSH Connection**
>>
>> - DTS uses Python pexpect for SSH connections between DTS environment
>> and the other hosts.
>> - The pexpect implementation is a wrapper around the ssh command in the
>> DTS environment.
>> - This means it'll use the SSH agent providing the ssh command and its
>> keys.
>> + DTS uses the Fabric Python library for SSH connections between DTS
>> environment
>> + and the other hosts.
>> + The authentication method used is pubkey authentication.
>> + Fabric tries to use a passed key/certificate,
>> + then any key it can with through an SSH agent,
>> + then any "id_rsa", "id_dsa" or "id_ecdsa" key discoverable in
>> ``~/.ssh/``
>> + (with any matching OpenSSH-style certificates).
>> + DTS doesn't pass any keys, so Fabric tries to use the other two
>> methods.
>>
>>
>> Setting up System Under Test
>> @@ -132,6 +137,21 @@ There are two areas that need to be set up on a
>> System Under Test:
>> It's possible to use the hugepage configuration already present on
>> the SUT.
>> If you wish to do so, don't specify the hugepage configuration in
>> the DTS config file.
>>
>> +#. **User with administrator privileges**
>> +
>> +.. _sut_admin_user:
>> +
>> + DTS needs administrator privileges to run DPDK applications (such as
>> testpmd) on the SUT.
>> + The SUT user must be able run commands in privileged mode without
>> asking for password.
>> + On most Linux distributions, it's a matter of setting up passwordless
>> sudo:
>> +
>> + #. Run ``sudo visudo`` and check that it contains ``%sudo
>> ALL=(ALL:ALL) NOPASSWD:ALL``.
>> +
>> + #. Add the SUT user to the sudo group with:
>> +
>> + .. code-block:: console
>> +
>> + sudo usermod -aG sudo <sut_user>
>>
>> Running DTS
>> -----------
>> @@ -151,7 +171,8 @@ which is a template that illustrates what can be
>> configured in DTS:
>> :start-at: executions:
>>
>>
>> -The user must be root or any other user with prompt starting with ``#``.
>> +The user must have :ref:`administrator privileges <sut_admin_user>`
>> +which don't require password authentication.
>> The other fields are mostly self-explanatory
>> and documented in more detail in
>> ``dts/framework/config/conf_yaml_schema.json``.
>>
>> diff --git a/dts/conf.yaml b/dts/conf.yaml
>> index a9bd8a3ecf..129801d87c 100644
>> --- a/dts/conf.yaml
>> +++ b/dts/conf.yaml
>> @@ -16,7 +16,7 @@ executions:
>> nodes:
>> - name: "SUT 1"
>> hostname: sut1.change.me.localhost
>> - user: root
>> + user: dtsuser
>> arch: x86_64
>> os: linux
>> lcores: ""
>> diff --git a/dts/framework/exception.py b/dts/framework/exception.py
>> index ca353d98fc..44ff4e979a 100644
>> --- a/dts/framework/exception.py
>> +++ b/dts/framework/exception.py
>> @@ -62,13 +62,19 @@ class SSHConnectionError(DTSError):
>> """
>>
>> host: str
>> + errors: list[str]
>> severity: ClassVar[ErrorSeverity] = ErrorSeverity.SSH_ERR
>>
>> - def __init__(self, host: str):
>> + def __init__(self, host: str, errors: list[str] | None = None):
>> self.host = host
>> + self.errors = [] if errors is None else errors
>>
>> def __str__(self) -> str:
>> - return f"Error trying to connect with {self.host}"
>> + message = f"Error trying to connect with {self.host}."
>> + if self.errors:
>> + message += f" Errors encountered while retrying: {',
>> '.join(self.errors)}"
>> +
>> + return message
>>
>>
>> class SSHSessionDeadError(DTSError):
>> diff --git a/dts/framework/remote_session/linux_session.py
>> b/dts/framework/remote_session/linux_session.py
>> index a1e3bc3a92..f13f399121 100644
>> --- a/dts/framework/remote_session/linux_session.py
>> +++ b/dts/framework/remote_session/linux_session.py
>> @@ -14,10 +14,11 @@ class LinuxSession(PosixSession):
>> The implementation of non-Posix compliant parts of Linux remote
>> sessions.
>> """
>>
>> + def _get_privileged_command(self, command: str) -> str:
>> + return f"sudo -- sh -c '{command}'"
>> +
>> def get_remote_cpus(self, use_first_core: bool) -> list[LogicalCore]:
>> - cpu_info = self.remote_session.send_command(
>> - "lscpu -p=CPU,CORE,SOCKET,NODE|grep -v \\#"
>> - ).stdout
>> + cpu_info = self.send_command("lscpu -p=CPU,CORE,SOCKET,NODE|grep
>> -v \\#").stdout
>> lcores = []
>> for cpu_line in cpu_info.splitlines():
>> lcore, core, socket, node = map(int, cpu_line.split(","))
>> @@ -45,20 +46,20 @@ def setup_hugepages(self, hugepage_amount: int,
>> force_first_numa: bool) -> None:
>> self._mount_huge_pages()
>>
>> def _get_hugepage_size(self) -> int:
>> - hugepage_size = self.remote_session.send_command(
>> + hugepage_size = self.send_command(
>> "awk '/Hugepagesize/ {print $2}' /proc/meminfo"
>> ).stdout
>> return int(hugepage_size)
>>
>> def _get_hugepages_total(self) -> int:
>> - hugepages_total = self.remote_session.send_command(
>> + hugepages_total = self.send_command(
>> "awk '/HugePages_Total/ { print $2 }' /proc/meminfo"
>> ).stdout
>> return int(hugepages_total)
>>
>> def _get_numa_nodes(self) -> list[int]:
>> try:
>> - numa_count = self.remote_session.send_command(
>> + numa_count = self.send_command(
>> "cat /sys/devices/system/node/online", verify=True
>> ).stdout
>> numa_range = expand_range(numa_count)
>> @@ -70,14 +71,12 @@ def _get_numa_nodes(self) -> list[int]:
>> def _mount_huge_pages(self) -> None:
>> self._logger.info("Re-mounting Hugepages.")
>> hugapge_fs_cmd = "awk '/hugetlbfs/ { print $2 }' /proc/mounts"
>> - self.remote_session.send_command(f"umount $({hugapge_fs_cmd})")
>> - result = self.remote_session.send_command(hugapge_fs_cmd)
>> + self.send_command(f"umount $({hugapge_fs_cmd})")
>> + result = self.send_command(hugapge_fs_cmd)
>> if result.stdout == "":
>> remote_mount_path = "/mnt/huge"
>> - self.remote_session.send_command(f"mkdir -p
>> {remote_mount_path}")
>> - self.remote_session.send_command(
>> - f"mount -t hugetlbfs nodev {remote_mount_path}"
>> - )
>> + self.send_command(f"mkdir -p {remote_mount_path}")
>> + self.send_command(f"mount -t hugetlbfs nodev
>> {remote_mount_path}")
>>
>> def _supports_numa(self) -> bool:
>> # the system supports numa if self._numa_nodes is non-empty and
>> there are more
>> @@ -94,14 +93,12 @@ def _configure_huge_pages(
>> )
>> if force_first_numa and self._supports_numa():
>> # clear non-numa hugepages
>> - self.remote_session.send_command(
>> - f"echo 0 | sudo tee {hugepage_config_path}"
>> - )
>> + self.send_command(f"echo 0 | tee {hugepage_config_path}",
>> privileged=True)
>> hugepage_config_path = (
>>
>> f"/sys/devices/system/node/node{self._numa_nodes[0]}/hugepages"
>> f"/hugepages-{size}kB/nr_hugepages"
>> )
>>
>> - self.remote_session.send_command(
>> - f"echo {amount} | sudo tee {hugepage_config_path}"
>> + self.send_command(
>> + f"echo {amount} | tee {hugepage_config_path}",
>> privileged=True
>> )
>> diff --git a/dts/framework/remote_session/os_session.py
>> b/dts/framework/remote_session/os_session.py
>> index 4c48ae2567..bfd70bd480 100644
>> --- a/dts/framework/remote_session/os_session.py
>> +++ b/dts/framework/remote_session/os_session.py
>> @@ -10,7 +10,7 @@
>> from framework.logger import DTSLOG
>> from framework.settings import SETTINGS
>> from framework.testbed_model import LogicalCore
>> -from framework.utils import EnvVarsDict, MesonArgs
>> +from framework.utils import MesonArgs
>>
>> from .remote import CommandResult, RemoteSession, create_remote_session
>>
>> @@ -53,17 +53,32 @@ def is_alive(self) -> bool:
>> def send_command(
>> self,
>> command: str,
>> - timeout: float,
>> + timeout: float = SETTINGS.timeout,
>> + privileged: bool = False,
>> verify: bool = False,
>> - env: EnvVarsDict | None = None,
>> + env: dict | None = None,
>> ) -> CommandResult:
>> """
>> An all-purpose API in case the command to be executed is already
>> OS-agnostic, such as when the path to the executed command has
>> been
>> constructed beforehand.
>> """
>> + if privileged:
>> + command = self._get_privileged_command(command)
>> +
>> return self.remote_session.send_command(command, timeout,
>> verify, env)
>>
>> + @abstractmethod
>> + def _get_privileged_command(self, command: str) -> str:
>> + """Modify the command so that it executes with administrative
>> privileges.
>> +
>> + Args:
>> + command: The command to modify.
>> +
>> + Returns:
>> + The modified command that executes with administrative
>> privileges.
>> + """
>> +
>> @abstractmethod
>> def guess_dpdk_remote_dir(self, remote_dir) -> PurePath:
>> """
>> @@ -90,17 +105,35 @@ def join_remote_path(self, *args: str | PurePath) ->
>> PurePath:
>> """
>>
>> @abstractmethod
>> - def copy_file(
>> + def copy_from(
>> self,
>> source_file: str | PurePath,
>> destination_file: str | PurePath,
>> - source_remote: bool = False,
>> ) -> None:
>> + """Copy a file from the remote Node to the local filesystem.
>> +
>> + Copy source_file from the remote Node associated with this remote
>> + session to destination_file on the local filesystem.
>> +
>> + Args:
>> + source_file: the file on the remote Node.
>> + destination_file: a file or directory path on the local
>> filesystem.
>> """
>> +
>> + @abstractmethod
>> + def copy_to(
>> + self,
>> + source_file: str | PurePath,
>> + destination_file: str | PurePath,
>> + ) -> None:
>> + """Copy a file from local filesystem to the remote Node.
>> +
>> Copy source_file from local filesystem to destination_file
>> - on the remote Node associated with the remote session.
>> - If source_remote is True, reverse the direction - copy
>> source_file from the
>> - associated remote Node to destination_file on local storage.
>> + on the remote Node associated with this remote session.
>> +
>> + Args:
>> + source_file: the file on the local filesystem.
>> + destination_file: a file or directory path on the remote
>> Node.
>> """
>>
>> @abstractmethod
>> @@ -128,7 +161,7 @@ def extract_remote_tarball(
>> @abstractmethod
>> def build_dpdk(
>> self,
>> - env_vars: EnvVarsDict,
>> + env_vars: dict,
>> meson_args: MesonArgs,
>> remote_dpdk_dir: str | PurePath,
>> remote_dpdk_build_dir: str | PurePath,
>> diff --git a/dts/framework/remote_session/posix_session.py
>> b/dts/framework/remote_session/posix_session.py
>> index d38062e8d6..8ca0acb429 100644
>> --- a/dts/framework/remote_session/posix_session.py
>> +++ b/dts/framework/remote_session/posix_session.py
>> @@ -9,7 +9,7 @@
>> from framework.config import Architecture
>> from framework.exception import DPDKBuildError,
>> RemoteCommandExecutionError
>> from framework.settings import SETTINGS
>> -from framework.utils import EnvVarsDict, MesonArgs
>> +from framework.utils import MesonArgs
>>
>> from .os_session import OSSession
>>
>> @@ -34,7 +34,7 @@ def combine_short_options(**opts: bool) -> str:
>>
>> def guess_dpdk_remote_dir(self, remote_dir) -> PurePosixPath:
>> remote_guess = self.join_remote_path(remote_dir, "dpdk-*")
>> - result = self.remote_session.send_command(f"ls -d {remote_guess}
>> | tail -1")
>> + result = self.send_command(f"ls -d {remote_guess} | tail -1")
>> return PurePosixPath(result.stdout)
>>
>> def get_remote_tmp_dir(self) -> PurePosixPath:
>> @@ -48,7 +48,7 @@ def get_dpdk_build_env_vars(self, arch: Architecture)
>> -> dict:
>> env_vars = {}
>> if arch == Architecture.i686:
>> # find the pkg-config path and store it in PKG_CONFIG_LIBDIR
>> - out = self.remote_session.send_command("find /usr -type d
>> -name pkgconfig")
>> + out = self.send_command("find /usr -type d -name pkgconfig")
>> pkg_path = ""
>> res_path = out.stdout.split("\r\n")
>> for cur_path in res_path:
>> @@ -65,13 +65,19 @@ def get_dpdk_build_env_vars(self, arch: Architecture)
>> -> dict:
>> def join_remote_path(self, *args: str | PurePath) -> PurePosixPath:
>> return PurePosixPath(*args)
>>
>> - def copy_file(
>> + def copy_from(
>> self,
>> source_file: str | PurePath,
>> destination_file: str | PurePath,
>> - source_remote: bool = False,
>> ) -> None:
>> - self.remote_session.copy_file(source_file, destination_file,
>> source_remote)
>> + self.remote_session.copy_from(source_file, destination_file)
>> +
>> + def copy_to(
>> + self,
>> + source_file: str | PurePath,
>> + destination_file: str | PurePath,
>> + ) -> None:
>> + self.remote_session.copy_to(source_file, destination_file)
>>
>> def remove_remote_dir(
>> self,
>> @@ -80,24 +86,24 @@ def remove_remote_dir(
>> force: bool = True,
>> ) -> None:
>> opts = PosixSession.combine_short_options(r=recursive, f=force)
>> - self.remote_session.send_command(f"rm{opts} {remote_dir_path}")
>> + self.send_command(f"rm{opts} {remote_dir_path}")
>>
>> def extract_remote_tarball(
>> self,
>> remote_tarball_path: str | PurePath,
>> expected_dir: str | PurePath | None = None,
>> ) -> None:
>> - self.remote_session.send_command(
>> + self.send_command(
>> f"tar xfm {remote_tarball_path} "
>> f"-C {PurePosixPath(remote_tarball_path).parent}",
>> 60,
>> )
>> if expected_dir:
>> - self.remote_session.send_command(f"ls {expected_dir}",
>> verify=True)
>> + self.send_command(f"ls {expected_dir}", verify=True)
>>
>> def build_dpdk(
>> self,
>> - env_vars: EnvVarsDict,
>> + env_vars: dict,
>> meson_args: MesonArgs,
>> remote_dpdk_dir: str | PurePath,
>> remote_dpdk_build_dir: str | PurePath,
>> @@ -108,7 +114,7 @@ def build_dpdk(
>> if rebuild:
>> # reconfigure, then build
>> self._logger.info("Reconfiguring DPDK build.")
>> - self.remote_session.send_command(
>> + self.send_command(
>> f"meson configure {meson_args}
>> {remote_dpdk_build_dir}",
>> timeout,
>> verify=True,
>> @@ -118,7 +124,7 @@ def build_dpdk(
>> # fresh build - remove target dir first, then build from
>> scratch
>> self._logger.info("Configuring DPDK build from
>> scratch.")
>> self.remove_remote_dir(remote_dpdk_build_dir)
>> - self.remote_session.send_command(
>> + self.send_command(
>> f"meson setup "
>> f"{meson_args} {remote_dpdk_dir}
>> {remote_dpdk_build_dir}",
>> timeout,
>> @@ -127,14 +133,14 @@ def build_dpdk(
>> )
>>
>> self._logger.info("Building DPDK.")
>> - self.remote_session.send_command(
>> + self.send_command(
>> f"ninja -C {remote_dpdk_build_dir}", timeout,
>> verify=True, env=env_vars
>> )
>> except RemoteCommandExecutionError as e:
>> raise DPDKBuildError(f"DPDK build failed when doing
>> '{e.command}'.")
>>
>> def get_dpdk_version(self, build_dir: str | PurePath) -> str:
>> - out = self.remote_session.send_command(
>> + out = self.send_command(
>> f"cat {self.join_remote_path(build_dir, 'VERSION')}",
>> verify=True
>> )
>> return out.stdout
>> @@ -146,7 +152,7 @@ def kill_cleanup_dpdk_apps(self, dpdk_prefix_list:
>> Iterable[str]) -> None:
>> # kill and cleanup only if DPDK is running
>> dpdk_pids = self._get_dpdk_pids(dpdk_runtime_dirs)
>> for dpdk_pid in dpdk_pids:
>> - self.remote_session.send_command(f"kill -9 {dpdk_pid}",
>> 20)
>> + self.send_command(f"kill -9 {dpdk_pid}", 20)
>> self._check_dpdk_hugepages(dpdk_runtime_dirs)
>> self._remove_dpdk_runtime_dirs(dpdk_runtime_dirs)
>>
>> @@ -168,7 +174,7 @@ def _list_remote_dirs(self, remote_path: str |
>> PurePath) -> list[str] | None:
>> Return a list of directories of the remote_dir.
>> If remote_path doesn't exist, return None.
>> """
>> - out = self.remote_session.send_command(
>> + out = self.send_command(
>> f"ls -l {remote_path} | awk '/^d/ {{print $NF}}'"
>> ).stdout
>> if "No such file or directory" in out:
>> @@ -182,9 +188,7 @@ def _get_dpdk_pids(self, dpdk_runtime_dirs:
>> Iterable[str | PurePath]) -> list[in
>> for dpdk_runtime_dir in dpdk_runtime_dirs:
>> dpdk_config_file = PurePosixPath(dpdk_runtime_dir, "config")
>> if self._remote_files_exists(dpdk_config_file):
>> - out = self.remote_session.send_command(
>> - f"lsof -Fp {dpdk_config_file}"
>> - ).stdout
>> + out = self.send_command(f"lsof -Fp
>> {dpdk_config_file}").stdout
>> if out and "No such file or directory" not in out:
>> for out_line in out.splitlines():
>> match = re.match(pid_regex, out_line)
>> @@ -193,7 +197,7 @@ def _get_dpdk_pids(self, dpdk_runtime_dirs:
>> Iterable[str | PurePath]) -> list[in
>> return pids
>>
>> def _remote_files_exists(self, remote_path: PurePath) -> bool:
>> - result = self.remote_session.send_command(f"test -e
>> {remote_path}")
>> + result = self.send_command(f"test -e {remote_path}")
>> return not result.return_code
>>
>> def _check_dpdk_hugepages(
>> @@ -202,9 +206,7 @@ def _check_dpdk_hugepages(
>> for dpdk_runtime_dir in dpdk_runtime_dirs:
>> hugepage_info = PurePosixPath(dpdk_runtime_dir,
>> "hugepage_info")
>> if self._remote_files_exists(hugepage_info):
>> - out = self.remote_session.send_command(
>> - f"lsof -Fp {hugepage_info}"
>> - ).stdout
>> + out = self.send_command(f"lsof -Fp
>> {hugepage_info}").stdout
>> if out and "No such file or directory" not in out:
>> self._logger.warning("Some DPDK processes did not
>> free hugepages.")
>>
>> self._logger.warning("*******************************************")
>> diff --git a/dts/framework/remote_session/remote/remote_session.py
>> b/dts/framework/remote_session/remote/remote_session.py
>> index 91dee3cb4f..0647d93de4 100644
>> --- a/dts/framework/remote_session/remote/remote_session.py
>> +++ b/dts/framework/remote_session/remote/remote_session.py
>> @@ -11,7 +11,6 @@
>> from framework.exception import RemoteCommandExecutionError
>> from framework.logger import DTSLOG
>> from framework.settings import SETTINGS
>> -from framework.utils import EnvVarsDict
>>
>>
>> @dataclasses.dataclass(slots=True, frozen=True)
>> @@ -89,7 +88,7 @@ def send_command(
>> command: str,
>> timeout: float = SETTINGS.timeout,
>> verify: bool = False,
>> - env: EnvVarsDict | None = None,
>> + env: dict | None = None,
>> ) -> CommandResult:
>> """
>> Send a command to the connected node using optional env vars
>> @@ -114,7 +113,7 @@ def send_command(
>>
>> @abstractmethod
>> def _send_command(
>> - self, command: str, timeout: float, env: EnvVarsDict | None
>> + self, command: str, timeout: float, env: dict | None
>> ) -> CommandResult:
>> """
>> Use the underlying protocol to execute the command using
>> optional env vars
>> @@ -141,15 +140,33 @@ def is_alive(self) -> bool:
>> """
>>
>> @abstractmethod
>> - def copy_file(
>> + def copy_from(
>> self,
>> source_file: str | PurePath,
>> destination_file: str | PurePath,
>> - source_remote: bool = False,
>> ) -> None:
>> + """Copy a file from the remote Node to the local filesystem.
>> +
>> + Copy source_file from the remote Node associated with this remote
>> + session to destination_file on the local filesystem.
>> +
>> + Args:
>> + source_file: the file on the remote Node.
>> + destination_file: a file or directory path on the local
>> filesystem.
>> """
>> - Copy source_file from local filesystem to destination_file on
>> the remote Node
>> - associated with the remote session.
>> - If source_remote is True, reverse the direction - copy
>> source_file from the
>> - associated Node to destination_file on local filesystem.
>> +
>> + @abstractmethod
>> + def copy_to(
>> + self,
>> + source_file: str | PurePath,
>> + destination_file: str | PurePath,
>> + ) -> None:
>> + """Copy a file from local filesystem to the remote Node.
>> +
>> + Copy source_file from local filesystem to destination_file
>> + on the remote Node associated with this remote session.
>> +
>> + Args:
>> + source_file: the file on the local filesystem.
>> + destination_file: a file or directory path on the remote
>> Node.
>> """
>> diff --git a/dts/framework/remote_session/remote/ssh_session.py
>> b/dts/framework/remote_session/remote/ssh_session.py
>> index 42ff9498a2..8d127f1601 100644
>> --- a/dts/framework/remote_session/remote/ssh_session.py
>> +++ b/dts/framework/remote_session/remote/ssh_session.py
>> @@ -1,29 +1,49 @@
>> # SPDX-License-Identifier: BSD-3-Clause
>> -# Copyright(c) 2010-2014 Intel Corporation
>> -# Copyright(c) 2022-2023 PANTHEON.tech s.r.o.
>> -# Copyright(c) 2022-2023 University of New Hampshire
>> +# Copyright(c) 2023 PANTHEON.tech s.r.o.
>>
>> -import time
>> +import socket
>> +import traceback
>> from pathlib import PurePath
>>
>> -import pexpect # type: ignore
>> -from pexpect import pxssh # type: ignore
>> +from fabric import Connection # type: ignore[import]
>> +from invoke.exceptions import ( # type: ignore[import]
>> + CommandTimedOut,
>> + ThreadException,
>> + UnexpectedExit,
>> +)
>> +from paramiko.ssh_exception import ( # type: ignore[import]
>> + AuthenticationException,
>> + BadHostKeyException,
>> + NoValidConnectionsError,
>> + SSHException,
>> +)
>>
>> from framework.config import NodeConfiguration
>> from framework.exception import SSHConnectionError, SSHSessionDeadError,
>> SSHTimeoutError
>> from framework.logger import DTSLOG
>> -from framework.utils import GREEN, RED, EnvVarsDict
>>
>> from .remote_session import CommandResult, RemoteSession
>>
>>
>> class SSHSession(RemoteSession):
>> - """
>> - Module for creating Pexpect SSH remote sessions.
>> + """A persistent SSH connection to a remote Node.
>> +
>> + The connection is implemented with the Fabric Python library.
>> +
>> + Args:
>> + node_config: The configuration of the Node to connect to.
>> + session_name: The name of the session.
>> + logger: The logger used for logging.
>> + This should be passed from the parent OSSession.
>> +
>> + Attributes:
>> + session: The underlying Fabric SSH connection.
>> +
>> + Raises:
>> + SSHConnectionError: The connection cannot be established.
>> """
>>
>> - session: pxssh.pxssh
>> - magic_prompt: str
>> + session: Connection
>>
>> def __init__(
>> self,
>> @@ -31,218 +51,91 @@ def __init__(
>> session_name: str,
>> logger: DTSLOG,
>> ):
>> - self.magic_prompt = "MAGIC PROMPT"
>> super(SSHSession, self).__init__(node_config, session_name,
>> logger)
>>
>> def _connect(self) -> None:
>> - """
>> - Create connection to assigned node.
>> - """
>> + errors = []
>> retry_attempts = 10
>> login_timeout = 20 if self.port else 10
>> - password_regex = (
>> - r"(?i)(?:password:)|(?:passphrase for key)|(?i)(password for
>> .+:)"
>> - )
>> - try:
>> - for retry_attempt in range(retry_attempts):
>> - self.session = pxssh.pxssh(encoding="utf-8")
>> - try:
>> - self.session.login(
>> - self.ip,
>> - self.username,
>> - self.password,
>> - original_prompt="[$#>]",
>> - port=self.port,
>> - login_timeout=login_timeout,
>> - password_regex=password_regex,
>> - )
>> - break
>> - except Exception as e:
>> - self._logger.warning(e)
>> - time.sleep(2)
>> - self._logger.info(
>> - f"Retrying connection: retry number
>> {retry_attempt + 1}."
>> - )
>> - else:
>> - raise Exception(f"Connection to {self.hostname} failed")
>> -
>> - self.send_expect("stty -echo", "#")
>> - self.send_expect("stty columns 1000", "#")
>> - self.send_expect("bind 'set enable-bracketed-paste off'",
>> "#")
>> - except Exception as e:
>> - self._logger.error(RED(str(e)))
>> - if getattr(self, "port", None):
>> - suggestion = (
>> - f"\nSuggestion: Check if the firewall on
>> {self.hostname} is "
>> - f"stopped.\n"
>> + for retry_attempt in range(retry_attempts):
>> + try:
>> + self.session = Connection(
>> + self.ip,
>> + user=self.username,
>> + port=self.port,
>> + connect_kwargs={"password": self.password},
>> + connect_timeout=login_timeout,
>> )
>> - self._logger.info(GREEN(suggestion))
>> -
>> - raise SSHConnectionError(self.hostname)
>> + self.session.open()
>>
>> - def send_expect(
>> - self, command: str, prompt: str, timeout: float = 15, verify:
>> bool = False
>> - ) -> str | int:
>> - try:
>> - ret = self.send_expect_base(command, prompt, timeout)
>> - if verify:
>> - ret_status = self.send_expect_base("echo $?", prompt,
>> timeout)
>> - try:
>> - retval = int(ret_status)
>> - if retval:
>> - self._logger.error(f"Command: {command}
>> failure!")
>> - self._logger.error(ret)
>> - return retval
>> - else:
>> - return ret
>> - except ValueError:
>> - return ret
>> - else:
>> - return ret
>> - except Exception as e:
>> - self._logger.error(
>> - f"Exception happened in [{command}] and output is "
>> - f"[{self._get_output()}]"
>> - )
>> - raise e
>> -
>> - def send_expect_base(self, command: str, prompt: str, timeout:
>> float) -> str:
>> - self._clean_session()
>> - original_prompt = self.session.PROMPT
>> - self.session.PROMPT = prompt
>> - self._send_line(command)
>> - self._prompt(command, timeout)
>> -
>> - before = self._get_output()
>> - self.session.PROMPT = original_prompt
>> - return before
>> -
>> - def _clean_session(self) -> None:
>> - self.session.PROMPT = self.magic_prompt
>> - self.get_output(timeout=0.01)
>> - self.session.PROMPT = self.session.UNIQUE_PROMPT
>> -
>> - def _send_line(self, command: str) -> None:
>> - if not self.is_alive():
>> - raise SSHSessionDeadError(self.hostname)
>> - if len(command) == 2 and command.startswith("^"):
>> - self.session.sendcontrol(command[1])
>> - else:
>> - self.session.sendline(command)
>> + except (ValueError, BadHostKeyException,
>> AuthenticationException) as e:
>> + self._logger.exception(e)
>> + raise SSHConnectionError(self.hostname) from e
>>
>> - def _prompt(self, command: str, timeout: float) -> None:
>> - if not self.session.prompt(timeout):
>> - raise SSHTimeoutError(command, self._get_output()) from None
>> + except (NoValidConnectionsError, socket.error, SSHException)
>> as e:
>> + self._logger.debug(traceback.format_exc())
>> + self._logger.warning(e)
>>
>> - def get_output(self, timeout: float = 15) -> str:
>> - """
>> - Get all output before timeout
>> - """
>> - try:
>> - self.session.prompt(timeout)
>> - except Exception:
>> - pass
>> -
>> - before = self._get_output()
>> - self._flush()
>> -
>> - return before
>> + error = repr(e)
>> + if error not in errors:
>> + errors.append(error)
>>
>> - def _get_output(self) -> str:
>> - if not self.is_alive():
>> - raise SSHSessionDeadError(self.hostname)
>> - before = self.session.before.rsplit("\r\n", 1)[0]
>> - if before == "[PEXPECT]":
>> - return ""
>> - return before
>> + self._logger.info(
>> + f"Retrying connection: retry number {retry_attempt +
>> 1}."
>> + )
>>
>> - def _flush(self) -> None:
>> - """
>> - Clear all session buffer
>> - """
>> - self.session.buffer = ""
>> - self.session.before = ""
>> + else:
>> + break
>> + else:
>> + raise SSHConnectionError(self.hostname, errors)
>>
>> def is_alive(self) -> bool:
>> - return self.session.isalive()
>> + return self.session.is_connected
>>
>> def _send_command(
>> - self, command: str, timeout: float, env: EnvVarsDict | None
>> + self, command: str, timeout: float, env: dict | None
>> ) -> CommandResult:
>> - output = self._send_command_get_output(command, timeout, env)
>> - return_code = int(self._send_command_get_output("echo $?",
>> timeout, None))
>> + """Send a command and return the result of the execution.
>>
>> - # we're capturing only stdout
>> - return CommandResult(self.name, command, output, "",
>> return_code)
>> + Args:
>> + command: The command to execute.
>> + timeout: Wait at most this many seconds for the execution to
>> complete.
>> + env: Extra environment variables that will be used in
>> command execution.
>>
>> - def _send_command_get_output(
>> - self, command: str, timeout: float, env: EnvVarsDict | None
>> - ) -> str:
>> + Raises:
>> + SSHSessionDeadError: The session died while executing the
>> command.
>> + SSHTimeoutError: The command execution timed out.
>> + """
>> try:
>> - self._clean_session()
>> - if env:
>> - command = f"{env} {command}"
>> - self._send_line(command)
>> - except Exception as e:
>> - raise e
>> + output = self.session.run(
>> + command, env=env, warn=True, hide=True, timeout=timeout
>> + )
>>
>> - output = self.get_output(timeout=timeout)
>> - self.session.PROMPT = self.session.UNIQUE_PROMPT
>> - self.session.prompt(0.1)
>> + except (UnexpectedExit, ThreadException) as e:
>> + self._logger.exception(e)
>> + raise SSHSessionDeadError(self.hostname) from e
>>
>> - return output
>> + except CommandTimedOut as e:
>> + self._logger.exception(e)
>> + raise SSHTimeoutError(command, e.result.stderr) from e
>>
>> - def _close(self, force: bool = False) -> None:
>> - if force is True:
>> - self.session.close()
>> - else:
>> - if self.is_alive():
>> - self.session.logout()
>> + return CommandResult(
>> + self.name, command, output.stdout, output.stderr,
>> output.return_code
>> + )
>>
>> - def copy_file(
>> + def copy_from(
>> self,
>> source_file: str | PurePath,
>> destination_file: str | PurePath,
>> - source_remote: bool = False,
>> ) -> None:
>> - """
>> - Send a local file to a remote host.
>> - """
>> - if source_remote:
>> - source_file = f"{self.username}@{self.ip}:{source_file}"
>> - else:
>> - destination_file = f"{self.username}@
>> {self.ip}:{destination_file}"
>> + self.session.get(str(destination_file), str(source_file))
>>
>> - port = ""
>> - if self.port:
>> - port = f" -P {self.port}"
>> -
>> - command = (
>> - f"scp -v{port} -o NoHostAuthenticationForLocalhost=yes"
>> - f" {source_file} {destination_file}"
>> - )
>> -
>> - self._spawn_scp(command)
>> + def copy_to(
>> + self,
>> + source_file: str | PurePath,
>> + destination_file: str | PurePath,
>> + ) -> None:
>> + self.session.put(str(source_file), str(destination_file))
>>
>> - def _spawn_scp(self, scp_cmd: str) -> None:
>> - """
>> - Transfer a file with SCP
>> - """
>> - self._logger.info(scp_cmd)
>> - p: pexpect.spawn = pexpect.spawn(scp_cmd)
>> - time.sleep(0.5)
>> - ssh_newkey: str = "Are you sure you want to continue connecting"
>> - i: int = p.expect(
>> - [ssh_newkey, "[pP]assword", "# ", pexpect.EOF,
>> pexpect.TIMEOUT], 120
>> - )
>> - if i == 0: # add once in trust list
>> - p.sendline("yes")
>> - i = p.expect([ssh_newkey, "[pP]assword", pexpect.EOF], 2)
>> -
>> - if i == 1:
>> - time.sleep(0.5)
>> - p.sendline(self.password)
>> - p.expect("Exit status 0", 60)
>> - if i == 4:
>> - self._logger.error("SCP TIMEOUT error %d" % i)
>> - p.close()
>> + def _close(self, force: bool = False) -> None:
>> + self.session.close()
>> diff --git a/dts/framework/testbed_model/sut_node.py
>> b/dts/framework/testbed_model/sut_node.py
>> index 2b2b50d982..9dbc390848 100644
>> --- a/dts/framework/testbed_model/sut_node.py
>> +++ b/dts/framework/testbed_model/sut_node.py
>> @@ -10,7 +10,7 @@
>> from framework.config import BuildTargetConfiguration, NodeConfiguration
>> from framework.remote_session import CommandResult, OSSession
>> from framework.settings import SETTINGS
>> -from framework.utils import EnvVarsDict, MesonArgs
>> +from framework.utils import MesonArgs
>>
>> from .hw import LogicalCoreCount, LogicalCoreList, VirtualDevice
>> from .node import Node
>> @@ -27,7 +27,7 @@ class SutNode(Node):
>> _dpdk_prefix_list: list[str]
>> _dpdk_timestamp: str
>> _build_target_config: BuildTargetConfiguration | None
>> - _env_vars: EnvVarsDict
>> + _env_vars: dict
>> _remote_tmp_dir: PurePath
>> __remote_dpdk_dir: PurePath | None
>> _dpdk_version: str | None
>> @@ -38,7 +38,7 @@ def __init__(self, node_config: NodeConfiguration):
>> super(SutNode, self).__init__(node_config)
>> self._dpdk_prefix_list = []
>> self._build_target_config = None
>> - self._env_vars = EnvVarsDict()
>> + self._env_vars = {}
>> self._remote_tmp_dir = self.main_session.get_remote_tmp_dir()
>> self.__remote_dpdk_dir = None
>> self._dpdk_version = None
>> @@ -94,7 +94,7 @@ def _configure_build_target(
>> """
>> Populate common environment variables and set build target
>> config.
>> """
>> - self._env_vars = EnvVarsDict()
>> + self._env_vars = {}
>> self._build_target_config = build_target_config
>> self._env_vars.update(
>>
>> self.main_session.get_dpdk_build_env_vars(build_target_config.arch)
>> @@ -112,7 +112,7 @@ def _copy_dpdk_tarball(self) -> None:
>> Copy to and extract DPDK tarball on the SUT node.
>> """
>> self._logger.info("Copying DPDK tarball to SUT.")
>> - self.main_session.copy_file(SETTINGS.dpdk_tarball_path,
>> self._remote_tmp_dir)
>> + self.main_session.copy_to(SETTINGS.dpdk_tarball_path,
>> self._remote_tmp_dir)
>>
>> # construct remote tarball path
>> # the basename is the same on local host and on remote Node
>> @@ -259,7 +259,7 @@ def run_dpdk_app(
>> Run DPDK application on the remote node.
>> """
>> return self.main_session.send_command(
>> - f"{app_path} {eal_args}", timeout, verify=True
>> + f"{app_path} {eal_args}", timeout, privileged=True,
>> verify=True
>> )
>>
>>
>> diff --git a/dts/framework/utils.py b/dts/framework/utils.py
>> index 55e0b0ef0e..8cfbc6a29d 100644
>> --- a/dts/framework/utils.py
>> +++ b/dts/framework/utils.py
>> @@ -42,19 +42,10 @@ def expand_range(range_str: str) -> list[int]:
>> return expanded_range
>>
>>
>> -def GREEN(text: str) -> str:
>> - return f"\u001B[32;1m{str(text)}\u001B[0m"
>> -
>> -
>> def RED(text: str) -> str:
>> return f"\u001B[31;1m{str(text)}\u001B[0m"
>>
>>
>> -class EnvVarsDict(dict):
>> - def __str__(self) -> str:
>> - return " ".join(["=".join(item) for item in self.items()])
>> -
>> -
>> class MesonArgs(object):
>> """
>> Aggregate the arguments needed to build DPDK:
>> diff --git a/dts/poetry.lock b/dts/poetry.lock
>> index 0b2a007d4d..2438f337cd 100644
>> --- a/dts/poetry.lock
>> +++ b/dts/poetry.lock
>> @@ -12,6 +12,18 @@ docs = ["furo", "sphinx", "zope.interface",
>> "sphinx-notfound-page"]
>> tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest
>> (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins",
>> "zope.interface", "cloudpickle"]
>> tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler",
>> "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins",
>> "cloudpickle"]
>>
>> +[[package]]
>> +name = "bcrypt"
>> +version = "4.0.1"
>> +description = "Modern password hashing for your software and your
>> servers"
>> +category = "main"
>> +optional = false
>> +python-versions = ">=3.6"
>> +
>> +[package.extras]
>> +tests = ["pytest (>=3.2.1,!=3.3.0)"]
>> +typecheck = ["mypy"]
>> +
>> [[package]]
>> name = "black"
>> version = "22.10.0"
>> @@ -33,6 +45,17 @@ d = ["aiohttp (>=3.7.4)"]
>> jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
>> uvloop = ["uvloop (>=0.15.2)"]
>>
>> +[[package]]
>> +name = "cffi"
>> +version = "1.15.1"
>> +description = "Foreign Function Interface for Python calling C code."
>> +category = "main"
>> +optional = false
>> +python-versions = "*"
>> +
>> +[package.dependencies]
>> +pycparser = "*"
>> +
>> [[package]]
>> name = "click"
>> version = "8.1.3"
>> @@ -52,6 +75,52 @@ category = "dev"
>> optional = false
>> python-versions =
>> "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
>>
>> +[[package]]
>> +name = "cryptography"
>> +version = "40.0.2"
>> +description = "cryptography is a package which provides cryptographic
>> recipes and primitives to Python developers."
>> +category = "main"
>> +optional = false
>> +python-versions = ">=3.6"
>> +
>> +[package.dependencies]
>> +cffi = ">=1.12"
>> +
>> +[package.extras]
>> +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
>> +docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)",
>> "sphinxcontrib-spelling (>=4.0.1)"]
>> +pep8test = ["black", "ruff", "mypy", "check-manifest"]
>> +sdist = ["setuptools-rust (>=0.11.4)"]
>> +ssh = ["bcrypt (>=3.1.5)"]
>> +test = ["pytest (>=6.2.0)", "pytest-shard (>=0.1.2)",
>> "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist",
>> "pretend", "iso8601"]
>> +test-randomorder = ["pytest-randomly"]
>> +tox = ["tox"]
>> +
>> +[[package]]
>> +name = "fabric"
>> +version = "2.7.1"
>> +description = "High level SSH command execution"
>> +category = "main"
>> +optional = false
>> +python-versions = "*"
>> +
>> +[package.dependencies]
>> +invoke = ">=1.3,<2.0"
>> +paramiko = ">=2.4"
>> +pathlib2 = "*"
>> +
>> +[package.extras]
>> +pytest = ["mock (>=2.0.0,<3.0)", "pytest (>=3.2.5,<4.0)"]
>> +testing = ["mock (>=2.0.0,<3.0)"]
>> +
>> +[[package]]
>> +name = "invoke"
>> +version = "1.7.3"
>> +description = "Pythonic task execution"
>> +category = "main"
>> +optional = false
>> +python-versions = "*"
>> +
>> [[package]]
>> name = "isort"
>> version = "5.10.1"
>> @@ -136,23 +205,41 @@ optional = false
>> python-versions = "*"
>>
>> [[package]]
>> -name = "pathspec"
>> -version = "0.10.1"
>> -description = "Utility library for gitignore style pattern matching of
>> file paths."
>> -category = "dev"
>> +name = "paramiko"
>> +version = "3.1.0"
>> +description = "SSH2 protocol library"
>> +category = "main"
>> optional = false
>> -python-versions = ">=3.7"
>> +python-versions = ">=3.6"
>> +
>> +[package.dependencies]
>> +bcrypt = ">=3.2"
>> +cryptography = ">=3.3"
>> +pynacl = ">=1.5"
>> +
>> +[package.extras]
>> +all = ["pyasn1 (>=0.1.7)", "invoke (>=2.0)", "gssapi (>=1.4.1)",
>> "pywin32 (>=2.1.8)"]
>> +gssapi = ["pyasn1 (>=0.1.7)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"]
>> +invoke = ["invoke (>=2.0)"]
>>
>> [[package]]
>> -name = "pexpect"
>> -version = "4.8.0"
>> -description = "Pexpect allows easy control of interactive console
>> applications."
>> +name = "pathlib2"
>> +version = "2.3.7.post1"
>> +description = "Object-oriented filesystem paths"
>> category = "main"
>> optional = false
>> python-versions = "*"
>>
>> [package.dependencies]
>> -ptyprocess = ">=0.5"
>> +six = "*"
>> +
>> +[[package]]
>> +name = "pathspec"
>> +version = "0.10.1"
>> +description = "Utility library for gitignore style pattern matching of
>> file paths."
>> +category = "dev"
>> +optional = false
>> +python-versions = ">=3.7"
>>
>> [[package]]
>> name = "platformdirs"
>> @@ -166,14 +253,6 @@ python-versions = ">=3.7"
>> docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)",
>> "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"]
>> test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock
>> (>=3.6)", "pytest (>=6)"]
>>
>> -[[package]]
>> -name = "ptyprocess"
>> -version = "0.7.0"
>> -description = "Run a subprocess in a pseudo terminal"
>> -category = "main"
>> -optional = false
>> -python-versions = "*"
>> -
>> [[package]]
>> name = "pycodestyle"
>> version = "2.9.1"
>> @@ -182,6 +261,14 @@ category = "dev"
>> optional = false
>> python-versions = ">=3.6"
>>
>> +[[package]]
>> +name = "pycparser"
>> +version = "2.21"
>> +description = "C parser in Python"
>> +category = "main"
>> +optional = false
>> +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
>> +
>> [[package]]
>> name = "pydocstyle"
>> version = "6.1.1"
>> @@ -228,6 +315,21 @@ tests = ["pytest (>=7.1.2)", "pytest-mypy",
>> "eradicate (>=2.0.0)", "radon (>=5.1
>> toml = ["toml (>=0.10.2)"]
>> vulture = ["vulture"]
>>
>> +[[package]]
>> +name = "pynacl"
>> +version = "1.5.0"
>> +description = "Python binding to the Networking and Cryptography (NaCl)
>> library"
>> +category = "main"
>> +optional = false
>> +python-versions = ">=3.6"
>> +
>> +[package.dependencies]
>> +cffi = ">=1.4.1"
>> +
>> +[package.extras]
>> +docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"]
>> +tests = ["pytest (>=3.2.1,!=3.3.0)", "hypothesis (>=3.27.0)"]
>> +
>> [[package]]
>> name = "pyrsistent"
>> version = "0.19.1"
>> @@ -244,6 +346,14 @@ category = "main"
>> optional = false
>> python-versions = ">=3.6"
>>
>> +[[package]]
>> +name = "six"
>> +version = "1.16.0"
>> +description = "Python 2 and 3 compatibility utilities"
>> +category = "main"
>> +optional = false
>> +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
>> +
>> [[package]]
>> name = "snowballstemmer"
>> version = "2.2.0"
>> @@ -299,13 +409,18 @@ jsonschema = ">=4,<5"
>> [metadata]
>> lock-version = "1.1"
>> python-versions = "^3.10"
>> -content-hash =
>> "a0f040b07fc6ce4deb0be078b9a88c2a465cb6bccb9e260a67e92c2403e2319f"
>> +content-hash =
>> "719c43bcaa5d181921debda884f8f714063df0b2336d61e9f64ecab034e8b139"
>>
>> [metadata.files]
>> attrs = []
>> +bcrypt = []
>> black = []
>> +cffi = []
>> click = []
>> colorama = []
>> +cryptography = []
>> +fabric = []
>> +invoke = []
>> isort = []
>> jsonpatch = []
>> jsonpointer = []
>> @@ -313,22 +428,22 @@ jsonschema = []
>> mccabe = []
>> mypy = []
>> mypy-extensions = []
>> +paramiko = []
>> +pathlib2 = []
>> pathspec = []
>> -pexpect = [
>> - {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash =
>> "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"},
>> - {file = "pexpect-4.8.0.tar.gz", hash =
>> "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"},
>> -]
>> platformdirs = [
>> {file = "platformdirs-2.5.2-py3-none-any.whl", hash =
>> "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
>> {file = "platformdirs-2.5.2.tar.gz", hash =
>> "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},
>> ]
>> -ptyprocess = []
>> pycodestyle = []
>> +pycparser = []
>> pydocstyle = []
>> pyflakes = []
>> pylama = []
>> +pynacl = []
>> pyrsistent = []
>> pyyaml = []
>> +six = []
>> snowballstemmer = []
>> toml = []
>> tomli = []
>> diff --git a/dts/pyproject.toml b/dts/pyproject.toml
>> index a136c91e5e..50bcdb327a 100644
>> --- a/dts/pyproject.toml
>> +++ b/dts/pyproject.toml
>> @@ -9,10 +9,10 @@ authors = ["Owen Hilyard <ohilyard@iol.unh.edu>", "
>> dts@dpdk.org"]
>>
>> [tool.poetry.dependencies]
>> python = "^3.10"
>> -pexpect = "^4.8.0"
>> warlock = "^2.0.1"
>> PyYAML = "^6.0"
>> types-PyYAML = "^6.0.8"
>> +fabric = "^2.7.1"
>>
>> [tool.poetry.dev-dependencies]
>> mypy = "^0.961"
>> --
>> 2.34.1
>>
>>
[-- Attachment #2: Type: text/html, Size: 60637 bytes --]
^ permalink raw reply [flat|nested] 21+ messages in thread
* Re: [PATCH v3] dts: replace pexpect with fabric
2023-06-09 9:46 ` [PATCH v3] " Juraj Linkeš
2023-06-21 18:33 ` Jeremy Spewock
@ 2023-07-09 1:45 ` Patrick Robb
1 sibling, 0 replies; 21+ messages in thread
From: Patrick Robb @ 2023-07-09 1:45 UTC (permalink / raw)
To: Juraj Linkeš
Cc: thomas, Honnappa.Nagarahalli, lijuan.tu, wathsala.vithanage,
jspewock, dev
[-- Attachment #1: Type: text/plain, Size: 48815 bytes --]
Tested-by: Patrick Robb <probb@iol.unh.edu>
On Fri, Jun 9, 2023 at 5:46 AM Juraj Linkeš <juraj.linkes@pantheon.tech>
wrote:
> Pexpect is not a dedicated SSH connection library while Fabric is. With
> Fabric, all SSH-related logic is provided and we can just focus on
> what's DTS specific.
>
> Signed-off-by: Juraj Linkeš <juraj.linkes@pantheon.tech>
> ---
>
> Notes:
> v3: updated passwordless sudo setup on Linux
>
> doc/guides/tools/dts.rst | 29 +-
> dts/conf.yaml | 2 +-
> dts/framework/exception.py | 10 +-
> dts/framework/remote_session/linux_session.py | 31 +-
> dts/framework/remote_session/os_session.py | 51 +++-
> dts/framework/remote_session/posix_session.py | 48 +--
> .../remote_session/remote/remote_session.py | 35 ++-
> .../remote_session/remote/ssh_session.py | 287 ++++++------------
> dts/framework/testbed_model/sut_node.py | 12 +-
> dts/framework/utils.py | 9 -
> dts/poetry.lock | 161 ++++++++--
> dts/pyproject.toml | 2 +-
> 12 files changed, 376 insertions(+), 301 deletions(-)
>
> diff --git a/doc/guides/tools/dts.rst b/doc/guides/tools/dts.rst
> index ebd6dceb6a..c7b31623e4 100644
> --- a/doc/guides/tools/dts.rst
> +++ b/doc/guides/tools/dts.rst
> @@ -95,9 +95,14 @@ Setting up DTS environment
>
> #. **SSH Connection**
>
> - DTS uses Python pexpect for SSH connections between DTS environment
> and the other hosts.
> - The pexpect implementation is a wrapper around the ssh command in the
> DTS environment.
> - This means it'll use the SSH agent providing the ssh command and its
> keys.
> + DTS uses the Fabric Python library for SSH connections between DTS
> environment
> + and the other hosts.
> + The authentication method used is pubkey authentication.
> + Fabric tries to use a passed key/certificate,
> + then any key it can find through an SSH agent,
> + then any "id_rsa", "id_dsa" or "id_ecdsa" key discoverable in
> ``~/.ssh/``
> + (with any matching OpenSSH-style certificates).
> + DTS doesn't pass any keys, so Fabric tries to use the other two
> methods.
>
>
> Setting up System Under Test
> @@ -132,6 +137,21 @@ There are two areas that need to be set up on a
> System Under Test:
> It's possible to use the hugepage configuration already present on
> the SUT.
> If you wish to do so, don't specify the hugepage configuration in
> the DTS config file.
>
> +#. **User with administrator privileges**
> +
> +.. _sut_admin_user:
> +
> + DTS needs administrator privileges to run DPDK applications (such as
> testpmd) on the SUT.
> + The SUT user must be able to run commands in privileged mode without
> asking for password.
> + On most Linux distributions, it's a matter of setting up passwordless
> sudo:
> +
> + #. Run ``sudo visudo`` and check that it contains ``%sudo
> ALL=(ALL:ALL) NOPASSWD:ALL``.
> +
> + #. Add the SUT user to the sudo group with:
> +
> + .. code-block:: console
> +
> + sudo usermod -aG sudo <sut_user>
>
> Running DTS
> -----------
> @@ -151,7 +171,8 @@ which is a template that illustrates what can be
> configured in DTS:
> :start-at: executions:
>
>
> -The user must be root or any other user with prompt starting with ``#``.
> +The user must have :ref:`administrator privileges <sut_admin_user>`
> +which don't require password authentication.
> The other fields are mostly self-explanatory
> and documented in more detail in
> ``dts/framework/config/conf_yaml_schema.json``.
>
> diff --git a/dts/conf.yaml b/dts/conf.yaml
> index a9bd8a3ecf..129801d87c 100644
> --- a/dts/conf.yaml
> +++ b/dts/conf.yaml
> @@ -16,7 +16,7 @@ executions:
> nodes:
> - name: "SUT 1"
> hostname: sut1.change.me.localhost
> - user: root
> + user: dtsuser
> arch: x86_64
> os: linux
> lcores: ""
> diff --git a/dts/framework/exception.py b/dts/framework/exception.py
> index ca353d98fc..44ff4e979a 100644
> --- a/dts/framework/exception.py
> +++ b/dts/framework/exception.py
> @@ -62,13 +62,19 @@ class SSHConnectionError(DTSError):
> """
>
> host: str
> + errors: list[str]
> severity: ClassVar[ErrorSeverity] = ErrorSeverity.SSH_ERR
>
> - def __init__(self, host: str):
> + def __init__(self, host: str, errors: list[str] | None = None):
> self.host = host
> + self.errors = [] if errors is None else errors
>
> def __str__(self) -> str:
> - return f"Error trying to connect with {self.host}"
> + message = f"Error trying to connect with {self.host}."
> + if self.errors:
> + message += f" Errors encountered while retrying: {',
> '.join(self.errors)}"
> +
> + return message
>
>
> class SSHSessionDeadError(DTSError):
> diff --git a/dts/framework/remote_session/linux_session.py
> b/dts/framework/remote_session/linux_session.py
> index a1e3bc3a92..f13f399121 100644
> --- a/dts/framework/remote_session/linux_session.py
> +++ b/dts/framework/remote_session/linux_session.py
> @@ -14,10 +14,11 @@ class LinuxSession(PosixSession):
> The implementation of non-Posix compliant parts of Linux remote
> sessions.
> """
>
> + def _get_privileged_command(self, command: str) -> str:
> + return f"sudo -- sh -c '{command}'"
> +
> def get_remote_cpus(self, use_first_core: bool) -> list[LogicalCore]:
> - cpu_info = self.remote_session.send_command(
> - "lscpu -p=CPU,CORE,SOCKET,NODE|grep -v \\#"
> - ).stdout
> + cpu_info = self.send_command("lscpu -p=CPU,CORE,SOCKET,NODE|grep
> -v \\#").stdout
> lcores = []
> for cpu_line in cpu_info.splitlines():
> lcore, core, socket, node = map(int, cpu_line.split(","))
> @@ -45,20 +46,20 @@ def setup_hugepages(self, hugepage_amount: int,
> force_first_numa: bool) -> None:
> self._mount_huge_pages()
>
> def _get_hugepage_size(self) -> int:
> - hugepage_size = self.remote_session.send_command(
> + hugepage_size = self.send_command(
> "awk '/Hugepagesize/ {print $2}' /proc/meminfo"
> ).stdout
> return int(hugepage_size)
>
> def _get_hugepages_total(self) -> int:
> - hugepages_total = self.remote_session.send_command(
> + hugepages_total = self.send_command(
> "awk '/HugePages_Total/ { print $2 }' /proc/meminfo"
> ).stdout
> return int(hugepages_total)
>
> def _get_numa_nodes(self) -> list[int]:
> try:
> - numa_count = self.remote_session.send_command(
> + numa_count = self.send_command(
> "cat /sys/devices/system/node/online", verify=True
> ).stdout
> numa_range = expand_range(numa_count)
> @@ -70,14 +71,12 @@ def _get_numa_nodes(self) -> list[int]:
> def _mount_huge_pages(self) -> None:
> self._logger.info("Re-mounting Hugepages.")
> hugapge_fs_cmd = "awk '/hugetlbfs/ { print $2 }' /proc/mounts"
> - self.remote_session.send_command(f"umount $({hugapge_fs_cmd})")
> - result = self.remote_session.send_command(hugapge_fs_cmd)
> + self.send_command(f"umount $({hugapge_fs_cmd})")
> + result = self.send_command(hugapge_fs_cmd)
> if result.stdout == "":
> remote_mount_path = "/mnt/huge"
> - self.remote_session.send_command(f"mkdir -p
> {remote_mount_path}")
> - self.remote_session.send_command(
> - f"mount -t hugetlbfs nodev {remote_mount_path}"
> - )
> + self.send_command(f"mkdir -p {remote_mount_path}")
> + self.send_command(f"mount -t hugetlbfs nodev
> {remote_mount_path}")
>
> def _supports_numa(self) -> bool:
> # the system supports numa if self._numa_nodes is non-empty and
> there are more
> @@ -94,14 +93,12 @@ def _configure_huge_pages(
> )
> if force_first_numa and self._supports_numa():
> # clear non-numa hugepages
> - self.remote_session.send_command(
> - f"echo 0 | sudo tee {hugepage_config_path}"
> - )
> + self.send_command(f"echo 0 | tee {hugepage_config_path}",
> privileged=True)
> hugepage_config_path = (
>
> f"/sys/devices/system/node/node{self._numa_nodes[0]}/hugepages"
> f"/hugepages-{size}kB/nr_hugepages"
> )
>
> - self.remote_session.send_command(
> - f"echo {amount} | sudo tee {hugepage_config_path}"
> + self.send_command(
> + f"echo {amount} | tee {hugepage_config_path}", privileged=True
> )
> diff --git a/dts/framework/remote_session/os_session.py
> b/dts/framework/remote_session/os_session.py
> index 4c48ae2567..bfd70bd480 100644
> --- a/dts/framework/remote_session/os_session.py
> +++ b/dts/framework/remote_session/os_session.py
> @@ -10,7 +10,7 @@
> from framework.logger import DTSLOG
> from framework.settings import SETTINGS
> from framework.testbed_model import LogicalCore
> -from framework.utils import EnvVarsDict, MesonArgs
> +from framework.utils import MesonArgs
>
> from .remote import CommandResult, RemoteSession, create_remote_session
>
> @@ -53,17 +53,32 @@ def is_alive(self) -> bool:
> def send_command(
> self,
> command: str,
> - timeout: float,
> + timeout: float = SETTINGS.timeout,
> + privileged: bool = False,
> verify: bool = False,
> - env: EnvVarsDict | None = None,
> + env: dict | None = None,
> ) -> CommandResult:
> """
> An all-purpose API in case the command to be executed is already
> OS-agnostic, such as when the path to the executed command has
> been
> constructed beforehand.
> """
> + if privileged:
> + command = self._get_privileged_command(command)
> +
> return self.remote_session.send_command(command, timeout, verify,
> env)
>
> + @abstractmethod
> + def _get_privileged_command(self, command: str) -> str:
> + """Modify the command so that it executes with administrative
> privileges.
> +
> + Args:
> + command: The command to modify.
> +
> + Returns:
> + The modified command that executes with administrative
> privileges.
> + """
> +
> @abstractmethod
> def guess_dpdk_remote_dir(self, remote_dir) -> PurePath:
> """
> @@ -90,17 +105,35 @@ def join_remote_path(self, *args: str | PurePath) ->
> PurePath:
> """
>
> @abstractmethod
> - def copy_file(
> + def copy_from(
> self,
> source_file: str | PurePath,
> destination_file: str | PurePath,
> - source_remote: bool = False,
> ) -> None:
> + """Copy a file from the remote Node to the local filesystem.
> +
> + Copy source_file from the remote Node associated with this remote
> + session to destination_file on the local filesystem.
> +
> + Args:
> + source_file: the file on the remote Node.
> + destination_file: a file or directory path on the local
> filesystem.
> """
> +
> + @abstractmethod
> + def copy_to(
> + self,
> + source_file: str | PurePath,
> + destination_file: str | PurePath,
> + ) -> None:
> + """Copy a file from local filesystem to the remote Node.
> +
> Copy source_file from local filesystem to destination_file
> - on the remote Node associated with the remote session.
> - If source_remote is True, reverse the direction - copy
> source_file from the
> - associated remote Node to destination_file on local storage.
> + on the remote Node associated with this remote session.
> +
> + Args:
> + source_file: the file on the local filesystem.
> + destination_file: a file or directory path on the remote Node.
> """
>
> @abstractmethod
> @@ -128,7 +161,7 @@ def extract_remote_tarball(
> @abstractmethod
> def build_dpdk(
> self,
> - env_vars: EnvVarsDict,
> + env_vars: dict,
> meson_args: MesonArgs,
> remote_dpdk_dir: str | PurePath,
> remote_dpdk_build_dir: str | PurePath,
> diff --git a/dts/framework/remote_session/posix_session.py
> b/dts/framework/remote_session/posix_session.py
> index d38062e8d6..8ca0acb429 100644
> --- a/dts/framework/remote_session/posix_session.py
> +++ b/dts/framework/remote_session/posix_session.py
> @@ -9,7 +9,7 @@
> from framework.config import Architecture
> from framework.exception import DPDKBuildError,
> RemoteCommandExecutionError
> from framework.settings import SETTINGS
> -from framework.utils import EnvVarsDict, MesonArgs
> +from framework.utils import MesonArgs
>
> from .os_session import OSSession
>
> @@ -34,7 +34,7 @@ def combine_short_options(**opts: bool) -> str:
>
> def guess_dpdk_remote_dir(self, remote_dir) -> PurePosixPath:
> remote_guess = self.join_remote_path(remote_dir, "dpdk-*")
> - result = self.remote_session.send_command(f"ls -d {remote_guess}
> | tail -1")
> + result = self.send_command(f"ls -d {remote_guess} | tail -1")
> return PurePosixPath(result.stdout)
>
> def get_remote_tmp_dir(self) -> PurePosixPath:
> @@ -48,7 +48,7 @@ def get_dpdk_build_env_vars(self, arch: Architecture) ->
> dict:
> env_vars = {}
> if arch == Architecture.i686:
> # find the pkg-config path and store it in PKG_CONFIG_LIBDIR
> - out = self.remote_session.send_command("find /usr -type d
> -name pkgconfig")
> + out = self.send_command("find /usr -type d -name pkgconfig")
> pkg_path = ""
> res_path = out.stdout.split("\r\n")
> for cur_path in res_path:
> @@ -65,13 +65,19 @@ def get_dpdk_build_env_vars(self, arch: Architecture)
> -> dict:
> def join_remote_path(self, *args: str | PurePath) -> PurePosixPath:
> return PurePosixPath(*args)
>
> - def copy_file(
> + def copy_from(
> self,
> source_file: str | PurePath,
> destination_file: str | PurePath,
> - source_remote: bool = False,
> ) -> None:
> - self.remote_session.copy_file(source_file, destination_file,
> source_remote)
> + self.remote_session.copy_from(source_file, destination_file)
> +
> + def copy_to(
> + self,
> + source_file: str | PurePath,
> + destination_file: str | PurePath,
> + ) -> None:
> + self.remote_session.copy_to(source_file, destination_file)
>
> def remove_remote_dir(
> self,
> @@ -80,24 +86,24 @@ def remove_remote_dir(
> force: bool = True,
> ) -> None:
> opts = PosixSession.combine_short_options(r=recursive, f=force)
> - self.remote_session.send_command(f"rm{opts} {remote_dir_path}")
> + self.send_command(f"rm{opts} {remote_dir_path}")
>
> def extract_remote_tarball(
> self,
> remote_tarball_path: str | PurePath,
> expected_dir: str | PurePath | None = None,
> ) -> None:
> - self.remote_session.send_command(
> + self.send_command(
> f"tar xfm {remote_tarball_path} "
> f"-C {PurePosixPath(remote_tarball_path).parent}",
> 60,
> )
> if expected_dir:
> - self.remote_session.send_command(f"ls {expected_dir}",
> verify=True)
> + self.send_command(f"ls {expected_dir}", verify=True)
>
> def build_dpdk(
> self,
> - env_vars: EnvVarsDict,
> + env_vars: dict,
> meson_args: MesonArgs,
> remote_dpdk_dir: str | PurePath,
> remote_dpdk_build_dir: str | PurePath,
> @@ -108,7 +114,7 @@ def build_dpdk(
> if rebuild:
> # reconfigure, then build
> self._logger.info("Reconfiguring DPDK build.")
> - self.remote_session.send_command(
> + self.send_command(
> f"meson configure {meson_args}
> {remote_dpdk_build_dir}",
> timeout,
> verify=True,
> @@ -118,7 +124,7 @@ def build_dpdk(
> # fresh build - remove target dir first, then build from
> scratch
> self._logger.info("Configuring DPDK build from scratch.")
> self.remove_remote_dir(remote_dpdk_build_dir)
> - self.remote_session.send_command(
> + self.send_command(
> f"meson setup "
> f"{meson_args} {remote_dpdk_dir}
> {remote_dpdk_build_dir}",
> timeout,
> @@ -127,14 +133,14 @@ def build_dpdk(
> )
>
> self._logger.info("Building DPDK.")
> - self.remote_session.send_command(
> + self.send_command(
> f"ninja -C {remote_dpdk_build_dir}", timeout,
> verify=True, env=env_vars
> )
> except RemoteCommandExecutionError as e:
> raise DPDKBuildError(f"DPDK build failed when doing
> '{e.command}'.")
>
> def get_dpdk_version(self, build_dir: str | PurePath) -> str:
> - out = self.remote_session.send_command(
> + out = self.send_command(
> f"cat {self.join_remote_path(build_dir, 'VERSION')}",
> verify=True
> )
> return out.stdout
> @@ -146,7 +152,7 @@ def kill_cleanup_dpdk_apps(self, dpdk_prefix_list:
> Iterable[str]) -> None:
> # kill and cleanup only if DPDK is running
> dpdk_pids = self._get_dpdk_pids(dpdk_runtime_dirs)
> for dpdk_pid in dpdk_pids:
> - self.remote_session.send_command(f"kill -9 {dpdk_pid}",
> 20)
> + self.send_command(f"kill -9 {dpdk_pid}", 20)
> self._check_dpdk_hugepages(dpdk_runtime_dirs)
> self._remove_dpdk_runtime_dirs(dpdk_runtime_dirs)
>
> @@ -168,7 +174,7 @@ def _list_remote_dirs(self, remote_path: str |
> PurePath) -> list[str] | None:
> Return a list of directories of the remote_dir.
> If remote_path doesn't exist, return None.
> """
> - out = self.remote_session.send_command(
> + out = self.send_command(
> f"ls -l {remote_path} | awk '/^d/ {{print $NF}}'"
> ).stdout
> if "No such file or directory" in out:
> @@ -182,9 +188,7 @@ def _get_dpdk_pids(self, dpdk_runtime_dirs:
> Iterable[str | PurePath]) -> list[in
> for dpdk_runtime_dir in dpdk_runtime_dirs:
> dpdk_config_file = PurePosixPath(dpdk_runtime_dir, "config")
> if self._remote_files_exists(dpdk_config_file):
> - out = self.remote_session.send_command(
> - f"lsof -Fp {dpdk_config_file}"
> - ).stdout
> + out = self.send_command(f"lsof -Fp
> {dpdk_config_file}").stdout
> if out and "No such file or directory" not in out:
> for out_line in out.splitlines():
> match = re.match(pid_regex, out_line)
> @@ -193,7 +197,7 @@ def _get_dpdk_pids(self, dpdk_runtime_dirs:
> Iterable[str | PurePath]) -> list[in
> return pids
>
> def _remote_files_exists(self, remote_path: PurePath) -> bool:
> - result = self.remote_session.send_command(f"test -e
> {remote_path}")
> + result = self.send_command(f"test -e {remote_path}")
> return not result.return_code
>
> def _check_dpdk_hugepages(
> @@ -202,9 +206,7 @@ def _check_dpdk_hugepages(
> for dpdk_runtime_dir in dpdk_runtime_dirs:
> hugepage_info = PurePosixPath(dpdk_runtime_dir,
> "hugepage_info")
> if self._remote_files_exists(hugepage_info):
> - out = self.remote_session.send_command(
> - f"lsof -Fp {hugepage_info}"
> - ).stdout
> + out = self.send_command(f"lsof -Fp
> {hugepage_info}").stdout
> if out and "No such file or directory" not in out:
> self._logger.warning("Some DPDK processes did not
> free hugepages.")
>
> self._logger.warning("*******************************************")
> diff --git a/dts/framework/remote_session/remote/remote_session.py
> b/dts/framework/remote_session/remote/remote_session.py
> index 91dee3cb4f..0647d93de4 100644
> --- a/dts/framework/remote_session/remote/remote_session.py
> +++ b/dts/framework/remote_session/remote/remote_session.py
> @@ -11,7 +11,6 @@
> from framework.exception import RemoteCommandExecutionError
> from framework.logger import DTSLOG
> from framework.settings import SETTINGS
> -from framework.utils import EnvVarsDict
>
>
> @dataclasses.dataclass(slots=True, frozen=True)
> @@ -89,7 +88,7 @@ def send_command(
> command: str,
> timeout: float = SETTINGS.timeout,
> verify: bool = False,
> - env: EnvVarsDict | None = None,
> + env: dict | None = None,
> ) -> CommandResult:
> """
> Send a command to the connected node using optional env vars
> @@ -114,7 +113,7 @@ def send_command(
>
> @abstractmethod
> def _send_command(
> - self, command: str, timeout: float, env: EnvVarsDict | None
> + self, command: str, timeout: float, env: dict | None
> ) -> CommandResult:
> """
> Use the underlying protocol to execute the command using optional
> env vars
> @@ -141,15 +140,33 @@ def is_alive(self) -> bool:
> """
>
> @abstractmethod
> - def copy_file(
> + def copy_from(
> self,
> source_file: str | PurePath,
> destination_file: str | PurePath,
> - source_remote: bool = False,
> ) -> None:
> + """Copy a file from the remote Node to the local filesystem.
> +
> + Copy source_file from the remote Node associated with this remote
> + session to destination_file on the local filesystem.
> +
> + Args:
> + source_file: the file on the remote Node.
> + destination_file: a file or directory path on the local
> filesystem.
> """
> - Copy source_file from local filesystem to destination_file on the
> remote Node
> - associated with the remote session.
> - If source_remote is True, reverse the direction - copy
> source_file from the
> - associated Node to destination_file on local filesystem.
> +
> + @abstractmethod
> + def copy_to(
> + self,
> + source_file: str | PurePath,
> + destination_file: str | PurePath,
> + ) -> None:
> + """Copy a file from local filesystem to the remote Node.
> +
> + Copy source_file from local filesystem to destination_file
> + on the remote Node associated with this remote session.
> +
> + Args:
> + source_file: the file on the local filesystem.
> + destination_file: a file or directory path on the remote Node.
> """
> diff --git a/dts/framework/remote_session/remote/ssh_session.py
> b/dts/framework/remote_session/remote/ssh_session.py
> index 42ff9498a2..8d127f1601 100644
> --- a/dts/framework/remote_session/remote/ssh_session.py
> +++ b/dts/framework/remote_session/remote/ssh_session.py
> @@ -1,29 +1,49 @@
> # SPDX-License-Identifier: BSD-3-Clause
> -# Copyright(c) 2010-2014 Intel Corporation
> -# Copyright(c) 2022-2023 PANTHEON.tech s.r.o.
> -# Copyright(c) 2022-2023 University of New Hampshire
> +# Copyright(c) 2023 PANTHEON.tech s.r.o.
>
> -import time
> +import socket
> +import traceback
> from pathlib import PurePath
>
> -import pexpect # type: ignore
> -from pexpect import pxssh # type: ignore
> +from fabric import Connection # type: ignore[import]
> +from invoke.exceptions import ( # type: ignore[import]
> + CommandTimedOut,
> + ThreadException,
> + UnexpectedExit,
> +)
> +from paramiko.ssh_exception import ( # type: ignore[import]
> + AuthenticationException,
> + BadHostKeyException,
> + NoValidConnectionsError,
> + SSHException,
> +)
>
> from framework.config import NodeConfiguration
> from framework.exception import SSHConnectionError, SSHSessionDeadError,
> SSHTimeoutError
> from framework.logger import DTSLOG
> -from framework.utils import GREEN, RED, EnvVarsDict
>
> from .remote_session import CommandResult, RemoteSession
>
>
> class SSHSession(RemoteSession):
> - """
> - Module for creating Pexpect SSH remote sessions.
> + """A persistent SSH connection to a remote Node.
> +
> + The connection is implemented with the Fabric Python library.
> +
> + Args:
> + node_config: The configuration of the Node to connect to.
> + session_name: The name of the session.
> + logger: The logger used for logging.
> + This should be passed from the parent OSSession.
> +
> + Attributes:
> + session: The underlying Fabric SSH connection.
> +
> + Raises:
> + SSHConnectionError: The connection cannot be established.
> """
>
> - session: pxssh.pxssh
> - magic_prompt: str
> + session: Connection
>
> def __init__(
> self,
> @@ -31,218 +51,91 @@ def __init__(
> session_name: str,
> logger: DTSLOG,
> ):
> - self.magic_prompt = "MAGIC PROMPT"
> super(SSHSession, self).__init__(node_config, session_name,
> logger)
>
> def _connect(self) -> None:
> - """
> - Create connection to assigned node.
> - """
> + errors = []
> retry_attempts = 10
> login_timeout = 20 if self.port else 10
> - password_regex = (
> - r"(?i)(?:password:)|(?:passphrase for key)|(?i)(password for
> .+:)"
> - )
> - try:
> - for retry_attempt in range(retry_attempts):
> - self.session = pxssh.pxssh(encoding="utf-8")
> - try:
> - self.session.login(
> - self.ip,
> - self.username,
> - self.password,
> - original_prompt="[$#>]",
> - port=self.port,
> - login_timeout=login_timeout,
> - password_regex=password_regex,
> - )
> - break
> - except Exception as e:
> - self._logger.warning(e)
> - time.sleep(2)
> - self._logger.info(
> - f"Retrying connection: retry number
> {retry_attempt + 1}."
> - )
> - else:
> - raise Exception(f"Connection to {self.hostname} failed")
> -
> - self.send_expect("stty -echo", "#")
> - self.send_expect("stty columns 1000", "#")
> - self.send_expect("bind 'set enable-bracketed-paste off'", "#")
> - except Exception as e:
> - self._logger.error(RED(str(e)))
> - if getattr(self, "port", None):
> - suggestion = (
> - f"\nSuggestion: Check if the firewall on
> {self.hostname} is "
> - f"stopped.\n"
> + for retry_attempt in range(retry_attempts):
> + try:
> + self.session = Connection(
> + self.ip,
> + user=self.username,
> + port=self.port,
> + connect_kwargs={"password": self.password},
> + connect_timeout=login_timeout,
> )
> - self._logger.info(GREEN(suggestion))
> -
> - raise SSHConnectionError(self.hostname)
> + self.session.open()
>
> - def send_expect(
> - self, command: str, prompt: str, timeout: float = 15, verify:
> bool = False
> - ) -> str | int:
> - try:
> - ret = self.send_expect_base(command, prompt, timeout)
> - if verify:
> - ret_status = self.send_expect_base("echo $?", prompt,
> timeout)
> - try:
> - retval = int(ret_status)
> - if retval:
> - self._logger.error(f"Command: {command} failure!")
> - self._logger.error(ret)
> - return retval
> - else:
> - return ret
> - except ValueError:
> - return ret
> - else:
> - return ret
> - except Exception as e:
> - self._logger.error(
> - f"Exception happened in [{command}] and output is "
> - f"[{self._get_output()}]"
> - )
> - raise e
> -
> - def send_expect_base(self, command: str, prompt: str, timeout: float)
> -> str:
> - self._clean_session()
> - original_prompt = self.session.PROMPT
> - self.session.PROMPT = prompt
> - self._send_line(command)
> - self._prompt(command, timeout)
> -
> - before = self._get_output()
> - self.session.PROMPT = original_prompt
> - return before
> -
> - def _clean_session(self) -> None:
> - self.session.PROMPT = self.magic_prompt
> - self.get_output(timeout=0.01)
> - self.session.PROMPT = self.session.UNIQUE_PROMPT
> -
> - def _send_line(self, command: str) -> None:
> - if not self.is_alive():
> - raise SSHSessionDeadError(self.hostname)
> - if len(command) == 2 and command.startswith("^"):
> - self.session.sendcontrol(command[1])
> - else:
> - self.session.sendline(command)
> + except (ValueError, BadHostKeyException,
> AuthenticationException) as e:
> + self._logger.exception(e)
> + raise SSHConnectionError(self.hostname) from e
>
> - def _prompt(self, command: str, timeout: float) -> None:
> - if not self.session.prompt(timeout):
> - raise SSHTimeoutError(command, self._get_output()) from None
> + except (NoValidConnectionsError, socket.error, SSHException)
> as e:
> + self._logger.debug(traceback.format_exc())
> + self._logger.warning(e)
>
> - def get_output(self, timeout: float = 15) -> str:
> - """
> - Get all output before timeout
> - """
> - try:
> - self.session.prompt(timeout)
> - except Exception:
> - pass
> -
> - before = self._get_output()
> - self._flush()
> -
> - return before
> + error = repr(e)
> + if error not in errors:
> + errors.append(error)
>
> - def _get_output(self) -> str:
> - if not self.is_alive():
> - raise SSHSessionDeadError(self.hostname)
> - before = self.session.before.rsplit("\r\n", 1)[0]
> - if before == "[PEXPECT]":
> - return ""
> - return before
> + self._logger.info(
> + f"Retrying connection: retry number {retry_attempt +
> 1}."
> + )
>
> - def _flush(self) -> None:
> - """
> - Clear all session buffer
> - """
> - self.session.buffer = ""
> - self.session.before = ""
> + else:
> + break
> + else:
> + raise SSHConnectionError(self.hostname, errors)
>
> def is_alive(self) -> bool:
> - return self.session.isalive()
> + return self.session.is_connected
>
> def _send_command(
> - self, command: str, timeout: float, env: EnvVarsDict | None
> + self, command: str, timeout: float, env: dict | None
> ) -> CommandResult:
> - output = self._send_command_get_output(command, timeout, env)
> - return_code = int(self._send_command_get_output("echo $?",
> timeout, None))
> + """Send a command and return the result of the execution.
>
> - # we're capturing only stdout
> - return CommandResult(self.name, command, output, "", return_code)
> + Args:
> + command: The command to execute.
> + timeout: Wait at most this many seconds for the execution to
> complete.
> + env: Extra environment variables that will be used in command
> execution.
>
> - def _send_command_get_output(
> - self, command: str, timeout: float, env: EnvVarsDict | None
> - ) -> str:
> + Raises:
> + SSHSessionDeadError: The session died while executing the
> command.
> + SSHTimeoutError: The command execution timed out.
> + """
> try:
> - self._clean_session()
> - if env:
> - command = f"{env} {command}"
> - self._send_line(command)
> - except Exception as e:
> - raise e
> + output = self.session.run(
> + command, env=env, warn=True, hide=True, timeout=timeout
> + )
>
> - output = self.get_output(timeout=timeout)
> - self.session.PROMPT = self.session.UNIQUE_PROMPT
> - self.session.prompt(0.1)
> + except (UnexpectedExit, ThreadException) as e:
> + self._logger.exception(e)
> + raise SSHSessionDeadError(self.hostname) from e
>
> - return output
> + except CommandTimedOut as e:
> + self._logger.exception(e)
> + raise SSHTimeoutError(command, e.result.stderr) from e
>
> - def _close(self, force: bool = False) -> None:
> - if force is True:
> - self.session.close()
> - else:
> - if self.is_alive():
> - self.session.logout()
> + return CommandResult(
> + self.name, command, output.stdout, output.stderr,
> output.return_code
> + )
>
> - def copy_file(
> + def copy_from(
> self,
> source_file: str | PurePath,
> destination_file: str | PurePath,
> - source_remote: bool = False,
> ) -> None:
> - """
> - Send a local file to a remote host.
> - """
> - if source_remote:
> - source_file = f"{self.username}@{self.ip}:{source_file}"
> - else:
> - destination_file = f"{self.username}@
> {self.ip}:{destination_file}"
> + self.session.get(str(destination_file), str(source_file))
>
> - port = ""
> - if self.port:
> - port = f" -P {self.port}"
> -
> - command = (
> - f"scp -v{port} -o NoHostAuthenticationForLocalhost=yes"
> - f" {source_file} {destination_file}"
> - )
> -
> - self._spawn_scp(command)
> + def copy_to(
> + self,
> + source_file: str | PurePath,
> + destination_file: str | PurePath,
> + ) -> None:
> + self.session.put(str(source_file), str(destination_file))
>
> - def _spawn_scp(self, scp_cmd: str) -> None:
> - """
> - Transfer a file with SCP
> - """
> - self._logger.info(scp_cmd)
> - p: pexpect.spawn = pexpect.spawn(scp_cmd)
> - time.sleep(0.5)
> - ssh_newkey: str = "Are you sure you want to continue connecting"
> - i: int = p.expect(
> - [ssh_newkey, "[pP]assword", "# ", pexpect.EOF,
> pexpect.TIMEOUT], 120
> - )
> - if i == 0: # add once in trust list
> - p.sendline("yes")
> - i = p.expect([ssh_newkey, "[pP]assword", pexpect.EOF], 2)
> -
> - if i == 1:
> - time.sleep(0.5)
> - p.sendline(self.password)
> - p.expect("Exit status 0", 60)
> - if i == 4:
> - self._logger.error("SCP TIMEOUT error %d" % i)
> - p.close()
> + def _close(self, force: bool = False) -> None:
> + self.session.close()
> diff --git a/dts/framework/testbed_model/sut_node.py
> b/dts/framework/testbed_model/sut_node.py
> index 2b2b50d982..9dbc390848 100644
> --- a/dts/framework/testbed_model/sut_node.py
> +++ b/dts/framework/testbed_model/sut_node.py
> @@ -10,7 +10,7 @@
> from framework.config import BuildTargetConfiguration, NodeConfiguration
> from framework.remote_session import CommandResult, OSSession
> from framework.settings import SETTINGS
> -from framework.utils import EnvVarsDict, MesonArgs
> +from framework.utils import MesonArgs
>
> from .hw import LogicalCoreCount, LogicalCoreList, VirtualDevice
> from .node import Node
> @@ -27,7 +27,7 @@ class SutNode(Node):
> _dpdk_prefix_list: list[str]
> _dpdk_timestamp: str
> _build_target_config: BuildTargetConfiguration | None
> - _env_vars: EnvVarsDict
> + _env_vars: dict
> _remote_tmp_dir: PurePath
> __remote_dpdk_dir: PurePath | None
> _dpdk_version: str | None
> @@ -38,7 +38,7 @@ def __init__(self, node_config: NodeConfiguration):
> super(SutNode, self).__init__(node_config)
> self._dpdk_prefix_list = []
> self._build_target_config = None
> - self._env_vars = EnvVarsDict()
> + self._env_vars = {}
> self._remote_tmp_dir = self.main_session.get_remote_tmp_dir()
> self.__remote_dpdk_dir = None
> self._dpdk_version = None
> @@ -94,7 +94,7 @@ def _configure_build_target(
> """
> Populate common environment variables and set build target config.
> """
> - self._env_vars = EnvVarsDict()
> + self._env_vars = {}
> self._build_target_config = build_target_config
> self._env_vars.update(
>
> self.main_session.get_dpdk_build_env_vars(build_target_config.arch)
> @@ -112,7 +112,7 @@ def _copy_dpdk_tarball(self) -> None:
> Copy to and extract DPDK tarball on the SUT node.
> """
> self._logger.info("Copying DPDK tarball to SUT.")
> - self.main_session.copy_file(SETTINGS.dpdk_tarball_path,
> self._remote_tmp_dir)
> + self.main_session.copy_to(SETTINGS.dpdk_tarball_path,
> self._remote_tmp_dir)
>
> # construct remote tarball path
> # the basename is the same on local host and on remote Node
> @@ -259,7 +259,7 @@ def run_dpdk_app(
> Run DPDK application on the remote node.
> """
> return self.main_session.send_command(
> - f"{app_path} {eal_args}", timeout, verify=True
> + f"{app_path} {eal_args}", timeout, privileged=True,
> verify=True
> )
>
>
> diff --git a/dts/framework/utils.py b/dts/framework/utils.py
> index 55e0b0ef0e..8cfbc6a29d 100644
> --- a/dts/framework/utils.py
> +++ b/dts/framework/utils.py
> @@ -42,19 +42,10 @@ def expand_range(range_str: str) -> list[int]:
> return expanded_range
>
>
> -def GREEN(text: str) -> str:
> - return f"\u001B[32;1m{str(text)}\u001B[0m"
> -
> -
> def RED(text: str) -> str:
> return f"\u001B[31;1m{str(text)}\u001B[0m"
>
>
> -class EnvVarsDict(dict):
> - def __str__(self) -> str:
> - return " ".join(["=".join(item) for item in self.items()])
> -
> -
> class MesonArgs(object):
> """
> Aggregate the arguments needed to build DPDK:
> diff --git a/dts/poetry.lock b/dts/poetry.lock
> index 0b2a007d4d..2438f337cd 100644
> --- a/dts/poetry.lock
> +++ b/dts/poetry.lock
> @@ -12,6 +12,18 @@ docs = ["furo", "sphinx", "zope.interface",
> "sphinx-notfound-page"]
> tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest
> (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins",
> "zope.interface", "cloudpickle"]
> tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler",
> "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins",
> "cloudpickle"]
>
> +[[package]]
> +name = "bcrypt"
> +version = "4.0.1"
> +description = "Modern password hashing for your software and your servers"
> +category = "main"
> +optional = false
> +python-versions = ">=3.6"
> +
> +[package.extras]
> +tests = ["pytest (>=3.2.1,!=3.3.0)"]
> +typecheck = ["mypy"]
> +
> [[package]]
> name = "black"
> version = "22.10.0"
> @@ -33,6 +45,17 @@ d = ["aiohttp (>=3.7.4)"]
> jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
> uvloop = ["uvloop (>=0.15.2)"]
>
> +[[package]]
> +name = "cffi"
> +version = "1.15.1"
> +description = "Foreign Function Interface for Python calling C code."
> +category = "main"
> +optional = false
> +python-versions = "*"
> +
> +[package.dependencies]
> +pycparser = "*"
> +
> [[package]]
> name = "click"
> version = "8.1.3"
> @@ -52,6 +75,52 @@ category = "dev"
> optional = false
> python-versions =
> "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
>
> +[[package]]
> +name = "cryptography"
> +version = "40.0.2"
> +description = "cryptography is a package which provides cryptographic
> recipes and primitives to Python developers."
> +category = "main"
> +optional = false
> +python-versions = ">=3.6"
> +
> +[package.dependencies]
> +cffi = ">=1.12"
> +
> +[package.extras]
> +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
> +docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)",
> "sphinxcontrib-spelling (>=4.0.1)"]
> +pep8test = ["black", "ruff", "mypy", "check-manifest"]
> +sdist = ["setuptools-rust (>=0.11.4)"]
> +ssh = ["bcrypt (>=3.1.5)"]
> +test = ["pytest (>=6.2.0)", "pytest-shard (>=0.1.2)", "pytest-benchmark",
> "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601"]
> +test-randomorder = ["pytest-randomly"]
> +tox = ["tox"]
> +
> +[[package]]
> +name = "fabric"
> +version = "2.7.1"
> +description = "High level SSH command execution"
> +category = "main"
> +optional = false
> +python-versions = "*"
> +
> +[package.dependencies]
> +invoke = ">=1.3,<2.0"
> +paramiko = ">=2.4"
> +pathlib2 = "*"
> +
> +[package.extras]
> +pytest = ["mock (>=2.0.0,<3.0)", "pytest (>=3.2.5,<4.0)"]
> +testing = ["mock (>=2.0.0,<3.0)"]
> +
> +[[package]]
> +name = "invoke"
> +version = "1.7.3"
> +description = "Pythonic task execution"
> +category = "main"
> +optional = false
> +python-versions = "*"
> +
> [[package]]
> name = "isort"
> version = "5.10.1"
> @@ -136,23 +205,41 @@ optional = false
> python-versions = "*"
>
> [[package]]
> -name = "pathspec"
> -version = "0.10.1"
> -description = "Utility library for gitignore style pattern matching of
> file paths."
> -category = "dev"
> +name = "paramiko"
> +version = "3.1.0"
> +description = "SSH2 protocol library"
> +category = "main"
> optional = false
> -python-versions = ">=3.7"
> +python-versions = ">=3.6"
> +
> +[package.dependencies]
> +bcrypt = ">=3.2"
> +cryptography = ">=3.3"
> +pynacl = ">=1.5"
> +
> +[package.extras]
> +all = ["pyasn1 (>=0.1.7)", "invoke (>=2.0)", "gssapi (>=1.4.1)", "pywin32
> (>=2.1.8)"]
> +gssapi = ["pyasn1 (>=0.1.7)", "gssapi (>=1.4.1)", "pywin32 (>=2.1.8)"]
> +invoke = ["invoke (>=2.0)"]
>
> [[package]]
> -name = "pexpect"
> -version = "4.8.0"
> -description = "Pexpect allows easy control of interactive console
> applications."
> +name = "pathlib2"
> +version = "2.3.7.post1"
> +description = "Object-oriented filesystem paths"
> category = "main"
> optional = false
> python-versions = "*"
>
> [package.dependencies]
> -ptyprocess = ">=0.5"
> +six = "*"
> +
> +[[package]]
> +name = "pathspec"
> +version = "0.10.1"
> +description = "Utility library for gitignore style pattern matching of
> file paths."
> +category = "dev"
> +optional = false
> +python-versions = ">=3.7"
>
> [[package]]
> name = "platformdirs"
> @@ -166,14 +253,6 @@ python-versions = ">=3.7"
> docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)",
> "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"]
> test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)",
> "pytest (>=6)"]
>
> -[[package]]
> -name = "ptyprocess"
> -version = "0.7.0"
> -description = "Run a subprocess in a pseudo terminal"
> -category = "main"
> -optional = false
> -python-versions = "*"
> -
> [[package]]
> name = "pycodestyle"
> version = "2.9.1"
> @@ -182,6 +261,14 @@ category = "dev"
> optional = false
> python-versions = ">=3.6"
>
> +[[package]]
> +name = "pycparser"
> +version = "2.21"
> +description = "C parser in Python"
> +category = "main"
> +optional = false
> +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
> +
> [[package]]
> name = "pydocstyle"
> version = "6.1.1"
> @@ -228,6 +315,21 @@ tests = ["pytest (>=7.1.2)", "pytest-mypy",
> "eradicate (>=2.0.0)", "radon (>=5.1
> toml = ["toml (>=0.10.2)"]
> vulture = ["vulture"]
>
> +[[package]]
> +name = "pynacl"
> +version = "1.5.0"
> +description = "Python binding to the Networking and Cryptography (NaCl)
> library"
> +category = "main"
> +optional = false
> +python-versions = ">=3.6"
> +
> +[package.dependencies]
> +cffi = ">=1.4.1"
> +
> +[package.extras]
> +docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"]
> +tests = ["pytest (>=3.2.1,!=3.3.0)", "hypothesis (>=3.27.0)"]
> +
> [[package]]
> name = "pyrsistent"
> version = "0.19.1"
> @@ -244,6 +346,14 @@ category = "main"
> optional = false
> python-versions = ">=3.6"
>
> +[[package]]
> +name = "six"
> +version = "1.16.0"
> +description = "Python 2 and 3 compatibility utilities"
> +category = "main"
> +optional = false
> +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
> +
> [[package]]
> name = "snowballstemmer"
> version = "2.2.0"
> @@ -299,13 +409,18 @@ jsonschema = ">=4,<5"
> [metadata]
> lock-version = "1.1"
> python-versions = "^3.10"
> -content-hash =
> "a0f040b07fc6ce4deb0be078b9a88c2a465cb6bccb9e260a67e92c2403e2319f"
> +content-hash =
> "719c43bcaa5d181921debda884f8f714063df0b2336d61e9f64ecab034e8b139"
>
> [metadata.files]
> attrs = []
> +bcrypt = []
> black = []
> +cffi = []
> click = []
> colorama = []
> +cryptography = []
> +fabric = []
> +invoke = []
> isort = []
> jsonpatch = []
> jsonpointer = []
> @@ -313,22 +428,22 @@ jsonschema = []
> mccabe = []
> mypy = []
> mypy-extensions = []
> +paramiko = []
> +pathlib2 = []
> pathspec = []
> -pexpect = [
> - {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash =
> "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"},
> - {file = "pexpect-4.8.0.tar.gz", hash =
> "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"},
> -]
> platformdirs = [
> {file = "platformdirs-2.5.2-py3-none-any.whl", hash =
> "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"},
> {file = "platformdirs-2.5.2.tar.gz", hash =
> "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"},
> ]
> -ptyprocess = []
> pycodestyle = []
> +pycparser = []
> pydocstyle = []
> pyflakes = []
> pylama = []
> +pynacl = []
> pyrsistent = []
> pyyaml = []
> +six = []
> snowballstemmer = []
> toml = []
> tomli = []
> diff --git a/dts/pyproject.toml b/dts/pyproject.toml
> index a136c91e5e..50bcdb327a 100644
> --- a/dts/pyproject.toml
> +++ b/dts/pyproject.toml
> @@ -9,10 +9,10 @@ authors = ["Owen Hilyard <ohilyard@iol.unh.edu>", "
> dts@dpdk.org"]
>
> [tool.poetry.dependencies]
> python = "^3.10"
> -pexpect = "^4.8.0"
> warlock = "^2.0.1"
> PyYAML = "^6.0"
> types-PyYAML = "^6.0.8"
> +fabric = "^2.7.1"
>
> [tool.poetry.dev-dependencies]
> mypy = "^0.961"
> --
> 2.34.1
>
>
--
Patrick Robb
Technical Service Manager
UNH InterOperability Laboratory
21 Madbury Rd, Suite 100, Durham, NH 03824
www.iol.unh.edu
[-- Attachment #2: Type: text/html, Size: 62046 bytes --]
^ permalink raw reply [flat|nested] 21+ messages in thread
* Re: [PATCH v3] dts: replace pexpect with fabric
2023-06-21 18:33 ` Jeremy Spewock
2023-07-05 19:59 ` Jeremy Spewock
@ 2023-07-12 16:34 ` Thomas Monjalon
1 sibling, 0 replies; 21+ messages in thread
From: Thomas Monjalon @ 2023-07-12 16:34 UTC (permalink / raw)
To: Juraj Linkeš
Cc: dev, Honnappa.Nagarahalli, lijuan.tu, wathsala.vithanage, probb,
dev, Jeremy Spewock
21/06/2023 20:33, Jeremy Spewock:
> Acked-by: Jeremy Spewock <jspewock@iol.unh.edu>
>
> On Fri, Jun 9, 2023 at 5:46 AM Juraj Linkeš <juraj.linkes@pantheon.tech>
> wrote:
>
> > Pexpect is not a dedicated SSH connection library while Fabric is. With
> > Fabric, all SSH-related logic is provided and we can just focus on
> > what's DTS specific.
> >
> > Signed-off-by: Juraj Linkeš <juraj.linkes@pantheon.tech>
Applied, thanks.
^ permalink raw reply [flat|nested] 21+ messages in thread
end of thread, other threads:[~2023-07-12 16:34 UTC | newest]
Thread overview: 21+ messages (download: mbox.gz / follow: Atom feed)
-- links below jump to the message on this page --
2023-04-03 11:46 [PATCH v1 1/2] dts: fabric requirements Juraj Linkeš
2023-04-03 11:46 ` [PATCH v1 2/2] dts: replace pexpect with fabric Juraj Linkeš
2023-04-24 13:35 ` [PATCH v2] " Juraj Linkeš
2023-04-28 19:03 ` Jeremy Spewock
2023-05-02 13:00 ` Juraj Linkeš
2023-05-03 17:54 ` Jeremy Spewock
2023-06-09 9:46 ` [PATCH v3] " Juraj Linkeš
2023-06-21 18:33 ` Jeremy Spewock
2023-07-05 19:59 ` Jeremy Spewock
2023-07-12 16:34 ` Thomas Monjalon
2023-07-09 1:45 ` Patrick Robb
2023-04-03 12:33 ` [PATCH v1 1/2] dts: fabric requirements Thomas Monjalon
2023-04-03 14:56 ` Juraj Linkeš
2023-04-03 15:17 ` Thomas Monjalon
2023-04-04 11:51 ` Juraj Linkeš
2023-04-11 14:48 ` Thomas Monjalon
2023-04-12 13:42 ` Juraj Linkeš
2023-04-12 15:24 ` Thomas Monjalon
2023-04-12 15:38 ` Honnappa Nagarahalli
2023-04-13 6:50 ` Juraj Linkeš
2023-04-13 7:49 ` Juraj Linkeš
This is a public inbox, see mirroring instructions
for how to clone and mirror all data and code used for this inbox;
as well as URLs for NNTP newsgroup(s).