Mirror of https://github.com/python-kasa/python-kasa.git, synced 2024-12-22 19:23:34 +00:00
Add perftest to devtools (#236)
* Add perftest to devtools
* Add example output from the perf script
* Rename to avoid pytest collection
* Fix git mv failing to remove the original file
This commit is contained in:
parent 85a618f7c6
commit d75e1adaba
devtools/README.md (new file, 61 lines)
@@ -0,0 +1,61 @@

# Tools for developers

This directory contains some simple scripts that can be useful for developers.

## dump_devinfo

* Queries the device and returns a fixture that can be added to the test suite

```shell
Usage: dump_devinfo.py [OPTIONS] HOST

  Generate devinfo file for given device.

Options:
  -d, --debug
  --help       Show this message and exit.
```
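
For example, a fixture could be generated from a device at a placeholder address like this (add `-d` for debug output):

```shell
$ python dump_devinfo.py 192.168.xx.x
```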

## parse_pcap

* Requires dpkt (`pip install dpkt`)
* Reads a pcap file and prints out the device communications

```shell
Usage: parse_pcap.py [OPTIONS] FILE

  Parse pcap file and pretty print the communications and some statistics.

Options:
  --help  Show this message and exit.
```
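
As an illustration, assuming the dependency is installed and a capture file named `capture.pcap` (a hypothetical filename) is at hand:

```shell
$ pip install dpkt
$ python parse_pcap.py capture.pcap
```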

## perftest

* Runs several rounds of update cycles for the given list of addresses, and prints out statistics about the performance

```shell
Usage: perf_test.py [OPTIONS] [ADDRS]...

Options:
  --rounds INTEGER
  --help            Show this message and exit.
```

```shell
$ python perf_test.py 192.168.xx.x 192.168.xx.y 192.168.xx.z 192.168.xx.f
Running 5 rounds on ('192.168.xx.x', '192.168.xx.y', '192.168.xx.z', '192.168.xx.f')
=== Testing using gather on all devices ===
                  took
                 count      mean       std      min       25%       50%       75%       max
type
concurrently       5.0  0.097161  0.045544  0.05260  0.055332  0.088811  0.143082  0.145981
sequential         5.0  0.150506  0.005798  0.14162  0.149065  0.150499  0.155579  0.155768
=== Testing per-device performance ===
                  took
                 count      mean       std       min       25%       50%       75%       max
id
<id>-HS110(EU)     5.0  0.044917  0.014984  0.035836  0.037728  0.037950  0.041610  0.071458
<id>-KL130(EU)     5.0  0.067626  0.032027  0.046451  0.046797  0.048406  0.076136  0.120342
<id>-HS110(EU)     5.0  0.055700  0.016174  0.042086  0.045578  0.048905  0.059869  0.082064
<id>-KP303(UK)     5.0  0.010298  0.003765  0.007773  0.007968  0.008546  0.010439  0.016763
```

devtools/perftest.py (new file, 93 lines)

@@ -0,0 +1,93 @@
"""Script for testing update performance on devices."""
import asyncio
import time

import asyncclick as click
import pandas as pd

from kasa import Discover


async def _update(dev, lock=None):
    # When a per-device lock is given, serialize measurements of that device
    # and pause briefly before starting the next one.
    if lock is not None:
        await lock.acquire()
        await asyncio.sleep(2)
    try:
        start_time = time.time()
        # print("%s >> Updating" % id(dev))
        await dev.update()
        # print("%s >> done in %s" % (id(dev), time.time() - start_time))
        return {"id": f"{id(dev)}-{dev.model}", "took": (time.time() - start_time)}
    finally:
        if lock is not None:
            lock.release()


async def _update_concurrently(devs):
    # Update all devices at once with gather() and time the whole batch.
    start_time = time.time()
    update_futures = [asyncio.ensure_future(_update(dev)) for dev in devs]
    await asyncio.gather(*update_futures)
    return {"type": "concurrently", "took": (time.time() - start_time)}


async def _update_sequentially(devs):
    # Update the devices one after another and time the whole pass.
    start_time = time.time()

    for dev in devs:
        await _update(dev)

    return {"type": "sequential", "took": (time.time() - start_time)}


@click.command()
@click.argument("addrs", nargs=-1)
@click.option("--rounds", default=5)
async def main(addrs, rounds):
    """Test update performance on given devices."""
    print(f"Running {rounds} rounds on {addrs}")
    devs = []

    # Discover each given address; skip the ones that cannot be reached.
    for addr in addrs:
        try:
            dev = await Discover.discover_single(addr)
            devs.append(dev)
        except Exception as ex:
            print(f"unable to add {addr}: {ex}")

    data = []
    test_gathered = True

    if test_gathered:
        print("=== Testing using gather on all devices ===")
        for i in range(rounds):
            data.append(await _update_concurrently(devs))
            await asyncio.sleep(2)

        await asyncio.sleep(5)

        for i in range(rounds):
            data.append(await _update_sequentially(devs))
            await asyncio.sleep(2)

        df = pd.DataFrame(data)
        print(df.groupby("type").describe())

    print("=== Testing per-device performance ===")

    futs = []
    data = []
    # One lock per device: updates of the same device are serialized while
    # different devices can still be updated concurrently.
    locks = {dev: asyncio.Lock() for dev in devs}
    for i in range(rounds):
        for dev in devs:
            futs.append(asyncio.ensure_future(_update(dev, locks[dev])))

    for fut in asyncio.as_completed(futs):
        res = await fut
        data.append(res)

    df = pd.DataFrame(data)
    print(df.groupby("id").describe())


if __name__ == "__main__":
    main(_anyio_backend="asyncio")