Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[PH] Replace usage of os.path with Path and PurePath #571

29 changes: 16 additions & 13 deletions tests/performance_tests/log_reader.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,12 +5,10 @@
import re
import numpy as np
import json
import glob
import gzip
import math

harnessPath = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(harnessPath)
from pathlib import Path, PurePath
# Make the directory two levels up (which holds the TestHarness package) importable.
# Simplified from PurePath(PurePath(Path(...).absolute()).parent).parent: Path already
# exposes .parent chaining, and the extra PurePath wrappers change nothing.
sys.path.append(str(Path(__file__).absolute().parent.parent))

from TestHarness import Utils
from dataclasses import dataclass, asdict, field
Expand All @@ -24,10 +22,10 @@

@dataclass
class ArtifactPaths:
    """Filesystem locations of the logs and data files produced by a performance test run."""
    nodeosLogPath: Path = Path("")      # nodeos stderr log to scrape
    trxGenLogDirPath: Path = Path("")   # directory of transaction generator output logs
    blockTrxDataPath: Path = Path("")   # per-block transaction data log
    blockDataPath: Path = Path("")      # per-block summary data log

@dataclass
class TpsTestConfig:
Expand Down Expand Up @@ -178,8 +176,11 @@ def printBlockData(self):
def assertEquality(self, other):
assert self == other, f"Error: Actual log:\n{self}\ndid not match expected log:\n{other}"

def selectedOpen(path):
    """Return the opener matching *path*: gzip.open for '.gz' files, builtin open otherwise."""
    if path.suffix == '.gz':
        return gzip.open
    return open

def scrapeLog(data, path):
selectedopen = gzip.open if path.endswith('.gz') else open
selectedopen = selectedOpen(path)
with selectedopen(path, 'rt') as f:
blockResult = re.findall(r'Received block ([0-9a-fA-F]*).* #(\d+) .*trxs: (\d+)(.*)', f.read())
if data.startBlock is None:
Expand All @@ -202,23 +203,23 @@ def scrapeLog(data, path):
print("Error: Unknown log format")

def scrapeTrxGenLog(trxSent, path):
    """Populate *trxSent* from the trx generator log at *path* (.gz or plain text).

    Each CSV line is mapped first-field -> second-field; extra fields are ignored.
    """
    selectedopen = selectedOpen(path)
    with selectedopen(path, 'rt') as f:
        # dict comprehension instead of dict([...]) — avoids building a throwaway list (ruff C4xx).
        trxSent.update({fields[0]: fields[1] for fields in (line.rstrip('\n').split(',') for line in f)})

def scrapeBlockTrxDataLog(trxDict, path):
    """Populate *trxDict* from the block transaction data log at *path* (.gz or plain text).

    Each CSV line is mapped first-field -> trxData(second, third, fourth field).
    """
    selectedopen = selectedOpen(path)
    with selectedopen(path, 'rt') as f:
        # dict comprehension instead of dict([...]) — avoids building a throwaway list (ruff C4xx).
        trxDict.update({fields[0]: trxData(fields[1], fields[2], fields[3])
                        for fields in (line.rstrip('\n').split(',') for line in f)})

def scrapeBlockDataLog(blockDict, path):
    """Populate *blockDict* from the block data log at *path* (.gz or plain text).

    Each CSV line is mapped first-field -> blkData(second, third, fourth, fifth field).
    """
    selectedopen = selectedOpen(path)
    with selectedopen(path, 'rt') as f:
        # dict comprehension instead of dict([...]) — avoids building a throwaway list (ruff C4xx).
        blockDict.update({fields[0]: blkData(fields[1], fields[2], fields[3], fields[4])
                          for fields in (line.rstrip('\n').split(',') for line in f)})

def scrapeTrxGenTrxSentDataLogs(trxSent, trxGenLogDirPath, quiet):
filesScraped = []
for fileName in glob.glob(f"{trxGenLogDirPath}/trx_data_output_*.txt"):
for fileName in trxGenLogDirPath.glob("trx_data_output_*.txt"):
filesScraped.append(fileName)
scrapeTrxGenLog(trxSent, fileName)

Expand Down Expand Up @@ -380,6 +381,8 @@ class LogReaderEncoder(json.JSONEncoder):
def default(self, obj):
    """JSON fallback serializer: datetime -> ISO-8601 string, pure paths -> str, None -> "Unknown"."""
    if obj is None:
        return "Unknown"
    # Dispatch table of (type, converter) pairs for the custom-serialized types.
    for matchType, convert in ((datetime, datetime.isoformat), (PurePath, str)):
        if isinstance(obj, matchType):
            return convert(obj)
    return json.JSONEncoder.default(self, obj)
Expand Down
6 changes: 4 additions & 2 deletions tests/performance_tests/log_reader_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,15 @@
# Also ensures that all versions of nodeos logs can be handled
import log_reader

from pathlib import Path

testSuccessful = False

# Test log scraping for 3.2 log format
dataCurrent = log_reader.chainData()
dataCurrent.startBlock = None
dataCurrent.ceaseBlock = None
log_reader.scrapeLog(dataCurrent, "tests/performance_tests/nodeos_log_3_2.txt.gz")
log_reader.scrapeLog(dataCurrent, Path("tests")/"performance_tests"/"nodeos_log_3_2.txt.gz")

expectedCurrent = log_reader.chainData()
expectedCurrent.startBlock = 2
Expand Down Expand Up @@ -101,7 +103,7 @@
dataOld = log_reader.chainData()
dataOld.startBlock = None
dataOld.ceaseBlock = None
log_reader.scrapeLog(dataOld, "tests/performance_tests/nodeos_log_2_0_14.txt.gz")
log_reader.scrapeLog(dataOld, Path("tests")/"performance_tests"/"nodeos_log_2_0_14.txt.gz")
expectedOld = log_reader.chainData()
expectedOld.startBlock = 2
expectedOld.ceaseBlock = 93
Expand Down
36 changes: 18 additions & 18 deletions tests/performance_tests/performance_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@
import json
import shutil

harnessPath = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(harnessPath)
from pathlib import Path, PurePath
# Make the directory two levels up (which holds the TestHarness package) importable.
# Simplified from PurePath(PurePath(Path(...).absolute()).parent).parent: Path already
# exposes .parent chaining, and the extra PurePath wrappers change nothing.
sys.path.append(str(Path(__file__).absolute().parent.parent))

from NodeosPluginArgs import ChainPluginArgs, HttpPluginArgs, NetPluginArgs, ProducerPluginArgs
from TestHarness import TestHelper, Utils
Expand All @@ -34,7 +34,7 @@ class PerfTestBasicResult:
trxExpectMet: bool = False
basicTestSuccess: bool = False
testAnalysisBlockCnt: int = 0
logsDir: str = ""
logsDir: Path = Path("")
testStart: datetime = None
testEnd: datetime = None

Expand All @@ -56,7 +56,7 @@ class PtConfig:
delTestReport: bool=False
numAddlBlocksToPrune: int=2
quiet: bool=False
logDirRoot: str="."
logDirRoot: Path=Path(".")
skipTpsTests: bool=False
calcProducerThreads: str="none"
calcChainThreads: str="none"
Expand All @@ -79,16 +79,16 @@ class PerfTestSearchResults:

@dataclass
class LoggingConfig:
    """Derives the timestamped log directory layout for a performance test run."""
    # Default base is ./<this script's name without extension>.
    # BUG FIX: PurePath(PurePath(__file__).name).stem[0] kept only the FIRST CHARACTER
    # of the script name; PurePath(__file__).stem is the full equivalent of the original
    # os.path.splitext(os.path.basename(__file__))[0].
    logDirBase: Path = Path(".")/PurePath(__file__).stem
    logDirTimestamp: str = f"{datetime.utcnow().strftime('%Y-%m-%d_%H-%M-%S')}"
    # Derived in __post_init__; not constructor arguments.
    logDirPath: Path = field(default_factory=Path, init=False)
    ptbLogsDirPath: Path = field(default_factory=Path, init=False)
    pluginThreadOptLogsDirPath: Path = field(default_factory=Path, init=False)

    def __post_init__(self):
        # Compose the derived directory paths from the base and timestamp.
        self.logDirPath = self.logDirBase/self.logDirTimestamp
        self.ptbLogsDirPath = self.logDirPath/"testRunLogs"
        self.pluginThreadOptLogsDirPath = self.logDirPath/"pluginThreadOptRunLogs"

def __init__(self, testHelperConfig: PerformanceTestBasic.TestHelperConfig=PerformanceTestBasic.TestHelperConfig(),
clusterConfig: PerformanceTestBasic.ClusterConfig=PerformanceTestBasic.ClusterConfig(), ptConfig=PtConfig()):
Expand All @@ -98,10 +98,10 @@ def __init__(self, testHelperConfig: PerformanceTestBasic.TestHelperConfig=Perfo

self.testsStart = datetime.utcnow()

self.loggingConfig = PerformanceTest.LoggingConfig(logDirBase=f"{self.ptConfig.logDirRoot}/{os.path.splitext(os.path.basename(__file__))[0]}",
self.loggingConfig = PerformanceTest.LoggingConfig(logDirBase=Path(self.ptConfig.logDirRoot)/PurePath(PurePath(__file__).name).stem[0],
logDirTimestamp=f"{self.testsStart.strftime('%Y-%m-%d_%H-%M-%S')}")

def performPtbBinarySearch(self, clusterConfig: PerformanceTestBasic.ClusterConfig, logDirRoot: str, delReport: bool, quiet: bool, delPerfLogs: bool) -> TpsTestResult.PerfTestSearchResults:
def performPtbBinarySearch(self, clusterConfig: PerformanceTestBasic.ClusterConfig, logDirRoot: Path, delReport: bool, quiet: bool, delPerfLogs: bool) -> TpsTestResult.PerfTestSearchResults:
floor = 0
ceiling = self.ptConfig.maxTpsToTest
binSearchTarget = self.ptConfig.maxTpsToTest
Expand Down Expand Up @@ -216,7 +216,7 @@ class PluginThreadOptResult:
def optimizePluginThreadCount(self, optPlugin: PluginThreadOpt, optType: PluginThreadOptRunType=PluginThreadOptRunType.LOCAL_MAX,
minThreadCount: int=2, maxThreadCount: int=os.cpu_count()) -> PluginThreadOptResult:

resultsFile = f"{self.loggingConfig.pluginThreadOptLogsDirPath}/{optPlugin.value}ThreadResults.txt"
resultsFile = self.loggingConfig.pluginThreadOptLogsDirPath/Path(f"{optPlugin.value}ThreadResults.txt")

threadToMaxTpsDict: dict = {}

Expand Down Expand Up @@ -311,7 +311,7 @@ def testDirsCleanup(self):
try:
def removeArtifacts(path):
print(f"Checking if test artifacts dir exists: {path}")
if os.path.isdir(f"{path}"):
if Path(path).is_dir():
print(f"Cleaning up test artifacts dir and all contents of: {path}")
shutil.rmtree(f"{path}")

Expand All @@ -327,7 +327,7 @@ def testDirsSetup(self):
try:
def createArtifactsDir(path):
print(f"Checking if test artifacts dir exists: {path}")
if not os.path.isdir(f"{path}"):
if not Path(path).is_dir():
print(f"Creating test artifacts dir: {path}")
os.mkdir(f"{path}")

Expand Down Expand Up @@ -432,7 +432,7 @@ def runTest(self):
print(f"Full Performance Test Report: {jsonReport}")

if not self.ptConfig.delReport:
self.exportReportAsJSON(jsonReport, f"{self.loggingConfig.logDirPath}/report.json")
self.exportReportAsJSON(jsonReport, self.loggingConfig.logDirPath/Path("report.json"))

if self.ptConfig.delPerfLogs:
print(f"Cleaning up logs directory: {self.loggingConfig.logDirPath}")
Expand Down Expand Up @@ -519,7 +519,7 @@ def main():
delTestReport=args.del_test_report,
numAddlBlocksToPrune=args.num_blocks_to_prune,
quiet=args.quiet,
logDirRoot=".",
logDirRoot=Path("."),
skipTpsTests=args.skip_tps_test,
calcProducerThreads=args.calc_producer_threads,
calcChainThreads=args.calc_chain_threads,
Expand Down
47 changes: 23 additions & 24 deletions tests/performance_tests/performance_test_basic.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,8 @@
import log_reader
import launch_transaction_generators as ltg

harnessPath = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(harnessPath)
from pathlib import Path, PurePath
# Make the directory two levels up (which holds the TestHarness package) importable.
# Simplified from PurePath(PurePath(Path(...).absolute()).parent).parent: Path already
# exposes .parent chaining, and the extra PurePath wrappers change nothing.
sys.path.append(str(Path(__file__).absolute().parent.parent))

from NodeosPluginArgs import ChainPluginArgs, HttpClientPluginArgs, HttpPluginArgs, NetPluginArgs, ProducerPluginArgs, ResourceMonitorPluginArgs, SignatureProviderPluginArgs, StateHistoryPluginArgs, TraceApiPluginArgs
from TestHarness import Cluster, TestHelper, Utils, WalletMgr
Expand Down Expand Up @@ -70,7 +70,7 @@ def __str__(self) -> str:
topo: str = "mesh"
extraNodeosArgs: ExtraNodeosArgs = ExtraNodeosArgs()
useBiosBootFile: bool = False
genesisPath: str = "tests/performance_tests/genesis.json"
genesisPath: Path = Path("tests")/"performance_tests"/"genesis.json"
maximumP2pPerHost: int = 5000
maximumClients: int = 0
loggingDict: dict = field(default_factory=lambda: { "bios": "off" })
Expand All @@ -89,7 +89,7 @@ class PtbConfig:
testTrxGenDurationSec: int=30
tpsLimitPerGenerator: int=4000
numAddlBlocksToPrune: int=2
logDirRoot: str="."
logDirRoot: Path=Path(".")
delReport: bool=False
quiet: bool=False
delPerfLogs: bool=False
Expand All @@ -100,13 +100,13 @@ def __post_init__(self):

@dataclass
class LoggingConfig:
    """Derives the timestamped, TPS-suffixed log directory for a basic performance test run."""
    # Default base is ./<this script's name without extension>.
    # BUG FIX: PurePath(PurePath(__file__).name).stem[0] kept only the FIRST CHARACTER
    # of the script name; PurePath(__file__).stem is the full equivalent of the original
    # os.path.splitext(os.path.basename(__file__))[0].
    logDirBase: Path = Path(".")/PurePath(__file__).stem
    logDirTimestamp: str = f"{datetime.utcnow().strftime('%Y-%m-%d_%H-%M-%S')}"
    logDirTimestampedOptSuffix: str = ""
    # Derived in __post_init__; not a constructor argument.
    logDirPath: Path = field(default_factory=Path, init=False)

    def __post_init__(self):
        # Compose the run directory from base, timestamp, and optional suffix.
        self.logDirPath = self.logDirBase/f"{self.logDirTimestamp}{self.logDirTimestampedOptSuffix}"

def __init__(self, testHelperConfig: TestHelperConfig=TestHelperConfig(), clusterConfig: ClusterConfig=ClusterConfig(), ptbConfig=PtbConfig()):
self.testHelperConfig = testHelperConfig
Expand All @@ -121,26 +121,25 @@ def __init__(self, testHelperConfig: TestHelperConfig=TestHelperConfig(), cluste

self.testStart = datetime.utcnow()

self.loggingConfig = PerformanceTestBasic.LoggingConfig(logDirBase=f"{self.ptbConfig.logDirRoot}/{os.path.splitext(os.path.basename(__file__))[0]}",
self.loggingConfig = PerformanceTestBasic.LoggingConfig(logDirBase=Path(self.ptbConfig.logDirRoot)/PurePath(PurePath(__file__).name).stem[0],
logDirTimestamp=f"{self.testStart.strftime('%Y-%m-%d_%H-%M-%S')}",
logDirTimestampedOptSuffix = f"-{self.ptbConfig.targetTps}")

self.trxGenLogDirPath = f"{self.loggingConfig.logDirPath}/trxGenLogs"
self.varLogsDirPath = f"{self.loggingConfig.logDirPath}/var"
self.etcLogsDirPath = f"{self.loggingConfig.logDirPath}/etc"
self.etcEosioLogsDirPath = f"{self.etcLogsDirPath}/eosio"
self.blockDataLogDirPath = f"{self.loggingConfig.logDirPath}/blockDataLogs"
self.blockDataPath = f"{self.blockDataLogDirPath}/blockData.txt"
self.blockTrxDataPath = f"{self.blockDataLogDirPath}/blockTrxData.txt"
self.reportPath = f"{self.loggingConfig.logDirPath}/data.json"
self.trxGenLogDirPath = self.loggingConfig.logDirPath/Path("trxGenLogs")
self.varLogsDirPath = self.loggingConfig.logDirPath/Path("var")
self.etcLogsDirPath = self.loggingConfig.logDirPath/Path("etc")
self.etcEosioLogsDirPath = self.etcLogsDirPath/Path("eosio")
self.blockDataLogDirPath = self.loggingConfig.logDirPath/Path("blockDataLogs")
self.blockDataPath = self.blockDataLogDirPath/Path("blockData.txt")
self.blockTrxDataPath = self.blockDataLogDirPath/Path("blockTrxData.txt")
self.reportPath = self.loggingConfig.logDirPath/Path("data.json")

# Setup Expectations for Producer and Validation Node IDs
# Producer Nodes are index [0, pnodes) and validation nodes/non-producer nodes [pnodes, _totalNodes)
# Use first producer node and first non-producer node
self.producerNodeId = 0
self.validationNodeId = self.clusterConfig.pnodes

self.nodeosLogPath = f'var/lib/node_{str(self.validationNodeId).zfill(2)}/stderr.txt'
self.nodeosLogPath = Path("var")/"lib"/f"node_{str(self.validationNodeId).zfill(2)}"/"stderr.txt"

# Setup cluster and its wallet manager
self.walletMgr=WalletMgr(True)
Expand All @@ -155,7 +154,7 @@ def testDirsCleanup(self, delReport: bool=False):
try:
def removeArtifacts(path):
print(f"Checking if test artifacts dir exists: {path}")
if os.path.isdir(f"{path}"):
if Path(path).is_dir():
print(f"Cleaning up test artifacts dir and all contents of: {path}")
shutil.rmtree(f"{path}")

Expand All @@ -177,7 +176,7 @@ def testDirsSetup(self):
try:
def createArtifactsDir(path):
print(f"Checking if test artifacts dir exists: {path}")
if not os.path.isdir(f"{path}"):
if not Path(path).is_dir():
print(f"Creating test artifacts dir: {path}")
os.mkdir(f"{path}")

Expand All @@ -194,7 +193,7 @@ def createArtifactsDir(path):
print(error)

def fileOpenMode(self, filePath) -> str:
if os.path.exists(filePath):
if filePath.exists():
append_write = 'a'
else:
append_write = 'w'
Expand Down Expand Up @@ -299,18 +298,18 @@ def captureLowLevelArtifacts(self):
except Exception as e:
print(f"Failed to move 'var' to '{self.varLogsDirPath}': {type(e)}: {e}")

etcEosioDir = "etc/eosio"
etcEosioDir = Path("etc")/"eosio"
for path in os.listdir(etcEosioDir):
if path == "launcher":
try:
# Need to copy here since testnet.template is only generated at compile time then reused, therefore
# it needs to remain in etc/eosio/launcher for subsequent tests.
shutil.copytree(f"{etcEosioDir}/{path}", f"{self.etcEosioLogsDirPath}/{path}")
shutil.copytree(etcEosioDir/Path(path), self.etcEosioLogsDirPath/Path(path))
except Exception as e:
print(f"Failed to copy '{etcEosioDir}/{path}' to '{self.etcEosioLogsDirPath}/{path}': {type(e)}: {e}")
else:
try:
shutil.move(f"{etcEosioDir}/{path}", f"{self.etcEosioLogsDirPath}/{path}")
shutil.move(etcEosioDir/Path(path), self.etcEosioLogsDirPath/Path(path))
except Exception as e:
print(f"Failed to move '{etcEosioDir}/{path}' to '{self.etcEosioLogsDirPath}/{path}': {type(e)}: {e}")

Expand Down
10 changes: 6 additions & 4 deletions tests/performance_tests/read_log_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@
import log_reader
import launch_transaction_generators as ltg

from pathlib import Path

parser = argparse.ArgumentParser(add_help=False)
parser.add_argument("--target-tps", type=int, help="The target transfers per second to send during test", default=8000)
parser.add_argument("--test-duration-sec", type=int, help="The duration of transfer trx generation for the test in seconds", default=30)
Expand All @@ -18,14 +20,14 @@
parser.add_argument("--json-path", type=str, help="Path to save json output", default="data.json")
parser.add_argument("--quiet", type=bool, help="Whether to quiet printing intermediate results and reports to stdout", default=False)
args = parser.parse_args()
nodeosLogPath=args.log_path
nodeosLogPath=Path(args.log_path)
blockDataLogDirPath = args.block_data_logs_dir
trxGenLogDirPath = args.trx_data_logs_dir
trxGenLogDirPath = Path(args.trx_data_logs_dir)
data = log_reader.chainData()
data.startBlock = args.start_block
data.ceaseBlock = args.cease_block
blockDataPath = f"{blockDataLogDirPath}/blockData.txt"
blockTrxDataPath = f"{blockDataLogDirPath}/blockTrxData.txt"
blockDataPath = Path(blockDataLogDirPath)/"blockData.txt"
blockTrxDataPath = Path(blockDataLogDirPath)/"blockTrxData.txt"
tpsLimitPerGenerator=args.tps_limit_per_generator
targetTps=args.target_tps
tpsTrxGensConfig = ltg.TpsTrxGensConfig(targetTps=targetTps, tpsLimitPerGenerator=tpsLimitPerGenerator)
Expand Down