diff --git a/.github/workflows/coverage.yaml b/.github/workflows/coverage.yaml
index 6107ecf3c..80826766b 100644
--- a/.github/workflows/coverage.yaml
+++ b/.github/workflows/coverage.yaml
@@ -44,6 +44,7 @@ jobs:
           mpiexec -n 2 --use-hwthread-cpus coverage run --rcfile=pyproject.toml -m pytest --cov=armi --cov-config=pyproject.toml --cov-report=lcov --cov-append --ignore=venv armi/tests/test_mpiParameters.py || true
           mpiexec -n 2 --use-hwthread-cpus coverage run --rcfile=pyproject.toml -m pytest --cov=armi --cov-config=pyproject.toml --cov-report=lcov --cov-append --ignore=venv armi/tests/test_mpiDirectoryChangers.py || true
           coverage combine --rcfile=pyproject.toml --keep -a
+          coverage report --rcfile=pyproject.toml -i --skip-empty --skip-covered --sort=cover --fail-under=90
       - name: Publish to coveralls.io
         uses: coverallsapp/github-action@v2
         with:
diff --git a/armi/cases/case.py b/armi/cases/case.py
index cadcbaac0..faebff6dd 100644
--- a/armi/cases/case.py
+++ b/armi/cases/case.py
@@ -855,7 +855,7 @@ def _copyInputsHelper(
     -------
     destFilePath (or origFile) : str
     """
-    sourceName = os.path.basename(sourcePath)
+    sourceName = pathlib.Path(sourcePath).name
     destFilePath = os.path.join(destPath, sourceName)
     try:
         pathTools.copyOrWarn(fileDescription, sourcePath, destFilePath)
@@ -880,9 +880,10 @@ def copyInterfaceInputs(
     This function should now be able to handle the updating of:
 
         - a single file (relative or absolute)
-        - a list of files (relative or absolute), and
+        - a list of files (relative or absolute)
         - a file entry that has a wildcard processing into multiple files. Glob is
           used to offer support for wildcards.
+        - a directory and its contents
 
     If the file paths are absolute, do nothing. The case will be able to find the file.
@@ -950,7 +951,7 @@ def copyInterfaceInputs(
             path = pathlib.Path(f)
             if not WILDCARD and not RELATIVE:
                 try:
-                    if path.is_absolute() and path.exists() and path.is_file():
+                    if path.is_absolute() and path.exists():
                         # Path is absolute, no settings modification or filecopy needed
                         newFiles.append(path)
                         continue
diff --git a/armi/tests/test_interfaces.py b/armi/tests/test_interfaces.py
index 2ae280457..1f23afdd3 100644
--- a/armi/tests/test_interfaces.py
+++ b/armi/tests/test_interfaces.py
@@ -14,12 +14,9 @@
 """Tests the Interface."""
 import unittest
-import os
 
 from armi import interfaces
 from armi import settings
-from armi.tests import TEST_ROOT
-from armi.utils import textProcessors
 
 
 class DummyInterface(interfaces.Interface):
@@ -75,25 +72,6 @@ def test_duplicate(self):
         self.assertEqual(i.enabled(), iDup.enabled())
 
 
-class TestTextProcessor(unittest.TestCase):
-    """Test Text processor."""
-
-    def setUp(self):
-        self.tp = textProcessors.TextProcessor(os.path.join(TEST_ROOT, "geom.xml"))
-
-    def test_fsearch(self):
-        """Test fsearch in re mode."""
-        line = self.tp.fsearch("xml")
-        self.assertIn("version", line)
-        self.assertEqual(self.tp.fsearch("xml"), "")
-
-    def test_fsearch_text(self):
-        """Test fsearch in text mode."""
-        line = self.tp.fsearch("xml", textFlag=True)
-        self.assertIn("version", line)
-        self.assertEqual(self.tp.fsearch("xml"), "")
-
-
 class TestTightCoupler(unittest.TestCase):
     """Test the tight coupler class."""
diff --git a/armi/utils/__init__.py b/armi/utils/__init__.py
index f5943d02a..ae9c68443 100644
--- a/armi/utils/__init__.py
+++ b/armi/utils/__init__.py
@@ -24,7 +24,6 @@
 import shutil
 import sys
 import tempfile
-import threading
 import time
 
 from armi import runLog
@@ -38,37 +37,33 @@
 def getFileSHA1Hash(filePath, digits=40):
     """
-    Generate a SHA-1 hash of the input file.
+    Generate a SHA-1 hash of the input file or directory.
 
     Parameters
    ----------
     filePath : str
-        Path to file to obtain the SHA-1 hash
+        Path to file or directory to obtain the SHA-1 hash
     digits : int, optional
         Number of digits to include in the hash (40 digit maximum for SHA-1)
     """
     sha1 = hashlib.sha1()
-    with open(filePath, "rb") as f:
-        while True:
-            data = f.read(_HASH_BUFFER_SIZE)
-            if not data:
-                break
-            sha1.update(data)
-
-    return sha1.hexdigest()[:digits]
+    filesToHash = []
+    if os.path.isdir(filePath):
+        for root, _, files in os.walk(filePath):
+            for file in sorted(files):
+                filesToHash.append(os.path.join(root, file))
+    else:
+        filesToHash.append(filePath)
 
+    for file in filesToHash:
+        with open(file, "rb") as f:
+            while True:
+                data = f.read(_HASH_BUFFER_SIZE)
+                if not data:
+                    break
+                sha1.update(data)
 
-def copyWithoutBlocking(src, dest):
-    """
-    Copy a file in a separate thread to avoid blocking while IO completes.
-
-    Useful for copying large files while ARMI moves along.
-    """
-    files = "{} to {}".format(src, dest)
-    runLog.extra("Copying (without blocking) {}".format(files))
-    t = threading.Thread(target=shutil.copy, args=(src, dest))
-    t.start()
-    return t
+    return sha1.hexdigest()[:digits]
 
 
 def getPowerFractions(cs):
diff --git a/armi/utils/pathTools.py b/armi/utils/pathTools.py
index 5eb9d97a9..d80354a09 100644
--- a/armi/utils/pathTools.py
+++ b/armi/utils/pathTools.py
@@ -41,29 +41,34 @@ def armiAbsPath(*pathParts):
     return os.path.abspath(os.path.join(*pathParts))
 
 
-def copyOrWarn(fileDescription, sourcePath, destinationPath):
-    """Copy a file, or warn if the file doesn't exist.
+def copyOrWarn(filepathDescription, sourcePath, destinationPath):
+    """Copy a file or directory, or warn if the filepath doesn't exist.
 
     Parameters
     ----------
-    fileDescription : str
+    filepathDescription : str
         a description of the file and/or operation being performed.
     sourcePath : str
-        Path of the file to be copied.
+        Path of the file or directory to be copied.
     destinationPath : str
-        Path for the copied file.
+        Destination path for the copy.
""" try: - shutil.copy(sourcePath, destinationPath) + if os.path.isdir(sourcePath): + shutil.copytree(sourcePath, destinationPath, dirs_exist_ok=True) + else: + shutil.copy(sourcePath, destinationPath) runLog.debug( - "Copied {}: {} -> {}".format(fileDescription, sourcePath, destinationPath) + "Copied {}: {} -> {}".format( + filepathDescription, sourcePath, destinationPath + ) ) except shutil.SameFileError: pass except Exception as e: runLog.warning( "Could not copy {} from {} to {}\nError was: {}".format( - fileDescription, sourcePath, destinationPath, e + filepathDescription, sourcePath, destinationPath, e ) ) diff --git a/armi/utils/tests/test_pathTools.py b/armi/utils/tests/test_pathTools.py index 47c4a8b87..2b6f046db 100644 --- a/armi/utils/tests/test_pathTools.py +++ b/armi/utils/tests/test_pathTools.py @@ -26,6 +26,41 @@ class PathToolsTests(unittest.TestCase): + def test_copyOrWarnFile(self): + with TemporaryDirectoryChanger(): + # Test a successful copy + path = "test.txt" + pathCopy = "testcopy.txt" + with open(path, "w") as f1: + f1.write("test") + pathTools.copyOrWarn("Test File", path, pathCopy) + self.assertTrue(os.path.exists(pathCopy)) + + # Test an exception + with self.assertRaises(Exception) as e: + pathTools.copyOrWarn("Test File", "FileDoesntExist.txt", pathCopy) + self.assertIn("Could not copy", e) + self.assertIn("No such file or directory", e) + + def test_copyOrWarnDir(self): + with TemporaryDirectoryChanger(): + # Test a successful copy + pathDir = "testDir" + path = os.path.join(pathDir, "test.txt") + pathDirCopy = "testcopy" + os.mkdir(pathDir) + with open(path, "w") as f1: + f1.write("test") + pathTools.copyOrWarn("Test File", pathDir, pathDirCopy) + self.assertTrue(os.path.exists(pathDirCopy)) + self.assertTrue(os.path.exists(os.path.join(pathDirCopy, "test.txt"))) + + # Test an exception + with self.assertRaises(Exception) as e: + pathTools.copyOrWarn("Test File", "DirDoesntExist", pathDirCopy) + self.assertIn("Could not copy", e) + self.assertIn("No such file or directory", e) + def test_separateModuleAndAttribute(self): self.assertRaises( ValueError, pathTools.separateModuleAndAttribute, r"path/with/no/colon" diff --git a/armi/utils/tests/test_textProcessors.py b/armi/utils/tests/test_textProcessors.py index a7a56bcdc..1d15afcc6 100644 --- a/armi/utils/tests/test_textProcessors.py +++ b/armi/utils/tests/test_textProcessors.py @@ -13,17 +13,41 @@ # limitations under the License. 
"""Tests for functions in textProcessors.py.""" from io import StringIO +import logging import os import pathlib import ruamel import unittest +from armi import runLog +from armi.tests import mockRunLogs +from armi.tests import TEST_ROOT from armi.utils import textProcessors +from armi.utils.directoryChangers import TemporaryDirectoryChanger THIS_DIR = os.path.dirname(__file__) RES_DIR = os.path.join(THIS_DIR, "resources") +class TestTextProcessor(unittest.TestCase): + """Test Text processor.""" + + def setUp(self): + self.tp = textProcessors.TextProcessor(os.path.join(TEST_ROOT, "geom.xml")) + + def test_fsearch(self): + """Test fsearch in re mode.""" + line = self.tp.fsearch("xml") + self.assertIn("version", line) + self.assertEqual(self.tp.fsearch("xml"), "") + + def test_fsearchText(self): + """Test fsearch in text mode.""" + line = self.tp.fsearch("xml", textFlag=True) + self.assertIn("version", line) + self.assertEqual(self.tp.fsearch("xml"), "") + + class YamlIncludeTest(unittest.TestCase): def test_resolveIncludes(self): with open(os.path.join(RES_DIR, "root.yaml")) as f: @@ -38,8 +62,7 @@ def test_resolveIncludes(self): anyIncludes = True self.assertFalse(anyIncludes) - # Re-parse the resolved stream, make sure that we included the stuff that we - # want + # Re-parse the resolved stream, make sure that we included the stuff that we want resolved.seek(0) data = ruamel.yaml.YAML().load(resolved) self.assertEqual(data["billy"]["children"][1]["full_name"], "Jennifer Person") @@ -99,15 +122,21 @@ class SequentialReaderTests(unittest.TestCase): _DUMMY_FILE_NAME = "DUMMY.txt" - @classmethod - def setUpClass(cls): - with open(cls._DUMMY_FILE_NAME, "w") as f: - f.write(cls.textStream) + def setUp(self): + self.td = TemporaryDirectoryChanger() + self.td.__enter__() + + with open(self._DUMMY_FILE_NAME, "w") as f: + f.write(self.textStream) - @classmethod - def tearDownClass(cls): - if os.path.exists(cls._DUMMY_FILE_NAME): - os.remove(cls._DUMMY_FILE_NAME) + def tearDown(self): + if os.path.exists(self._DUMMY_FILE_NAME): + try: + os.remove(self._DUMMY_FILE_NAME) + except OSError: + pass + + self.td.__exit__(None, None, None) def test_readFile(self): with textProcessors.SequentialReader(self._DUMMY_FILE_NAME) as sr: @@ -118,3 +147,32 @@ def test_readFileWithPattern(self): with textProcessors.SequentialReader(self._DUMMY_FILE_NAME) as sr: self.assertTrue(sr.searchForPattern("(X\s+Y\s+\d+\.\d+)")) self.assertEqual(float(sr.line.split()[2]), 3.5) + + def test_issueWarningOnFindingText(self): + with textProcessors.SequentialReader(self._DUMMY_FILE_NAME) as sr: + warningMsg = "Oh no" + sr.issueWarningOnFindingText("example test stream", warningMsg) + + with mockRunLogs.BufferLog() as mock: + runLog.LOG.startLog("test_issueWarningOnFindingText") + runLog.LOG.setVerbosity(logging.WARNING) + self.assertEqual("", mock.getStdout()) + self.assertTrue(sr.searchForPattern("example test stream")) + self.assertIn(warningMsg, mock.getStdout()) + + self.assertFalse(sr.searchForPattern("Killer Tomatoes")) + + def test_raiseErrorOnFindingText(self): + with textProcessors.SequentialReader(self._DUMMY_FILE_NAME) as sr: + sr.raiseErrorOnFindingText("example test stream", IOError) + + with self.assertRaises(IOError): + self.assertTrue(sr.searchForPattern("example test stream")) + + def test_consumeLine(self): + with textProcessors.SequentialReader(self._DUMMY_FILE_NAME) as sr: + sr.line = "hi" + sr.match = 1 + sr.consumeLine() + self.assertEqual(len(sr.line), 0) + self.assertIsNone(sr.match) diff --git 
a/armi/utils/tests/test_utils.py b/armi/utils/tests/test_utils.py index 826a155ef..f0e0b6500 100644 --- a/armi/utils/tests/test_utils.py +++ b/armi/utils/tests/test_utils.py @@ -24,26 +24,47 @@ from armi.settings.caseSettings import Settings from armi.tests import mockRunLogs from armi.utils import ( + codeTiming, directoryChangers, - getPowerFractions, - getCycleNames, getAvailabilityFactors, - getStepLengths, - getCycleLengths, getBurnSteps, + getCumulativeNodeNum, + getCycleLengths, + getCycleNames, + getCycleNodeFromCumulativeNode, + getCycleNodeFromCumulativeStep, + getFileSHA1Hash, getMaxBurnSteps, getNodesPerCycle, - getCycleNodeFromCumulativeStep, - getCycleNodeFromCumulativeNode, + getPowerFractions, getPreviousTimeNode, - getCumulativeNodeNum, + getStepLengths, hasBurnup, - codeTiming, safeCopy, ) class TestGeneralUtils(unittest.TestCase): + def test_getFileSHA1Hash(self): + with directoryChangers.TemporaryDirectoryChanger(): + path = "test.txt" + with open(path, "w") as f1: + f1.write("test") + sha = getFileSHA1Hash(path) + self.assertIn("a94a8", sha) + + def test_getFileSHA1HashDir(self): + with directoryChangers.TemporaryDirectoryChanger(): + pathDir = "testDir" + path1 = os.path.join(pathDir, "test1.txt") + path2 = os.path.join(pathDir, "test2.txt") + os.mkdir(pathDir) + for i, path in enumerate([path1, path2]): + with open(path, "w") as f1: + f1.write(f"test{i}") + sha = getFileSHA1Hash(pathDir) + self.assertIn("ccd13", sha) + def test_mergeableDictionary(self): mergeableDict = utils.MergeableDict() normalDict = {"luna": "thehusky", "isbegging": "fortreats", "right": "now"} diff --git a/armi/utils/textProcessors.py b/armi/utils/textProcessors.py index fcb6e1431..db9868316 100644 --- a/armi/utils/textProcessors.py +++ b/armi/utils/textProcessors.py @@ -304,7 +304,6 @@ def issueWarningOnFindingText(self, text, warning): ---------- text : str text to find within the file - warning : str An warning message to issue. @@ -316,7 +315,8 @@ def issueWarningOnFindingText(self, text, warning): self._textWarnings.append((text, warning)) def raiseErrorOnFindingText(self, text, error): - """Add a text search for every line of the file, if the text is found the specified error will be raised. + """Add a text search for every line of the file, if the text is found the specified error + will be raised. This is important for determining if errors occurred while searching for text. @@ -335,7 +335,8 @@ def raiseErrorOnFindingText(self, text, error): self._textErrors.append((text, error)) def raiseErrorOnFindingPattern(self, pattern, error): - """Add a pattern search for every line of the file, if the pattern is found the specified error will be raised. + """Add a pattern search for every line of the file, if the pattern is found the specified + error will be raised. This is important for determining if errors occurred while searching for text. @@ -546,12 +547,7 @@ def __init__(self, fname, highMem=False): # need this not to fail for detecting when RXSUM doesn't exist, etc. # note: Could make it check before instantiating... raise FileNotFoundError(f"{fname} does not exist.") - if not highMem: - # keep the file on disk, read as necessary - self.f = f - else: - # read all of f into memory and set up a list that remembers where it is. 
-            self.f = SmartList(f)
+        self.f = f
 
     def reset(self):
         """Rewinds the file so you can search through it again."""
@@ -616,42 +612,3 @@ def fsearch(self, pattern, msg=None, killOn=None, textFlag=False):
             result = ""
 
         return result
-
-
-class SmartList:
-    """A list that does stuff like files do i.e. remembers where it was, can seek, etc.
-    Actually this is pretty slow. so much for being smart. nice idea though.
-    """
-
-    def __init__(self, f):
-        self.lines = f.readlines()
-        self.position = 0
-        self.name = f.name
-        self.length = len(self.lines)
-
-    def __getitem__(self, index):
-        return self.lines[index]
-
-    def __setitem__(self, index, line):
-        self.lines[index] = line
-
-    def next(self):
-        if self.position >= self.length:
-            self.position = 0
-            raise StopIteration
-        else:
-            c = self.position
-            self.position += 1
-            return self.lines[c]
-
-    def __iter__(self):
-        return self
-
-    def __len__(self):
-        return len(self.lines)
-
-    def seek(self, line):
-        self.position = line
-
-    def close(self):
-        pass
diff --git a/doc/release/0.4.rst b/doc/release/0.4.rst
index c02e604c6..edda7276f 100644
--- a/doc/release/0.4.rst
+++ b/doc/release/0.4.rst
@@ -26,6 +26,7 @@ New Features
 #. Improve performance by changing the lattice physics interface so that cross sections are not updated on ``everyNode`` calls during coupled calculations (`PR#1963 `_)
 #. Improve efficiency of reaction rate calculations. (`PR#1887 `_)
 #. Adding new options for simplifying 1D cross section modeling. (`PR#1949 `_)
+#. Updating ``copyOrWarn`` and ``getFileSHA1Hash`` to support directories. (`PR#1984 `_)
 #. TBD
 
 API Changes
@@ -49,6 +50,7 @@ API Changes
 #. Removing ``globalFluxInterface.DoseResultsMapper`` class. (`PR#1952 `_)
 #. Removing setting ``mpiTasksPerNode`` and renaming ``numProcessors`` to ``nTasks``. (`PR#1958 `_)
 #. Changing ``synDbAfterWrite`` default to ``True``. (`PR#1968 `_)
+#. Removing unused class ``SmartList``. (`PR#1992 `_)
 #. TBD
 
 Bug Fixes
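A minimal usage sketch of the directory support this change adds to ``copyOrWarn`` and ``getFileSHA1Hash`` (illustration only, not part of the patch; the ``inputsDir`` layout, file names, and settings content below are invented)::

    # Illustration only: exercises the new directory handling in copyOrWarn and
    # getFileSHA1Hash; the paths and file contents here are made up.
    import os

    from armi.utils import getFileSHA1Hash
    from armi.utils import pathTools

    os.makedirs("inputsDir", exist_ok=True)
    with open(os.path.join("inputsDir", "settings.yaml"), "w") as f:
        f.write("nCycles: 3\n")

    # A directory source is copied recursively; a missing source only logs a warning.
    pathTools.copyOrWarn("case inputs", "inputsDir", "inputsDirCopy")

    # A directory argument hashes every file under it (walked with sorted file names)
    # into a single digest.
    print(getFileSHA1Hash("inputsDir", digits=12))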