
Commit 0de7d22

Improve CI (eth-cscs#123)
CI seems to have stopped failing for a while, even when linting issues are present (see the output of https://github.com/eth-cscs/stackinator/actions/runs/5486778221/jobs/9997318839?pr=122, for example). This PR improves how the linting tools are run: `black` and `isort` are now invoked in check mode, so violations actually fail the job. The whole code base has been re-formatted to make `black` and `isort` happy again. Additionally, the `docs` and `publish` actions are only allowed to run from the main repo.
1 parent 0d6517c commit 0de7d22
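
For context, the lint workflow change below switches the tools to check-only invocations. The following is a minimal local sketch of the same checks, not part of this commit; it assumes black, isort and flake8 are installed in the active Python environment, and the flags are taken directly from the updated workflow.

# Sketch: run the same lint checks locally that the updated CI workflow runs.
import subprocess
import sys

checks = [
    ["black", "--check", "--verbose", "."],
    ["isort", "--check", "--diff", "."],
    ["flake8", "--count", "--show-source", "--statistics", "."],
]

failed = False
for cmd in checks:
    print(f"running: {' '.join(cmd)}")
    # check=False so every linter runs even if an earlier one reports problems
    if subprocess.run(cmd, check=False).returncode != 0:
        failed = True

sys.exit(1 if failed else 0)

Running the tools in check mode, instead of letting them rewrite files in CI, is what makes the job fail when the code base drifts out of style.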

File tree

10 files changed (+112, -80 lines)


.github/workflows/docs.yaml (+1)

@@ -9,6 +9,7 @@ permissions:
   contents: write
 jobs:
   deploy:
+    if: github.repository == 'eth-cscs/stackinator'
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3

.github/workflows/lint.yaml (+3, -3)

@@ -19,10 +19,10 @@ jobs:
           python -m pip install black flake8 isort mypy
       - name: Black
         run: |
-          black .
+          black --check --verbose .
       - name: isort
         run: |
-          isort .
+          isort --check --diff .
       - name: flake8
         run: |
-          flake8
+          flake8 --count --show-source --statistics .

.github/workflows/publish.yaml (+1)

@@ -6,6 +6,7 @@ on:
 
 jobs:
   publish:
+    if: github.repository == 'eth-cscs/stackinator'
     runs-on: ubuntu-20.04
     steps:
       - uses: actions/checkout@v3

stackinator/builder.py (+14, -9)

@@ -10,7 +10,8 @@
 import jinja2
 import yaml
 
-from . import VERSION, root_logger, cache
+from . import VERSION, cache, root_logger
+
 
 class Builder:
     def __init__(self, args):
@@ -72,7 +73,7 @@ def environment_meta(self):
 
     @environment_meta.setter
     def environment_meta(self, recipe):
-        '''
+        """
         The output that we want to generate looks like the following,
         Which should correspond directly to the environment_view_meta provided
         by the recipe.
@@ -96,7 +97,7 @@ def environment_meta(self, recipe):
                 }
             }
         }
-        '''
+        """
         conf = recipe.config
         meta = {}
         meta["name"] = conf["name"]
@@ -204,9 +205,12 @@ def generate(self, recipe):
             # print warning if mirrors.yaml is found
             if f_config.name in ["mirrors.yaml"]:
                 self._logger.error(
-                    "mirrors.yaml have been removed from cluster configurations,"
-                    " use the --cache option on stack-config instead.")
-                raise RuntimeError("Unsupported mirrors.yaml file in cluster configuration.")
+                    "mirrors.yaml have been removed from cluster configurations,"
+                    " use the --cache option on stack-config instead."
+                )
+                raise RuntimeError(
+                    "Unsupported mirrors.yaml file in cluster configuration."
+                )
 
             # construct full file path
             src = system_config_path / f_config.name
@@ -219,7 +223,7 @@ def generate(self, recipe):
         if recipe.mirror:
             dst = config_path / "mirrors.yaml"
             self._logger.debug(f"generate the build cache mirror: {dst}")
-            with dst.open('w') as fid:
+            with dst.open("w") as fid:
                 fid.write(cache.generate_mirrors_yaml(recipe.mirror))
 
         # append recipe packages to packages.yaml
@@ -366,8 +370,9 @@ def generate(self, recipe):
         with debug_script_path.open("w") as f:
             f.write(
                 debug_script_template.render(
-                    mount_path=recipe.config["store"], build_path=str(self.path), verbose=False
+                    mount_path=recipe.config["store"],
+                    build_path=str(self.path),
+                    verbose=False,
                 )
             )
             f.write("\n")
-

stackinator/cache.py (+23, -29)

@@ -1,10 +1,11 @@
+import os
 import pathlib
 
-import os
 import yaml
 
 from . import schema
 
+
 def configuration_from_file(file, mount):
     with file.open() as fid:
         # load the raw yaml input
@@ -16,13 +17,9 @@ def configuration_from_file(file, mount):
     # verify that the root path exists
     path = pathlib.Path(os.path.expandvars(raw["root"]))
     if not path.is_absolute():
-        raise FileNotFoundError(
-            f"The build cache path '{path}' is not absolute"
-        )
+        raise FileNotFoundError(f"The build cache path '{path}' is not absolute")
     if not path.is_dir():
-        raise FileNotFoundError(
-            f"The build cache path '{path}' does not exist"
-        )
+        raise FileNotFoundError(f"The build cache path '{path}' does not exist")
 
     raw["root"] = path
 
@@ -35,36 +32,33 @@ def configuration_from_file(file, mount):
     if key is not None:
         key = pathlib.Path(os.path.expandvars(key))
         if not key.is_absolute():
-            raise FileNotFoundError(
-                f"The build cache key '{key}' is not absolute"
-            )
+            raise FileNotFoundError(f"The build cache key '{key}' is not absolute")
        if not key.is_file():
-            raise FileNotFoundError(
-                f"The build cache key '{key}' does not exist"
-            )
+            raise FileNotFoundError(f"The build cache key '{key}' does not exist")
         raw["key"] = key
 
     return raw
 
+
 def generate_mirrors_yaml(config):
-    path = config['path'].as_posix()
+    path = config["path"].as_posix()
     mirrors = {
-        'mirrors': {
-            'alpscache': {
-                'fetch': {
-                    'url': f"file://{path}",
-                    'access_pair': [None, None],
-                    'access_token': None,
-                    'profile': None,
-                    'endpoint_url': None,
+        "mirrors": {
+            "alpscache": {
+                "fetch": {
+                    "url": f"file://{path}",
+                    "access_pair": [None, None],
+                    "access_token": None,
+                    "profile": None,
+                    "endpoint_url": None,
+                },
+                "push": {
+                    "url": f"file://{path}",
+                    "access_pair": [None, None],
+                    "access_token": None,
+                    "profile": None,
+                    "endpoint_url": None,
                 },
-                'push': {
-                    'url': f"file://{path}",
-                    'access_pair': [None, None],
-                    'access_token': None,
-                    'profile': None,
-                    'endpoint_url': None,
-                }
             }
         }
     }
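
For illustration only: the reformatted generate_mirrors_yaml() above builds a nested dict describing a fetch/push mirror named alpscache. The sketch below shows roughly the YAML that dict corresponds to, for a hypothetical cache path. The function's return statement sits outside this hunk, so the yaml.dump call here is an assumption, not the confirmed implementation.

# Sketch: dump an equivalent mirrors dict to see the YAML it describes.
import yaml

path = "/scratch/build-cache"  # hypothetical cache location, not from the diff
mirrors = {
    "mirrors": {
        "alpscache": {
            "fetch": {
                "url": f"file://{path}",
                "access_pair": [None, None],
                "access_token": None,
                "profile": None,
                "endpoint_url": None,
            },
            "push": {
                "url": f"file://{path}",
                "access_pair": [None, None],
                "access_token": None,
                "profile": None,
                "endpoint_url": None,
            },
        }
    }
}

# Print the YAML that a mirrors.yaml generated from this dict would contain.
print(yaml.dump(mirrors, default_flow_style=False))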

stackinator/etc/add-compiler-links.py (+24, -14)

@@ -4,39 +4,47 @@
 
 import argparse
 import os
+
 import yaml
 
+
 # parse compilers.yaml file.
 # return a list with the compiler descriptions from the yaml file.
 def load_compilers_yaml(path):
-    with open(path, 'r') as file:
+    with open(path, "r") as file:
         data = yaml.safe_load(file)
         compilers = [c["compiler"] for c in data["compilers"]]
         return compilers
 
+
 def parse_export(line):
-    s = line.replace('=', ' ').split()
+    s = line.replace("=", " ").split()
     var = s[1]
     paths = None
-    if len(s)>2:
-        paths = s[2].rstrip(';').split(':')
+    if len(s) > 2:
+        paths = s[2].rstrip(";").split(":")
     return {"variable": var, "paths": paths}
 
+
 def split_line(line):
-    return line.strip().rstrip(';').replace('=', ' ').split()
+    return line.strip().rstrip(";").replace("=", " ").split()
+
 
 def is_export(parts):
-    return len(parts)>1 and parts[0]=="export"
+    return len(parts) > 1 and parts[0] == "export"
+
 
 def is_alias(parts):
-    return len(parts)>0 and parts[0]=="alias"
+    return len(parts) > 0 and parts[0] == "alias"
+
 
 # Returns True if the given path is a descendant of prefix, False otherwise.
 def has_prefix(path, prefix):
     prefix = os.path.realpath(prefix)
     path = os.path.realpath(path)
     return os.path.commonprefix([path, prefix]) == prefix
 
+
 parser = argparse.ArgumentParser()
 parser.add_argument("compiler_path", help="Path to the compilers.yaml file")
 parser.add_argument("activate_path", help="Path to the activate script to configure")
@@ -59,12 +67,12 @@ def has_prefix(path, prefix):
 
 paths = []
 for c in compilers:
-    local_paths = set([os.path.dirname(v) for k,v in c["paths"].items()])
+    local_paths = set([os.path.dirname(v) for k, v in c["paths"].items()])
     paths += local_paths
     print(f'adding compiler {c["spec"]} -> {[p for p in local_paths]}')
 
 # find unique paths and concatenate them
-pathstring = ':'.join(set(paths))
+pathstring = ":".join(set(paths))
 
 # Parse the spack env activation script line by line.
 # Remove spack-specific environment variables and references the build path.
@@ -73,7 +81,7 @@ def has_prefix(path, prefix):
 # etc. This may or may not be surprising for users, and we may have to append
 # :$PATH, :$CPATH, etc.
 
-lines=[]
+lines = []
 with open(args.activate_path) as fid:
     for line in fid:
         parts = split_line(line)
@@ -86,22 +94,24 @@ def has_prefix(path, prefix):
 
             # parse PATH to remove references to the build directory
             if export["variable"] == "PATH":
-                paths=[p for p in export["paths"] if not has_prefix(p, args.build_path)]
+                paths = [
+                    p for p in export["paths"] if not has_prefix(p, args.build_path)
+                ]
                 lines.append(f"export PATH={':'.join(paths)};\n")
 
             # drop the SPACK_ENV variable
             elif export["variable"] == "SPACK_ENV":
                 pass
 
             else:
-                lines.append(line.strip()+"\n")
+                lines.append(line.strip() + "\n")
         else:
-            lines.append(line.strip()+"\n")
+            lines.append(line.strip() + "\n")
 
 # Prepend the compiler paths to PATH
 lines.append("# compiler paths added by stackinator\n")
 lines.append(f"export PATH={pathstring}:$PATH;\n")
 
 # Write a modified version of the activation script.
-with open(args.activate_path, 'w') as fid:
+with open(args.activate_path, "w") as fid:
     fid.writelines(lines)
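
A small standalone sketch (not from the commit) of how the parsing helpers in add-compiler-links.py classify a typical activation-script line. The three functions are copied verbatim from the reformatted file above; the input line is made up for the demonstration.

# Sketch: exercise the activation-script parsing helpers on one example line.
def split_line(line):
    return line.strip().rstrip(";").replace("=", " ").split()


def is_export(parts):
    return len(parts) > 1 and parts[0] == "export"


def parse_export(line):
    s = line.replace("=", " ").split()
    var = s[1]
    paths = None
    if len(s) > 2:
        paths = s[2].rstrip(";").split(":")
    return {"variable": var, "paths": paths}


line = "export PATH=/user-environment/bin:/usr/bin;"  # hypothetical input line
parts = split_line(line)
assert is_export(parts)
print(parse_export(line))
# -> {'variable': 'PATH', 'paths': ['/user-environment/bin', '/usr/bin']}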
