Mirror of https://github.com/yuzu-emu/mbedtls.git
Ensure files get closed when they go out of scope
This is automatic in CPython but not guaranteed by the language. Be friendly to other Python implementations.

Signed-off-by: Gilles Peskine <Gilles.Peskine@arm.com>
parent 1177f37648
commit aeb8d66525
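The pattern applied throughout this commit is the same: instead of iterating directly over the object returned by open(), which leaves closing the file to the garbage collector, each file is opened in a with block so it is closed as soon as the block exits. CPython closes the file promptly anyway because it uses reference counting, but implementations such as PyPy give no such guarantee. A minimal before/after sketch (count_lines_* and the path argument are illustrative, not taken from the patch):

def count_lines_implicit(path):
    # Relies on the garbage collector to close the file. CPython's
    # reference counting does this promptly; other Python
    # implementations may keep the descriptor open indefinitely.
    return sum(1 for _ in open(path))

def count_lines_explicit(path):
    # The with statement closes the file deterministically when the
    # block exits, even if an exception is raised inside it.
    with open(path) as fd:
        return sum(1 for _ in fd)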
@@ -249,32 +249,33 @@ class AbiChecker:
         at_paragraph_start = True
         description = None
         full_path = os.path.join(directory, filename)
-        for line_number, line in enumerate(open(full_path), 1):
-            line = line.strip()
-            if not line:
-                at_paragraph_start = True
-                continue
-            if line.startswith('#'):
-                continue
-            if at_paragraph_start:
-                description = line.strip()
-                at_paragraph_start = False
-                continue
-            if line.startswith('depends_on:'):
-                continue
-            # We've reached a test case data line
-            test_case_data = self._normalize_storage_test_case_data(line)
-            if not is_generated:
-                # In manual test data, only look at read tests.
-                function_name = test_case_data.split(':', 1)[0]
-                if 'read' not in function_name.split('_'):
-                    continue
-            metadata = SimpleNamespace(
-                filename=filename,
-                line_number=line_number,
-                description=description
-            )
-            storage_tests[test_case_data] = metadata
+        with open(full_path) as fd:
+            for line_number, line in enumerate(fd, 1):
+                line = line.strip()
+                if not line:
+                    at_paragraph_start = True
+                    continue
+                if line.startswith('#'):
+                    continue
+                if at_paragraph_start:
+                    description = line.strip()
+                    at_paragraph_start = False
+                    continue
+                if line.startswith('depends_on:'):
+                    continue
+                # We've reached a test case data line
+                test_case_data = self._normalize_storage_test_case_data(line)
+                if not is_generated:
+                    # In manual test data, only look at read tests.
+                    function_name = test_case_data.split(':', 1)[0]
+                    if 'read' not in function_name.split('_'):
+                        continue
+                metadata = SimpleNamespace(
+                    filename=filename,
+                    line_number=line_number,
+                    description=description
+                )
+                storage_tests[test_case_data] = metadata
 
     @staticmethod
     def _list_generated_test_data_files(git_worktree_path):
@@ -407,14 +407,17 @@ def check_output(generated_output_file, main_input_file, merged_files):
     is also present in an output file. This is not perfect but good enough
     for now.
     """
-    generated_output = set(open(generated_output_file, 'r', encoding='utf-8'))
-    for line in open(main_input_file, 'r', encoding='utf-8'):
-        if line not in generated_output:
-            raise LostContent('original file', line)
-    for merged_file in merged_files:
-        for line in open(merged_file, 'r', encoding='utf-8'):
-            if line not in generated_output:
-                raise LostContent(merged_file, line)
+    with open(generated_output_file, 'r', encoding='utf-8') as out_fd:
+        generated_output = set(out_fd)
+    with open(main_input_file, 'r', encoding='utf-8') as in_fd:
+        for line in in_fd:
+            if line not in generated_output:
+                raise LostContent('original file', line)
+    for merged_file in merged_files:
+        with open(merged_file, 'r', encoding='utf-8') as in_fd:
+            for line in in_fd:
+                if line not in generated_output:
+                    raise LostContent(merged_file, line)
 
 def finish_output(changelog, output_file, input_file, merged_files):
     """Write the changelog to the output file.
@@ -18,7 +18,7 @@
 
 import itertools
 import re
-from typing import Dict, Iterable, Iterator, List, Optional, Pattern, Set, Tuple, Union
+from typing import Dict, IO, Iterable, Iterator, List, Optional, Pattern, Set, Tuple, Union
 
 
 class ReadFileLineException(Exception):
@@ -50,12 +50,13 @@ class read_file_lines:
     """
     def __init__(self, filename: str, binary: bool = False) -> None:
         self.filename = filename
+        self.file = None #type: Optional[IO[str]]
         self.line_number = 'entry' #type: Union[int, str]
         self.generator = None #type: Optional[Iterable[Tuple[int, str]]]
         self.binary = binary
     def __enter__(self) -> 'read_file_lines':
-        self.generator = enumerate(open(self.filename,
-                                        'rb' if self.binary else 'r'))
+        self.file = open(self.filename, 'rb' if self.binary else 'r')
+        self.generator = enumerate(self.file)
         return self
     def __iter__(self) -> Iterator[str]:
         assert self.generator is not None
@@ -64,6 +65,8 @@ class read_file_lines:
             yield content
         self.line_number = 'exit'
     def __exit__(self, exc_type, exc_value, exc_traceback) -> None:
+        if self.file is not None:
+            self.file.close()
         if exc_type is not None:
             raise ReadFileLineException(self.filename, self.line_number) \
                 from exc_value
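For reference, read_file_lines is a context manager; keeping the open file object in self.file lets __exit__ close it explicitly instead of leaving that to the garbage collector. A hypothetical usage sketch, not taken from this diff (the file path is a placeholder):

# Hypothetical call site: iterate over a file's lines; any exception
# raised inside the block is re-raised by __exit__ as a
# ReadFileLineException carrying the file name and current line number.
with read_file_lines('input.txt') as lines:
    for line in lines:
        print(line.rstrip())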
@@ -56,18 +56,19 @@ class Requirements:
         * Comments (``#`` at the beginning of the line or after whitespace).
         * ``-r FILENAME`` to include another file.
         """
-        for line in open(filename):
-            line = line.strip()
-            line = re.sub(r'(\A|\s+)#.*', r'', line)
-            if not line:
-                continue
-            m = re.match(r'-r\s+', line)
-            if m:
-                nested_file = os.path.join(os.path.dirname(filename),
-                                           line[m.end(0):])
-                self.add_file(nested_file)
-                continue
-            self.requirements.append(self.adjust_requirement(line))
+        with open(filename) as fd:
+            for line in fd:
+                line = line.strip()
+                line = re.sub(r'(\A|\s+)#.*', r'', line)
+                if not line:
+                    continue
+                m = re.match(r'-r\s+', line)
+                if m:
+                    nested_file = os.path.join(os.path.dirname(filename),
+                                               line[m.end(0):])
+                    self.add_file(nested_file)
+                    continue
+                self.requirements.append(self.adjust_requirement(line))
 
     def write(self, out: typing_util.Writable) -> None:
         """List the gathered requirements."""