pw_tokenizer: Fix tests on Windows
detokenize_test.py fails on Windows because Windows does not allow a
NamedTemporaryFile to be opened a second time by name while the
original handle is still open. Create the temporary files with
delete=False, close them before reopening them by name, and remove
them in a finally block. Also build database_test.py's expected report
strings with os.linesep, since the report output uses platform-specific
line endings.
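
For reference, a minimal sketch of the portable temp-file pattern the
tests switch to (the helper and its name are illustrative only, not
part of this change):

    import os
    import tempfile

    def write_then_read_back(data: bytes) -> bytes:
        # Windows cannot reliably reopen a NamedTemporaryFile by name
        # while the original handle is open, so close it first.
        file = tempfile.NamedTemporaryFile('wb', delete=False)
        try:
            file.write(data)
            file.close()
            with open(file.name, 'rb') as fd:  # Reopen by name.
                return fd.read()
        finally:
            os.unlink(file.name)  # delete=False means manual cleanup.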
Change-Id: I218586311407f713b20d62229ace4a0ae75befb4
Reviewed-on: https://pigweed-review.googlesource.com/c/pigweed/pigweed/+/21100
Reviewed-by: Alexei Frolov <frolv@google.com>
Commit-Queue: Wyatt Hepler <hepler@google.com>
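
The database_test.py expectation strings change because the report is
rendered with the platform's line separator. A standalone sketch of the
idea (the string content here is illustrative, not the real report):

    import os

    # One '\n'-separated constant stays correct on every platform:
    # os.linesep is '\r\n' on Windows; on POSIX the replace is a no-op.
    EXPECTED = 'Total entries: 17\nCollisions: 0 tokens\n'.replace(
        '\n', os.linesep).encode()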
diff --git a/pw_tokenizer/py/database_test.py b/pw_tokenizer/py/database_test.py
index d79b765..0049c68 100755
--- a/pw_tokenizer/py/database_test.py
+++ b/pw_tokenizer/py/database_test.py
@@ -15,6 +15,7 @@
 """Tests for the database module."""

 import io
+import os
 from pathlib import Path
 import shutil
 import sys
@@ -95,7 +96,7 @@
     return io.TextIOWrapper(output, write_through=True)


-REPORT_DEFAULT_DOMAIN = b'''\
+REPORT_DEFAULT_DOMAIN = '''\
 example_binary_with_tokenized_strings.elf]
 Domain: default
 Entries present: 17
@@ -103,9 +104,9 @@
 Total entries: 17
 Total size of strings: 205 B
 Collisions: 0 tokens
-'''
+'''.replace('\n', os.linesep).encode()

-REPORT_TEST_DOMAIN = b'''\
+REPORT_TEST_DOMAIN = '''\
 example_binary_with_tokenized_strings.elf]
 Domain: TEST_DOMAIN
 Entries present: 3
@@ -113,7 +114,7 @@
 Total entries: 3
 Total size of strings: 38 B
 Collisions: 0 tokens
-'''
+'''.replace('\n', os.linesep).encode()


 class DatabaseCommandLineTest(unittest.TestCase):
diff --git a/pw_tokenizer/py/detokenize_test.py b/pw_tokenizer/py/detokenize_test.py
index 300e782..a472385 100755
--- a/pw_tokenizer/py/detokenize_test.py
+++ b/pw_tokenizer/py/detokenize_test.py
@@ -258,12 +258,15 @@
         detok = detokenize.Detokenizer(io.BytesIO(ELF_WITH_TOKENIZER_SECTIONS))
         expected_tokens = frozenset(detok.database.token_to_entries.keys())

-        with tempfile.NamedTemporaryFile() as elf:
+        elf = tempfile.NamedTemporaryFile('wb', delete=False)
+        try:
             elf.write(ELF_WITH_TOKENIZER_SECTIONS)
-            elf.seek(0)
+            elf.close()

             # Open ELF by file object
-            detok = detokenize.Detokenizer(elf)
+            with open(elf.name, 'rb') as fd:
+                detok = detokenize.Detokenizer(fd)
+
             self.assertEqual(expected_tokens,
                              frozenset(detok.database.token_to_entries.keys()))

@@ -273,10 +276,13 @@
                              frozenset(detok.database.token_to_entries.keys()))

             # Open ELF by elf_reader.Elf
-            elf.seek(0)
-            detok = detokenize.Detokenizer(elf_reader.Elf(elf))
+            with open(elf.name, 'rb') as fd:
+                detok = detokenize.Detokenizer(elf_reader.Elf(fd))
+
             self.assertEqual(expected_tokens,
                              frozenset(detok.database.token_to_entries.keys()))
+        finally:
+            os.unlink(elf.name)

     def test_decode_from_csv_file(self):
         detok = detokenize.Detokenizer(io.BytesIO(ELF_WITH_TOKENIZER_SECTIONS))
@@ -285,9 +291,10 @@
         csv_database = str(detok.database)
         self.assertEqual(len(csv_database.splitlines()), 17)

-        with tempfile.NamedTemporaryFile('r+') as csv_file:
+        csv_file = tempfile.NamedTemporaryFile('w', delete=False)
+        try:
             csv_file.write(csv_database)
-            csv_file.seek(0)
+            csv_file.close()

             # Open CSV by path
             detok = detokenize.Detokenizer(csv_file.name)
@@ -295,9 +302,13 @@
                              frozenset(detok.database.token_to_entries.keys()))

             # Open CSV by file object
-            detok = detokenize.Detokenizer(csv_file)
+            with open(csv_file.name) as fd:
+                detok = detokenize.Detokenizer(fd)
+
             self.assertEqual(expected_tokens,
                              frozenset(detok.database.token_to_entries.keys()))
+        finally:
+            os.unlink(csv_file.name)

     def test_create_detokenizer_with_token_database(self):
         detok = detokenize.Detokenizer(io.BytesIO(ELF_WITH_TOKENIZER_SECTIONS))
@@ -385,6 +396,8 @@
 class AutoUpdatingDetokenizerTest(unittest.TestCase):
     """Tests the AutoUpdatingDetokenizer class."""
     def test_update(self, mock_getmtime):
+        """Tests the update command."""
+
         db = database.load_token_database(
             io.BytesIO(ELF_WITH_TOKENIZER_SECTIONS))
         self.assertEqual(len(db), 17)
@@ -400,15 +413,20 @@
         mock_getmtime.side_effect = move_back_time_if_file_exists

-        with tempfile.NamedTemporaryFile('wb', delete=True) as fd:
-            detok = detokenize.AutoUpdatingDetokenizer(fd.name,
+        file = tempfile.NamedTemporaryFile('wb', delete=False)
+        try:
+            file.close()
+
+            detok = detokenize.AutoUpdatingDetokenizer(file.name,
                                                        min_poll_period_s=0)
             self.assertFalse(detok.detokenize(JELLO_WORLD_TOKEN).ok())

-            tokens.write_binary(db, fd)
-            fd.flush()
+            with open(file.name, 'wb') as fd:
+                tokens.write_binary(db, fd)

             self.assertTrue(detok.detokenize(JELLO_WORLD_TOKEN).ok())
+        finally:
+            os.unlink(file.name)

         # The database stays around if the file is deleted.
         self.assertTrue(detok.detokenize(JELLO_WORLD_TOKEN).ok())

@@ -416,24 +434,29 @@
     def test_no_update_if_time_is_same(self, mock_getmtime):
         mock_getmtime.return_value = 100

-        with tempfile.NamedTemporaryFile('wb', delete=True) as fd:
+        file = tempfile.NamedTemporaryFile('wb', delete=False)
+        try:
             tokens.write_csv(
                 database.load_token_database(
-                    io.BytesIO(ELF_WITH_TOKENIZER_SECTIONS)), fd)
-            fd.flush()
+                    io.BytesIO(ELF_WITH_TOKENIZER_SECTIONS)), file)
+            file.close()

-            detok = detokenize.AutoUpdatingDetokenizer(fd, min_poll_period_s=0)
+            detok = detokenize.AutoUpdatingDetokenizer(file,
+                                                       min_poll_period_s=0)
             self.assertTrue(detok.detokenize(JELLO_WORLD_TOKEN).ok())

-            # Empty the database, but keep the modified time the same.
-            fd.truncate(0)
-            fd.flush()
+            # Empty the database, but keep the mock modified time the same.
+            with open(file.name, 'wb'):
+                pass
+
             self.assertTrue(detok.detokenize(JELLO_WORLD_TOKEN).ok())
             self.assertTrue(detok.detokenize(JELLO_WORLD_TOKEN).ok())

             # Move back time so the now-empty file is reloaded.
             mock_getmtime.return_value = 50
             self.assertFalse(detok.detokenize(JELLO_WORLD_TOKEN).ok())
+        finally:
+            os.unlink(file.name)


 def _next_char(message):
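
Note: the AutoUpdatingDetokenizer tests above drive reloads by patching
os.path.getmtime. A minimal standalone sketch of that technique, with
illustrative values:

    import os
    from unittest import mock

    with mock.patch('os.path.getmtime', return_value=100):
        # Every os.path.getmtime call now reports the same mtime, so
        # code that polls for file changes sees "no change"
        # deterministically, regardless of the real file system.
        assert os.path.getmtime('any/path') == 100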