1"""Utility functions.
2"""
3import contextlib
4import multiprocessing
5import sys
6import re
7
8from milc import cli
9
10TRIPLET_PATTERN = re.compile(r'^(\d+)\.(\d+)\.(\d+)')
11
12maybe_exit_should_exit = True
13maybe_exit_reraise = False
14
15
16# Controls whether or not early `exit()` calls should be made
17def maybe_exit(rc):
18 if maybe_exit_should_exit:
19 sys.exit(rc)
20 if maybe_exit_reraise:
21 e = sys.exc_info()[1]
22 if e:
23 raise e
24
25
26def maybe_exit_config(should_exit: bool = True, should_reraise: bool = False):
27 global maybe_exit_should_exit
28 global maybe_exit_reraise
29 maybe_exit_should_exit = should_exit
30 maybe_exit_reraise = should_reraise
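
# Example usage (a hypothetical sketch; `do_risky_work()` is illustrative only).
# Library-style callers that don't want a hard process exit can disable it up
# front and surface errors instead:
#
#   maybe_exit_config(should_exit=False, should_reraise=True)
#   try:
#       do_risky_work()
#   except Exception:
#       cli.log.error('Something went wrong')
#       maybe_exit(1)  # re-raises the active exception instead of exiting the process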


def truthy(value, value_if_unknown=False):
    """Returns True if the value is truthy, False if falsy, and `value_if_unknown` otherwise.

    Deals with:
        True: 1, true, t, yes, y, on
        False: 0, false, f, no, n, off
    """
    if value in {False, True}:
        return bool(value)

    test_value = str(value).strip().lower()

    if test_value in {"1", "true", "t", "yes", "y", "on"}:
        return True

    if test_value in {"0", "false", "f", "no", "n", "off"}:
        return False

    return value_if_unknown
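
# Example behaviour (a brief sketch of the mappings above):
#
#   truthy('YES ')                          # -> True   (case/whitespace-insensitive)
#   truthy('off')                           # -> False
#   truthy('maybe')                         # -> False  (falls back to `value_if_unknown`)
#   truthy('maybe', value_if_unknown=True)  # -> True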


@contextlib.contextmanager
def parallelize():
    """Context manager which yields a function that can be used in place of a `map()` call.

    Attempts to use `mpire`, falling back to `multiprocessing` if it's not
    available. If parallelization is not requested, yields the built-in `map()`
    function instead.
    """

    # Work out if we've already got a config value for parallel searching
    if cli.config.user.parallel_search is None:
        parallel_search = True
    else:
        parallel_search = cli.config.user.parallel_search

    # Non-parallel searches use `map()`
    if not parallel_search:
        yield map
        return

    # Prefer mpire's `WorkerPool` if it's available
    with contextlib.suppress(ImportError):
        from mpire import WorkerPool
        from mpire.utils import make_single_arguments
        with WorkerPool() as pool:

            def _worker(func, *args):
                # Ensure we don't unpack tuples -- mpire's `WorkerPool` tries to do so normally so we tell it not to.
                for r in pool.imap_unordered(func, make_single_arguments(*args, generator=False), progress_bar=True):
                    yield r

            yield _worker
        return

    # Otherwise fall back to multiprocessing's `Pool`
    with multiprocessing.Pool() as pool:
        yield pool.imap_unordered


def parallel_map(*args, **kwargs):
    """Effectively runs `map()`, executing it in parallel unless disabled via the user's `parallel_search` configuration.
    """
    with parallelize() as map_fn:
        # This needs to be enclosed in a `list()` as some implementations return
        # a generator, which means the scope of the pool would be closed off
        # before the results are consumed. Returning a list ensures results are
        # materialised before any worker pool is shut down.
        return list(map_fn(*args, **kwargs))
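
# Example usage (a hypothetical sketch; `_collect_info` and `keyboard_list` are
# illustrative only):
#
#   results = parallel_map(_collect_info, keyboard_list)
#
# Note: when the `mpire`/`multiprocessing` paths are taken, the mapped callable and
# its arguments must be picklable, so prefer module-level functions over lambdas.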


def triplet_to_bcd(ver: str) -> str:
    """Converts a version triplet such as "1.2.3" into a BCD-encoded hex string such as "0x01020003".

    Returns '0x00000000' if the string cannot be parsed as a dotted version triplet.
    """
    m = TRIPLET_PATTERN.match(ver)
    if not m:
        return '0x00000000'
    return f'0x{int(m.group(1)):02d}{int(m.group(2)):02d}{int(m.group(3)):04d}'
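
# Worked examples (BCD packing: two decimal digits each for major and minor, four for patch):
#
#   triplet_to_bcd('1.2.3')      # -> '0x01020003'
#   triplet_to_bcd('12.34.567')  # -> '0x12340567'
#   triplet_to_bcd('nonsense')   # -> '0x00000000'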