# ==============================================================================
# Copyright (C) 2019 - Philip Paquette
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <https://www.gnu.org/licenses/>.
# ==============================================================================
""" Convoy paths
- Contains utilities to generate all the possible convoy paths for a given map
"""
import collections
import hashlib
import glob
import pickle
import multiprocessing
import os
from queue import Queue
import threading
import tqdm
from diplomacy.engine.map import Map
from diplomacy import settings
# Using `os.path.expanduser()` to find the home directory in a cross-platform way.
HOME_DIRECTORY = os.path.expanduser('~')
if HOME_DIRECTORY == '~':
raise RuntimeError('Cannot find home directory. Unable to save cache')
# Constants
__VERSION__ = '20180913_0915'
COAST_TYPES = ('COAST', 'PORT')
WATER_TYPES = ('WATER', 'PORT')
MAX_CONVOY_LENGTH = 13 # Convoys over this length are not supported, to reduce generation time
CACHE_FILE_NAME = 'convoy_paths_cache.pkl'
INTERNAL_CACHE_PATH = os.path.join(settings.PACKAGE_DIR, 'maps', CACHE_FILE_NAME)
EXTERNAL_CACHE_PATH = os.path.join(HOME_DIRECTORY, '.cache', 'diplomacy', CACHE_FILE_NAME)
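# Both cache files are pickled dicts of the form {'__version__': str, <map md5>: buckets}, where 'buckets'
# maps the number of required fleets to the list of (start_loc, {fleets}, {dests}) convoy paths of that length.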
def _display_progress_bar(queue, max_loop_iters):
""" Displays a progress bar
:param queue: Multiprocessing queue to display the progress bar
:param max_loop_iters: The expected maximum number of iterations
"""
progress_bar = tqdm.tqdm(total=max_loop_iters)
for item in iter(queue.get, None): # type: int
for _ in range(item):
progress_bar.update()
progress_bar.close()
def _get_convoy_paths(map_object, start_location, max_convoy_length, queue):
""" Returns a list of possible convoy destinations with the required units to get there
        Performs a breadth-first search from the start location
:param map_object: The instantiated map
:param start_location: The start location of the unit (e.g. 'LON')
:param max_convoy_length: The maximum convoy length permitted
:param queue: Multiprocessing queue to display the progress bar
        :return: A list of (start_location, {req. fleets}, {reachable destinations})
:type map_object: diplomacy.Map
"""
to_check = Queue() # Items in queue have format ({fleets location}, last fleet location)
dest_paths = {} # Dict with dest as key and a list of all paths from start_location to dest as value
# To measure progress
last_completed_path_length = 0
nb_water_locs = len([loc.upper() for loc in map_object.locs if map_object.area_type(loc) in WATER_TYPES])
# We need to start on a coast / port
if map_object.area_type(start_location) not in COAST_TYPES or '/' in start_location:
queue.put(nb_water_locs)
return []
# Queuing all adjacent water locations from start
for loc in [loc.upper() for loc in map_object.abut_list(start_location, incl_no_coast=True)]:
if map_object.area_type(loc) in WATER_TYPES:
to_check.put(({loc}, loc))
# Checking all subsequent adjacencies until no more adjacencies are possible
while not to_check.empty():
fleets_loc, last_loc = to_check.get()
new_completed_path_length = len(fleets_loc) - 1
# Marking path length as completed
if new_completed_path_length > last_completed_path_length:
queue.put(new_completed_path_length - last_completed_path_length)
last_completed_path_length = new_completed_path_length
# Checking adjacencies
for loc in [loc.upper() for loc in map_object.abut_list(last_loc, incl_no_coast=True)]:
# If we find adjacent coasts, we mark them as a possible result
if map_object.area_type(loc) in COAST_TYPES and '/' not in loc and loc != start_location:
dest_paths.setdefault(loc, [])
# If we already have a working path that is a subset of the current fleets, we can skip
# Otherwise, we add the new path as a valid path to dest
for path in dest_paths[loc]:
if path.issubset(fleets_loc):
break
else:
dest_paths[loc] += [fleets_loc]
# If we find adjacent water/port, we add them to the queue
elif map_object.area_type(loc) in WATER_TYPES \
and loc not in fleets_loc \
and len(fleets_loc) < max_convoy_length:
to_check.put((fleets_loc | {loc}, loc))
# Merging destinations with similar paths
similar_paths = {}
for dest, paths in dest_paths.items():
for path in paths:
tuple_path = tuple(sorted(path))
similar_paths.setdefault(tuple_path, set([]))
similar_paths[tuple_path] |= {dest}
# Converting to list
results = []
for fleets, dests in similar_paths.items():
results += [(start_location, set(fleets), dests)]
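    # Each entry is a 3-tuple; e.g. on the standard map (illustrative values only):
    #   ('LON', {'NTH'}, {'BEL', 'DEN', 'EDI', 'HOL', 'NWY', 'YOR'})
    # i.e. an army in LON can be convoyed to any of those coasts using a single fleet in NTH.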
# Marking as done
if nb_water_locs > last_completed_path_length:
queue.put(nb_water_locs - last_completed_path_length)
# Returning
return results
def _build_convoy_paths_cache(map_object, max_convoy_length):
""" Builds the convoy paths cache for a map
:param map_object: The instantiated map object
:param max_convoy_length: The maximum convoy length permitted
:return: A dictionary where the key is the number of fleets in the path and
the value is a list of convoy paths (start loc, {fleets}, {dest}) of that length for the map
:type map_object: diplomacy.Map
"""
print('Generating convoy paths for "{}"'.format(map_object.name))
    print('This operation is only required the first time a map is loaded. It might take several minutes...\n')
coasts = [loc.upper() for loc in map_object.locs if map_object.area_type(loc) in COAST_TYPES and '/' not in loc]
water_locs = [loc.upper() for loc in map_object.locs if map_object.area_type(loc) in WATER_TYPES]
# Starts the progress bar loop
manager = multiprocessing.Manager()
queue = manager.Queue()
progress_bar = threading.Thread(target=_display_progress_bar, args=(queue, len(coasts) * len(water_locs)))
progress_bar.start()
    # Getting all paths for each coast in parallel
    # (falling back to a single process on large maps with an increased convoy length, to limit memory usage)
nb_cores = multiprocessing.cpu_count() if (len(water_locs) <= 30 or max_convoy_length <= MAX_CONVOY_LENGTH) else 1
pool = multiprocessing.Pool(nb_cores)
tasks = [(map_object, coast, max_convoy_length, queue) for coast in coasts]
results = pool.starmap(_get_convoy_paths, tasks)
pool.close()
results = [item for sublist in results for item in sublist]
queue.put(None)
progress_bar.join()
# Splitting into buckets
    buckets = collections.OrderedDict((i, []) for i in range(1, len(map_object.locs) + 1))
for start, fleets, dests in results:
buckets[len(fleets)] += [(start, fleets, dests)]
# Returning
print('Found {} convoy paths for {}\n'.format(len(results), map_object.name))
return buckets
def get_file_md5(file_path):
""" Calculates a file MD5 hash
:param file_path: The file path
:return: The computed md5 hash
"""
hash_md5 = hashlib.md5()
with open(file_path, 'rb') as file:
for chunk in iter(lambda: file.read(4096), b''):
hash_md5.update(chunk)
return hash_md5.hexdigest()
def add_to_cache(map_name, max_convoy_length=MAX_CONVOY_LENGTH):
""" Lazy generates convoys paths for a map and adds it to the disk cache
:param map_name: The name of the map
:param max_convoy_length: The maximum convoy length permitted
        :return: The convoy_paths for that map, or None if the map file cannot be found
"""
convoy_paths = {'__version__': __VERSION__} # Uses hash as key
external_convoy_paths = {'__version__': __VERSION__} # Uses hash as key
# Loading from internal cache first
if os.path.exists(INTERNAL_CACHE_PATH):
try:
            with open(INTERNAL_CACHE_PATH, 'rb') as cache_file:
                cache_data = pickle.load(cache_file)
if cache_data.get('__version__', '') == __VERSION__:
convoy_paths.update(cache_data)
except (pickle.UnpicklingError, EOFError):
pass
# Loading external cache
if os.path.exists(EXTERNAL_CACHE_PATH):
try:
            with open(EXTERNAL_CACHE_PATH, 'rb') as cache_file:
                cache_data = pickle.load(cache_file)
if cache_data.get('__version__', '') != __VERSION__:
print('Upgrading cache from "%s" to "%s"' % (cache_data.get('__version__', '<N/A>'), __VERSION__))
else:
convoy_paths.update(cache_data)
external_convoy_paths.update(cache_data)
except (pickle.UnpicklingError, EOFError):
pass
# Getting map MD5 hash
if os.path.exists(map_name):
map_path = map_name
else:
map_path = os.path.join(settings.PACKAGE_DIR, 'maps', map_name + '.map')
if not os.path.exists(map_path):
return None
map_hash = get_file_md5(map_path)
# Generating and adding to alternate cache paths
if map_hash not in convoy_paths:
map_object = Map(map_name, use_cache=False)
convoy_paths[map_hash] = _build_convoy_paths_cache(map_object, max_convoy_length)
external_convoy_paths[map_hash] = convoy_paths[map_hash]
os.makedirs(os.path.dirname(EXTERNAL_CACHE_PATH), exist_ok=True)
        with open(EXTERNAL_CACHE_PATH, 'wb') as cache_file:
            pickle.dump(external_convoy_paths, cache_file)
# Returning
return convoy_paths[map_hash]
def get_convoy_paths_cache():
""" Returns the current cache from disk """
disk_convoy_paths = {} # Uses hash as key
    cache_convoy_paths = {} # Uses map name and map file path as keys
# Loading from internal cache first
if os.path.exists(INTERNAL_CACHE_PATH):
try:
            with open(INTERNAL_CACHE_PATH, 'rb') as cache_file:
                cache_data = pickle.load(cache_file)
if cache_data.get('__version__', '') == __VERSION__:
disk_convoy_paths.update(cache_data)
except (pickle.UnpicklingError, EOFError):
pass
# Loading external cache
if os.path.exists(EXTERNAL_CACHE_PATH):
try:
            with open(EXTERNAL_CACHE_PATH, 'rb') as cache_file:
                cache_data = pickle.load(cache_file)
if cache_data.get('__version__', '') == __VERSION__:
disk_convoy_paths.update(cache_data)
except (pickle.UnpicklingError, EOFError):
pass
# Getting map name and file paths
files_path = glob.glob(settings.PACKAGE_DIR + '/maps/*.map')
for file_path in files_path:
map_name = file_path.replace(settings.PACKAGE_DIR + '/maps/', '').replace('.map', '')
map_hash = get_file_md5(file_path)
if map_hash in disk_convoy_paths:
cache_convoy_paths[map_name] = disk_convoy_paths[map_hash]
cache_convoy_paths[file_path] = disk_convoy_paths[map_hash]
# Returning
return cache_convoy_paths
def rebuild_all_maps():
""" Rebuilds all the maps in the external cache """
if os.path.exists(EXTERNAL_CACHE_PATH):
os.remove(EXTERNAL_CACHE_PATH)
files_path = glob.glob(settings.PACKAGE_DIR + '/maps/*.map')
for file_path in files_path:
map_name = file_path.replace(settings.PACKAGE_DIR + '/maps/', '').replace('.map', '')
map_hash = get_file_md5(file_path)
print('-' * 80)
print('Adding {} (Hash: {}) to cache\n'.format(file_path, map_hash))
add_to_cache(map_name)
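# A possible command-line entry point (a sketch; assumed, not part of the documented API) to force a full rebuild.
if __name__ == '__main__':
    rebuild_all_maps()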