
Commit c55f366

Merge branch 'master' of http://github.com/GraylinKim/sc2reader
2 parents fe57344 + 84d4f54 commit c55f366


7 files changed: +250 -120 lines changed


MANIFEST.in

Lines changed: 1 addition & 0 deletions
@@ -1,3 +1,4 @@
 include LICENSE.txt
 include CONTRIBUTORS.txt
 include README.txt
+recursive-include sc2reader *.csv
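
Note: this packaging change ships the per-build CSV data files inside the installed package, which the pkgutil.get_data() calls introduced in sc2reader/data/__init__.py below depend on. A minimal sanity check, mirroring the Python 2 usage in that diff (assumes an installed sc2reader; the build number 22612 appears in this diff and is used purely as an example):

    import pkgutil

    # Data files follow the '<build>_units.csv' / '<build>_abilities.csv' naming
    # used by create_build(); 22612 is only an illustrative build number.
    units_csv = pkgutil.get_data('sc2reader.data', '22612_units.csv')
    print(units_csv.split('\n')[0])   # first row of the pipe-delimited unit table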

sc2reader/__init__.py

Lines changed: 31 additions & 15 deletions
@@ -1,5 +1,7 @@
 from __future__ import absolute_import
 
+import sys
+
 # import submodules
 from sc2reader import plugins, data, scripts
 
@@ -11,21 +13,35 @@
 # For backwards compatibility
 SC2Reader = factories.SC2Factory
 
-# Expose a nice module level interface
-__defaultSC2Reader = factories.SC2Factory()
+def setFactory(factory):
+    # Expose a nice module level interface
+    module = sys.modules[__name__]
+    module.load_replays = factory.load_replays
+    module.load_replay = factory.load_replay
+    module.load_maps = factory.load_maps
+    module.load_map = factory.load_map
+    module.load_game_summaries = factory.load_game_summaries
+    module.load_game_summary = factory.load_game_summary
+    module.load_map_infos = factory.load_map_infos
+    module.load_map_info = factory.load_map_info
+    module.load_map_histories = factory.load_map_headers
+    module.load_map_history = factory.load_map_header
+
+    module.configure = factory.configure
+    module.reset = factory.reset
+
+    module.register_plugin = factory.register_plugin
+    module._defaultFactory = factory
+
+def useFileCache(cache_dir, **options):
+    setFactory(factories.FileCachedSC2Factory(cache_dir, **options))
+
+def useDictCache(cache_max_size=0, **options):
+    setFactory(factories.DictCachedSC2Factory(cache_max_size, **options))
+
+def useDoubleCache(cache_dir, cache_max_size=0, **options):
+    setFactory(factories.DoubleCachedSC2Factory(cache_dir, cache_max_size, **options))
 
-load_replays = __defaultSC2Reader.load_replays
-load_replay = __defaultSC2Reader.load_replay
-load_maps = __defaultSC2Reader.load_maps
-load_map = __defaultSC2Reader.load_map
-load_game_summaries = __defaultSC2Reader.load_game_summaries
-load_game_summary = __defaultSC2Reader.load_game_summary
-load_map_infos = __defaultSC2Reader.load_map_infos
-load_map_info = __defaultSC2Reader.load_map_info
-load_map_histories = __defaultSC2Reader.load_map_headers
-load_map_history = __defaultSC2Reader.load_map_header
+setFactory(factories.SC2Factory())
 
-configure = __defaultSC2Reader.configure
-reset = __defaultSC2Reader.reset
 
-register_plugin = __defaultSC2Reader.register_plugin
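
Note: after this change the module-level loaders are no longer bound once to a private default factory; setFactory() rebinds them on the module object, and the useFileCache/useDictCache/useDoubleCache helpers swap in the caching factories added in sc2reader/factories.py below. A rough usage sketch (the replay path and cache directory are hypothetical):

    import sc2reader

    # A plain SC2Factory is installed at import time via setFactory(factories.SC2Factory()).
    replay = sc2reader.load_replay('path/to/some.SC2Replay')

    # Rebind the module-level loaders to a factory that caches remote resources on disk.
    sc2reader.useFileCache('/tmp/sc2reader_cache')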

sc2reader/data/__init__.py

Lines changed: 80 additions & 84 deletions
@@ -1,11 +1,6 @@
 from __future__ import absolute_import
 
-#from sc2reader.data.utils import DataObject
-#from sc2reader.data.build16561 import Data_16561
-#from sc2reader.data.build17326 import Data_17326
-#from sc2reader.data.build18317 import Data_18317
-#from sc2reader.data.build19595 import Data_19595
-
+import pkgutil
 
 class Build(object):
     def __init__(self, build_id, units, abilities):
@@ -271,87 +266,88 @@ def __cmp__(self, other):
 class Ability(object):
     pass
 
+
 def create_build(build):
-    units_file = path.join(BASE_PATH, "{}_{}.csv".format(build,"units"))
-    abils_file = path.join(BASE_PATH, "{}_{}.csv".format(build,"abilities"))
-    with open(units_file, 'r') as data_file:
-        units = dict()
-        for row in [UnitRow(*line.strip().split('|')[1:]) for line in data_file]:
-            unit_id = int(row.id, 10) << 8 | 1
-            values = dict(cost=[0,0,0], race='Neutral',is_army=False, is_building=False, is_worker=False)
-            race, minerals, vespene, supply = "Neutral", 0, 0, 0
-            for race in ('Protoss','Terran','Zerg'):
-                if row.type.lower() in unit_lookup[race]:
-                    values.update(unit_lookup[race][row.type.lower()])
-                    values['race']=race
-                    break
-
-            units[unit_id] = type(row.title,(Unit,), dict(
-                type=unit_id,
-                name=row.title,
-                title=row.title,
-                race=values['race'],
-                minerals=values['cost'][0],
-                vespene=values['cost'][1],
-                supply=values['cost'][2],
-                is_building=values['is_building'],
-                is_worker=values['is_worker'],
-                is_army=values['is_army'],
+    units = dict()
+    units_file = "{0}_{1}.csv".format(build,"units")
+    units_data = pkgutil.get_data('sc2reader.data',units_file).split('\n')[:-1]
+    for row in [UnitRow(*line.strip().split('|')[1:]) for line in units_data]:
+        unit_id = int(row.id, 10) << 8 | 1
+        values = dict(cost=[0,0,0], race='Neutral',is_army=False, is_building=False, is_worker=False)
+        race, minerals, vespene, supply = "Neutral", 0, 0, 0
+        for race in ('Protoss','Terran','Zerg'):
+            if row.type.lower() in unit_lookup[race]:
+                values.update(unit_lookup[race][row.type.lower()])
+                values['race']=race
+                break
+
+        units[unit_id] = type(row.title,(Unit,), dict(
+            type=unit_id,
+            name=row.title,
+            title=row.title,
+            race=values['race'],
+            minerals=values['cost'][0],
+            vespene=values['cost'][1],
+            supply=values['cost'][2],
+            is_building=values['is_building'],
+            is_worker=values['is_worker'],
+            is_army=values['is_army'],
+        ))
+
+        if row.title.lower() in ('probe','zealot','stalker','immortal','phoenix','hightemplar','warpprism','archon','colossus','voidray'):
+            units[unit_id+1] = type("Hallucinated"+row.title,(Unit,), dict(
+                type=unit_id+1,
+                name="Hallucinated"+row.title,
+                title="Hallucinated"+row.title,
+                race='Protoss',
+                minerals=0,
+                vespene=0,
+                supply=0,
+                is_building=False,
+                is_army=True,
+                is_worker=False,
             ))
 
-            if row.title.lower() in ('probe','zealot','stalker','immortal','phoenix','hightemplar','warpprism','archon','colossus','voidray'):
-                units[unit_id+1] = type("Hallucinated"+row.title,(Unit,), dict(
-                    type=unit_id+1,
-                    name="Hallucinated"+row.title,
-                    title="Hallucinated"+row.title,
-                    race='Protoss',
-                    minerals=0,
-                    vespene=0,
-                    supply=0,
-                    is_building=False,
-                    is_army=True,
-                    is_worker=False,
-                ))
-
-
-    with open(abils_file, 'r') as data_file:
-        abilities = {0:type('RightClick',(Ability,), dict(type=0, name='RightClick', title='Right Click', is_build=False, build_time=None, build_unit=None))}
-        for row in [line.strip().split('|') for line in data_file]:
-            base = int(row[1],10) << 5
-            if base == 0: continue
-
-            # Temporary Hack here.
-            if base == 0xe80:
-                real_abils = [(0xe80,"QueueCancel0"), (0xe81,"QueueCancel1")]
-            else:
-                real_abils = [(base|i,t) for i,t in enumerate(row[3:]) if t.strip()!='']
-
-            for abil_id, title in real_abils:
-                abilities[abil_id] = type(title,(Ability,), dict(
-                    type=abil_id,
-                    name=title,
-                    title=title,
-                    is_build=False,
-                    build_time=None,
-                    build_unit=None
-                ))
-
-
-            # Some abilities have missing entries..
-            if len(real_abils) == 0:
-                abilities[base] = type(row[2],(Ability,), dict(
-                    type=base,
-                    name=row[2],
-                    title=row[2],
-                    is_build=False,
-                    build_time=None,
-                    build_unit=None
-                ))
-
-            if int(row[1],10) == 249 and build==22612:
-                pass
-                #print row
-                #print abilities[0x1f20], abilities[0x1f21], abilities[0x1f22], abilities[0x1f23]
+
+    abils_file = "{0}_{1}.csv".format(build,"abilities")
+    abils_data = pkgutil.get_data('sc2reader.data',abils_file).split('\n')[:-1]
+    abilities = {0:type('RightClick',(Ability,), dict(type=0, name='RightClick', title='Right Click', is_build=False, build_time=None, build_unit=None))}
+    for row in [line.strip().split('|') for line in abils_data]:
+        base = int(row[1],10) << 5
+        if base == 0: continue
+
+        # Temporary Hack here.
+        if base == 0xe80:
+            real_abils = [(0xe80,"QueueCancel0"), (0xe81,"QueueCancel1")]
+        else:
+            real_abils = [(base|i,t) for i,t in enumerate(row[3:]) if t.strip()!='']
+
+        for abil_id, title in real_abils:
+            abilities[abil_id] = type(title,(Ability,), dict(
+                type=abil_id,
+                name=title,
+                title=title,
+                is_build=False,
+                build_time=None,
+                build_unit=None
+            ))
+
+
+        # Some abilities have missing entries..
+        if len(real_abils) == 0:
+            abilities[base] = type(row[2],(Ability,), dict(
+                type=base,
+                name=row[2],
+                title=row[2],
+                is_build=False,
+                build_time=None,
+                build_unit=None
+            ))
+
+        if int(row[1],10) == 249 and build==22612:
+            pass
+            #print row
+            #print abilities[0x1f20], abilities[0x1f21], abilities[0x1f22], abilities[0x1f23]
 
     data = Build(build, units, abilities)
     for unit in units.values():
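
Note: create_build() now reads the pipe-delimited CSVs through pkgutil.get_data() instead of open(), matching the MANIFEST.in change above. The id packing applied to each row is easy to miss in the diff; a worked example with an invented raw id:

    # Unit ids: the CSV id is shifted left 8 bits and tagged with a 1 in the low byte.
    raw_id = "12"                        # illustrative value only
    unit_id = int(raw_id, 10) << 8 | 1   # 12 << 8 | 1 == 3073 (0x0C01)
    hallucinated_id = unit_id + 1        # hallucinated Protoss variants take the next id

    # Ability ids use a 5-bit shift on a different column.
    base = int("116", 10) << 5           # 116 << 5 == 3712 (0xE80), the QueueCancel special case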

sc2reader/factories.py

Lines changed: 92 additions & 19 deletions
@@ -219,28 +219,101 @@ def _load_resource(self, resource, options=None, **new_options):
         return (resource, resource_name)
 
 
-class SC2Cache(SC2Factory):
+import urlparse, time
+
+class CachedSC2Factory(SC2Factory):
+
+    def get_remote_cache_key(self, remote_resource):
+        # Strip the port and use the domain as the bucket
+        # and use the full path as the key
+        parseresult = urlparse.urlparse(remote_resource)
+        bucket = re.sub(r':.*', '', parseresult.netloc)
+        key = parseresult.path.strip('/')
+        return (bucket, key)
+
+    def load_remote_resource_contents(self, remote_resource, **options):
+        cache_key = self.get_remote_cache_key(remote_resource)
+        if not self.cache_has(cache_key):
+            resource = super(CachedSC2Factory, self).load_remote_resource_contents(remote_resource, **options)
+            self.cache_set(cache_key, resource)
+        else:
+            resource = self.cache_get(cache_key)
+        return resource
 
-    def __init__(self, **options):
-        super(SC2Cache, self).__init__(self, **options)
-        self.cache = IntitializeCache(**options)
+    def cache_has(self, cache_key):
+        raise NotImplemented()
 
-    def load_map(self, map_file, options=None, **new_options):
-        options = options or utils.merged_dict(self.options, new_options)
+    def cache_get(self, cache_key):
+        raise NotImplemented()
 
-        if self.cache.has(map_file):
-            return self.cache.get(map_file)
-        else:
-            map = super(SC2Cache, self).load_map(map_file, options=options)
-            self.cache.set(map_file, map)
-            return map
+    def cache_set(self, cache_key, value):
+        raise NotImplemented()
+
+class FileCachedSC2Factory(CachedSC2Factory):
+    def __init__(self, cache_dir, **options):
+        super(FileCachedSC2Factory, self).__init__(**options)
+        self.cache_dir = os.path.abspath(cache_dir)
+        if not os.path.isdir(self.cache_dir):
+            raise ValueError("cache_dir ({}) must be an existing directory.".format(self.cache_dir))
+        elif not os.access(self.cache_dir, os.F_OK | os.W_OK | os.R_OK ):
+            raise ValueError("Must have read/write access to {} for local file caching.".format(self.cache_dir))
+
+    def cache_has(self, cache_key):
+        return os.path.exists(self.cache_path(cache_key))
+
+    def cache_get(self, cache_key, **options):
+        return self.load_local_resource_contents(self.cache_path(cache_key),**options)
+
+    def cache_set(self, cache_key, value):
+        cache_path = self.cache_path(cache_key)
+        bucket_dir = os.path.dirname(cache_path)
+        if not os.path.exists(bucket_dir):
+            os.makedirs(bucket_dir)
+
+        with open(cache_path, 'w') as out:
+            out.write(value)
+
+    def cache_path(self, cache_key):
+        return os.path.join(self.cache_dir,*(cache_key))
 
-    def load_replay(self, replay_file, options=None, **new_options):
-        options = options or utils.merged_dict(self.options, new_options)
+class DictCachedSC2Factory(CachedSC2Factory):
+    def __init__(self, cache_max_size=0, **options):
+        super(DictCachedSC2Factory, self).__init__(**options)
+        self.cache_dict = dict()
+        self.cache_used = dict()
+        self.cache_max_size = cache_max_size
 
-        if self.cache.has(replay_file):
-            return self.cache.get(replay_file)
+    def cache_set(self, cache_key, value):
+        if self.cache_max_size and len(self.cache_dict) >= self.cache_max_size:
+            oldest_cache_key = min(self.cache_used.items(), key=lambda e: e[1])[0]
+            del self.cache_used[oldest_cache_key]
+            del self.cache_dict[oldest_cache_key]
+        self.cache_dict[cache_key] = value
+        self.cache_used[cache_key] = time.time()
+
+    def cache_get(self, cache_key):
+        self.cache_used[cache_key] = time.time()
+        return self.cache_dict[cache_key]
+
+    def cache_has(self, cache_key):
+        return cache_key in self.cache_dict
+
+class DoubleCachedSC2Factory(DictCachedSC2Factory, FileCachedSC2Factory):
+
+    def __init__(self, cache_dir, cache_max_size=0, **options):
+        super(DoubleCachedSC2Factory, self).__init__(cache_max_size, cache_dir=cache_dir, **options)
+
+    def load_remote_resource_contents(self, remote_resource, **options):
+        cache_key = self.get_remote_cache_key(remote_resource)
+
+        if DictCachedSC2Factory.cache_has(self, cache_key):
+            return DictCachedSC2Factory.cache_get(self, cache_key)
+
+        if not FileCachedSC2Factory.cache_has(self, cache_key):
+            resource = SC2Factory.load_remote_resource_contents(self, remote_resource, **options)
+            FileCachedSC2Factory.cache_set(self, cache_key, resource)
         else:
-            replay = super(SC2Cache, self).load_replay(replay_file, options=options)
-            self.cache.set(replay_file, replay)
-            return replay
+            resource = FileCachedSC2Factory.cache_get(self, cache_key)
+
+        DictCachedSC2Factory.cache_set(self, cache_key, resource)
+        return resource
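
Note: CachedSC2Factory keys each remote resource by (domain, path) and defers storage to cache_has/cache_get/cache_set, which the file-backed, dict-backed, and combined subclasses implement. A usage sketch based on the constructors above (paths and sizes are made up):

    from sc2reader import factories

    # Cache fetched remote resources on disk; the directory must already exist.
    file_cached = factories.FileCachedSC2Factory('/tmp/sc2reader_cache')

    # Keep at most 100 resources in memory, evicting the least recently used entry.
    dict_cached = factories.DictCachedSC2Factory(cache_max_size=100)

    # Check memory first, then disk, then fall back to the network.
    double_cached = factories.DoubleCachedSC2Factory('/tmp/sc2reader_cache', cache_max_size=100)

    # get_remote_cache_key() strips the port and splits bucket/key, e.g. for an
    # illustrative URL 'http://example.depot.host:1119/abc123.s2ma' it returns
    # ('example.depot.host', 'abc123.s2ma').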
