Roman Belousov 2024-12-25 20:09:04 +04:00 committed by GitHub
commit 40a29fd596
4 changed files with 63 additions and 8 deletions

README.md

@@ -459,6 +459,8 @@ optional arguments:
   --exclude <prefix>    Exclude files starting with <prefix> (case
                         insensitive)
   --install-tag <tag>   Only download files with the specified install tag
+  --read-files          Read duplicated parts from already saved files, do not
+                        keep them in RAM
   --enable-reordering   Enable reordering optimization to reduce RAM
                         requirements during download (may have adverse results
                         for some titles)
@@ -670,6 +672,8 @@ log_level = debug
 max_memory = 2048
 ; maximum number of worker processes when downloading (fewer workers will be slower, but also use less system resources)
 max_workers = 8
+; Enables reading duplicated data from files during download (decreases RAM usage but increases disk I/O)
+read_files = false
 ; default install directory
 install_dir = /mnt/tank/games
 ; locale override, must be in RFC 1766 format (e.g. "en-US")
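
Note: the core reads this option from the [Legendary] section of the config file (see the getboolean call in legendary/core.py below), so users who always want the lower-RAM behaviour can enable it permanently rather than passing the flag each time. A minimal snippet, assuming the default config location ~/.config/legendary/config.ini:

    [Legendary]
    ; read duplicated data from already written files instead of keeping it in RAM
    read_files = true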

legendary/cli.py

@@ -971,6 +971,7 @@ class LegendaryCLI:
                 file_prefix_filter=args.file_prefix,
                 file_exclude_filter=args.file_exclude_prefix,
                 file_install_tag=args.install_tag,
+                read_files=args.read_files,
                 dl_optimizations=args.order_opt,
                 dl_timeout=args.dl_timeout,
                 repair=args.repair_mode,
@@ -2768,6 +2769,8 @@ def main():
                                 type=str, help='Exclude files starting with <prefix> (case insensitive)')
     install_parser.add_argument('--install-tag', dest='install_tag', action='append', metavar='<tag>',
                                 type=str, help='Only download files with the specified install tag')
+    install_parser.add_argument('--read-files', dest='read_files', action='store_true',
+                                help='Read duplicated parts from already saved files, do not keep them in memory')
     install_parser.add_argument('--enable-reordering', dest='order_opt', action='store_true',
                                 help='Enable reordering optimization to reduce RAM requirements '
                                      'during download (may have adverse results for some titles)')
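
With the argument registered on the install subcommand, the flag is opt-in per invocation; a typical use on a memory-constrained machine would be:

    legendary install <app name> --read-files

(<app name> is a placeholder for the game's app name, as elsewhere in Legendary's CLI.)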

legendary/core.py

@@ -1327,6 +1327,7 @@ class LegendaryCore:
                        override_old_manifest: str = '', override_base_url: str = '',
                        platform: str = 'Windows', file_prefix_filter: list = None,
                        file_exclude_filter: list = None, file_install_tag: list = None,
+                       read_files: bool = False,
                        dl_optimizations: bool = False, dl_timeout: int = 10,
                        repair: bool = False, repair_use_latest: bool = False,
                        disable_delta: bool = False, override_delta_manifest: str = '',
@@ -1487,6 +1488,9 @@ class LegendaryCore:
         if not max_shm:
             max_shm = self.lgd.config.getint('Legendary', 'max_memory', fallback=2048)
 
+        if not read_files:
+            read_files = self.lgd.config.getboolean('Legendary', 'read_files', fallback=False)
+
         if dl_optimizations or is_opt_enabled(game.app_name, new_manifest.meta.build_version):
             self.log.info('Download order optimizations are enabled.')
             process_opt = True
@@ -1499,12 +1503,26 @@
         dlm = DLManager(install_path, base_url, resume_file=resume_file, status_q=status_q,
                         max_shared_memory=max_shm * 1024 * 1024, max_workers=max_workers,
                         dl_timeout=dl_timeout, bind_ip=bind_ip)
-        anlres = dlm.run_analysis(manifest=new_manifest, old_manifest=old_manifest,
-                                  patch=not disable_patching, resume=not force,
-                                  file_prefix_filter=file_prefix_filter,
-                                  file_exclude_filter=file_exclude_filter,
-                                  file_install_tag=file_install_tag,
-                                  processing_optimization=process_opt)
+
+        analysis_kwargs = dict(
+            old_manifest=old_manifest,
+            patch=not disable_patching, resume=not force,
+            file_prefix_filter=file_prefix_filter,
+            file_exclude_filter=file_exclude_filter,
+            file_install_tag=file_install_tag,
+            processing_optimization=process_opt
+        )
+
+        try:
+            anlres = dlm.run_analysis(manifest=new_manifest, **analysis_kwargs, read_files=read_files)
+        except MemoryError:
+            if read_files:
+                raise
+            self.log.warning('Memory error encountered, retrying with file read enabled...')
+            dlm = DLManager(install_path, base_url, resume_file=resume_file, status_q=status_q,
+                            max_shared_memory=max_shm * 1024 * 1024, max_workers=max_workers,
+                            dl_timeout=dl_timeout, bind_ip=bind_ip)
+            anlres = dlm.run_analysis(manifest=new_manifest, **analysis_kwargs, read_files=True)
 
         prereq = None
         if new_manifest.meta.prereq_ids:
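
The notable change here is the recovery path: analysis first runs with the caller's read_files setting, and only a MemoryError triggers a retry with read_files=True, on a freshly constructed DLManager (presumably because the failed instance's state is no longer trustworthy). A minimal sketch of the pattern in isolation, with illustrative names not taken from the codebase:

    def analyse_with_fallback(make_manager, manifest, **kwargs):
        # hypothetical helper showing the retry-on-MemoryError pattern above
        try:
            # fast path: duplicated chunk parts stay cached in shared memory
            return make_manager().run_analysis(manifest=manifest, read_files=False, **kwargs)
        except MemoryError:
            # fallback: re-read duplicated parts from files already written to disk
            return make_manager().run_analysis(manifest=manifest, read_files=True, **kwargs)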

legendary/downloader/mp/manager.py

@@ -82,6 +82,7 @@ class DLManager(Process):
     def run_analysis(self, manifest: Manifest, old_manifest: Manifest = None,
                      patch=True, resume=True, file_prefix_filter=None,
                      file_exclude_filter=None, file_install_tag=None,
+                     read_files=False,
                      processing_optimization=False) -> AnalysisResult:
         """
         Run analysis on manifest and old manifest (if not None) and return a result
@@ -94,6 +95,7 @@ class DLManager(Process):
         :param file_prefix_filter: Only download files that start with this prefix
         :param file_exclude_filter: Exclude files with this prefix from download
         :param file_install_tag: Only install files with the specified tag
+        :param read_files: Allow reading from already finished files
         :param processing_optimization: Attempt to optimize processing order and RAM usage
         :return: AnalysisResult
         """
@@ -318,6 +320,30 @@
                         analysis_res.reuse_size += cp.size
                         break
 
+        # determine whether a chunk part is currently in written files
+        reusable_written = defaultdict(dict)
+        if read_files:
+            self.log.debug('Analyzing manifest for re-usable chunks in saved files...')
+            cur_written_cps = defaultdict(list)
+            for cur_file in fmlist:
+                cur_file_cps = dict()
+                cur_file_offset = 0
+                for cp in cur_file.chunk_parts:
+                    key = (cp.guid_num, cp.offset, cp.size)
+                    for wr_file_name, wr_file_offset, wr_cp_offset, wr_cp_end_offset in cur_written_cps[cp.guid_num]:
+                        # check if new chunk part is wholly contained in a written chunk part
+                        cur_cp_end_offset = cp.offset + cp.size
+                        if wr_cp_offset <= cp.offset and wr_cp_end_offset >= cur_cp_end_offset:
+                            references[cp.guid_num] -= 1
+                            reuse_offset = wr_file_offset + (cp.offset - wr_cp_offset)
+                            reusable_written[cur_file.filename][key] = (wr_file_name, reuse_offset)
+                            break
+                    cur_file_cps[cp.guid_num] = (cur_file.filename, cur_file_offset, cp.offset, cp.offset + cp.size)
+                    cur_file_offset += cp.size
+
+                for guid, value in cur_file_cps.items():
+                    cur_written_cps[guid].append(value)
+
 last_cache_size = current_cache_size = 0
 # set to determine whether a file is currently cached or not
 cached = set()
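
The containment check is plain interval arithmetic: a chunk part can be served from an already written file only when its [offset, offset + size) range lies wholly inside a part of the same chunk (same guid_num) written earlier, and each hit also decrements that chunk's reference count. A worked example with made-up numbers:

    # earlier file: bytes 100..600 of chunk G were written to 'a.bin' at file offset 4096
    wr_file_name, wr_file_offset, wr_cp_offset, wr_cp_end_offset = 'a.bin', 4096, 100, 600

    # new chunk part: bytes 250..350 of the same chunk G
    cp_offset, cp_size = 250, 100
    cur_cp_end_offset = cp_offset + cp_size  # 350

    # wholly contained: 100 <= 250 and 600 >= 350
    assert wr_cp_offset <= cp_offset and wr_cp_end_offset >= cur_cp_end_offset
    reuse_offset = wr_file_offset + (cp_offset - wr_cp_offset)  # 4096 + 150 = 4246
    # so these 100 bytes can be read from 'a.bin' at offset 4246 instead of kept in RAM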
@@ -338,6 +364,7 @@
                 continue
 
             existing_chunks = re_usable.get(current_file.filename, None)
+            written_chunks = reusable_written.get(current_file.filename, None)
             chunk_tasks = []
             reused = 0
@@ -345,10 +372,13 @@
                 ct = ChunkTask(cp.guid_num, cp.offset, cp.size)
 
                 # re-use the chunk from the existing file if we can
-                if existing_chunks and (cp.guid_num, cp.offset, cp.size) in existing_chunks:
+                key = (cp.guid_num, cp.offset, cp.size)
+                if existing_chunks and key in existing_chunks:
                     reused += 1
                     ct.chunk_file = current_file.filename
-                    ct.chunk_offset = existing_chunks[(cp.guid_num, cp.offset, cp.size)]
+                    ct.chunk_offset = existing_chunks[key]
+                elif written_chunks and key in written_chunks:
+                    ct.chunk_file, ct.chunk_offset = written_chunks[key]
                 else:
                     # add to DL list if not already in it
                     if cp.guid_num not in chunks_in_dl_list:
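
Taken together, each chunk part now resolves in order of preference: bytes reused from the pre-existing installation (re_usable), bytes re-read from a file written earlier in the same run (reusable_written), and only then an actual chunk download. An illustrative restatement of that decision, not part of the commit:

    key = (cp.guid_num, cp.offset, cp.size)
    if existing_chunks and key in existing_chunks:
        source = ('old file', current_file.filename, existing_chunks[key])
    elif written_chunks and key in written_chunks:
        source = ('written file',) + written_chunks[key]
    else:
        source = ('download', cp.guid_num)  # fetch the chunk from the CDN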