# Not unlike the factory pattern.
# This code has been written inline for now due to time constraints; it will
# eventually be split into submodules to improve readability.

## Wraps multiple virtual file systems into one; lookups recurse through each
## accessor in the order given, so earlier accessors win on conflicts.
static func create_meta_accessor(file_accessors:Array[VFileAccess])->VFileAccess:
	var vfiler := VFileAccess.new()

	vfiler._get_stuff = func()->Variant: return file_accessors

	vfiler._file_exists = func(path:String)->bool:
		return file_accessors.any(func(vfs:VFileAccess)->bool:
			return vfs.file_exists(path))

	vfiler._get_files_at = func(path:String)->Array[String]:
		var accum:Array[String] = []
		accum.assign(
			file_accessors.reduce(
				(func _merge_files(total:Array[String], current:VFileAccess)->Array[String]:
					var res:Array[String] = total.duplicate()
					for file:String in current.get_files_at(path):
						if not file in total:
							res.append(file)
					return res), []))
		return accum

	vfiler._get_buffer = func(path:String)->PackedByteArray:
		for vfs:VFileAccess in file_accessors:
			if vfs.file_exists(path):
				return vfs.get_buffer(path)
		push_error("File '%s' not found in any checked accessors." % path)
		return PackedByteArray()

	vfiler._write_file = func _meta_write(_path:String, _buffer:PackedByteArray)->Error:
		return ERR_FILE_CANT_WRITE

	vfiler._close = func()->void:
		file_accessors.map(func(vfs:VFileAccess)->void: vfs.close())

	return vfiler
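

# Usage sketch (illustrative only, not part of the original API): layer a
# loose directory over a zip so on-disk files override zipped ones. The
# paths below are hypothetical.
static func _example_meta_usage()->void:
	var layers:Array[VFileAccess] = [
		create_file_access("user://overrides/"),
		create_readonly_zip_access("user://content/base.zip"),
	]
	var vfs := create_meta_accessor(layers)
	if vfs.file_exists("data/config.json"):
		print("config is %d bytes" % vfs.get_buffer("data/config.json").size())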


## A basic on-disk file accessor rooted at [param root].
static func create_file_access(
		root:String = "./",
		support_files:Dictionary[String,Callable] = VFileAccess.IMPORTS.DEFAULT_SUPPORTED_FILES
	)->VFileAccess:
	var vfiler := VFileAccess.new(root, support_files)

	vfiler._get_buffer = FileAccess.get_file_as_bytes # thanks for being static
	vfiler._file_exists = FileAccess.file_exists # thanks for being static

	vfiler._write_file = func _file_access_write(path:String, buffer:PackedByteArray)->Error:
		var abs_path := vfiler.get_absolute_path(path)
		# Make sure the parent directory exists before opening the file.
		var base_dir := abs_path.get_base_dir()
		if not DirAccess.dir_exists_absolute(base_dir):
			DirAccess.make_dir_recursive_absolute(base_dir)
		var f := FileAccess.open(abs_path, FileAccess.WRITE_READ)
		if f == null:
			push_error("Could not open '%s' for writing." % abs_path)
			return FileAccess.get_open_error()
		f.store_buffer(buffer)
		f.close()
		return OK

	vfiler._get_files_at = func(path:String)->Array[String]:
		var paths:Array[String] = []
		#var abs_path := vfiler.get_absolute_path(path)
		if not DirAccess.dir_exists_absolute(path):
			print("Path '%s' not found. Adding..." % path)
			DirAccess.make_dir_recursive_absolute(path)
		paths.assign(DirAccess.get_files_at(path))
		return paths

	return vfiler
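

# Usage sketch (illustrative only): read a loose file through a disk-backed
# accessor. The root and file name are hypothetical, and this assumes the
# public helpers resolve relative paths against the accessor's root.
static func _example_file_access_usage()->void:
	var vfs := create_file_access("user://save_data/")
	if vfs.file_exists("slot_1.dat"):
		print("slot_1.dat is %d bytes" % vfs.get_buffer("slot_1.dat").size())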


## A write-only zip accessor backed by a single [ZIPPacker]; read functions
## are intentionally disabled.
static func create_writeonly_zip_access(
		zip_path:String,
		append := ZIPPacker.ZipAppend.APPEND_CREATE,
	)->VFileAccess:
	var vfiler := VFileAccess.new()
	if not is_instance_valid(vfiler): return null

	var writer := ZIPPacker.new()
	# Open once up front and keep the packer open; _write_file streams entries
	# into it until close() is called.
	var open_err := writer.open(zip_path, append)
	if open_err != OK:
		push_error("Could not open zip writer '%s': %s" % [zip_path, error_string(open_err)])

	vfiler._get_stuff = func()->Variant: return writer

	vfiler._write_file = func _zip_write(path:String, buffer:PackedByteArray)->Error:
		writer.start_file(path)
		var write_err := writer.write_file(buffer)
		writer.close_file()
		return write_err

	vfiler._get_buffer = func(_path:String)->PackedByteArray:
		push_error("Attempted to read from write-only zip access! Returning 0 bytes.")
		return PackedByteArray()

	vfiler._get_files_at = func(_path:String)->Array[String]:
		push_error("Attempted to get files from write-only zip access!")
		return []

	vfiler._close = func()->void: writer.close()

	return vfiler
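

# Usage sketch (illustrative only): pack one buffer into an archive. A public
# write wrapper is not shown in this section, so the sketch calls the
# underlying _write_file callable directly; the paths are hypothetical.
static func _example_writeonly_zip_usage()->void:
	var vfs := create_writeonly_zip_access("user://export/bundle.zip")
	if vfs == null:
		return
	var err:int = vfs._write_file.call("docs/readme.txt", "hello".to_utf8_buffer())
	if err != OK:
		push_error("Zip write failed: %s" % error_string(err))
	vfs.close()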


## A single-zip accessor with write functions disabled.
## TODO option to keep zips open, for OS to flag files as in-use.
static func create_readonly_zip_access(
		zip_path:String, keep_open:bool = false,
		support_files:Dictionary[String,Callable] = VFileAccess.IMPORTS.DEFAULT_SUPPORTED_FILES
	)->VFileAccess:
	var vfiler := create_bulk_readonly_zip_access([zip_path], keep_open, support_files)
	# Capture the bulk accessor's getter before overriding it, otherwise the
	# override would call back into itself through get_stuff().
	var bulk_get_stuff := vfiler._get_stuff
	vfiler._get_stuff = func()->Variant: return bulk_get_stuff.call()[0]
	return vfiler
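

# Usage sketch (illustrative only): read one file out of a single zip. The
# zip path and inner path are hypothetical.
static func _example_readonly_zip_usage()->void:
	var vfs := create_readonly_zip_access("res://content/levels.zip")
	if vfs.file_exists("levels/level_01.txt"):
		print(vfs.get_buffer("levels/level_01.txt").get_string_from_utf8())
	vfs.close()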


## A multi-zip read-only accessor. Multiple zips can be loaded at once, and
## earlier entries in [param zip_paths] override later ones when files collide.
## TODO option to keep zips open, for OS to flag files as in-use.
## [param if_missing_zip(String)] executes if a requested zip is missing on disk.
static func create_bulk_readonly_zip_access(
		zip_paths:Array[String], keep_open:bool = false,
		support_files:Dictionary[String,Callable] = VFileAccess.IMPORTS.DEFAULT_SUPPORTED_FILES,
		if_missing_zip := func _ignore(_zip_path:String)->bool: return false,
	)->VFileAccess:
	var vfiler := VFileAccess.new("", support_files)

	var readers:Array[ZIPReader] = []
	readers.assign(zip_paths.map(
		func _open_zip_reader(zip_path:String)->ZIPReader:
			var zip := ZIPReader.new()
			if not FileAccess.file_exists(zip_path): # Use VFileAccess recursively so we are more powerful?
				if_missing_zip.call(zip_path)
			var open_err := zip.open(zip_path)
			if open_err:
				push_error("Could not open zip '%s': %s" % [zip_path, error_string(open_err)])
				return null
			return zip))

	# Drop any zips that failed to open.
	readers.assign(readers.filter(is_instance_valid))

	vfiler._get_stuff = func()->Variant: return readers

	# _file_exists is set first so the other callables can bulk-check through it.
	vfiler._file_exists = func(abs_path:String)->bool:
		return readers.any(
			func(z:ZIPReader)->bool: return z.file_exists(abs_path))

	vfiler._get_buffer = func(abs_path:String)->PackedByteArray:
		var output := PackedByteArray()
		if not vfiler.file_exists(abs_path):
			push_error("Zips don't have file id '%s'" % abs_path)
			return output
		# The first reader that contains the file wins.
		for z:ZIPReader in readers:
			if not z.file_exists(abs_path): continue
			output = z.read_file(abs_path)
			break
		return output

	vfiler._write_file = func _zip_write(_path:String, _buffer:PackedByteArray)->Error:
		return ERR_FILE_CANT_WRITE

	vfiler._close = func()->void:
		readers.map(func(z:ZIPReader)->void: z.close())

	vfiler._get_files_at = func(path:String)->Array[String]:
		if not path.ends_with("/"): # TODO ensure no // as well because that can be a problem.
			path += "/" # sorry for the mutation
		var accum:Array[String] = []
		accum.assign(readers.reduce(
			(func _merge_files(total:Array[String], current:ZIPReader)->Array[String]:
				var res:Array[String] = total.duplicate()
				var filtered:Array = []
				filtered.assign(current.get_files())
				# filter() returns a new array, so keep the result.
				filtered = filtered.filter(
					func _filter_dir(curpath:String)->bool:
						return curpath.begins_with(path))
				for file:String in filtered:
					var stripped_file:String = file.replace(path, "")
					# Skip anything that still contains "/" after stripping: those are
					# entries in subdirectories, not direct children of the path.
					if not file in total and not stripped_file.contains("/"):
						res.append(file)
				return res), []))
		return accum

	return vfiler
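

# Usage sketch (illustrative only): mod-style loading where earlier zips
# override later ones, with a warning callback for missing archives. Paths
# are hypothetical; get_stuff() exposes the underlying ZIPReader list.
static func _example_bulk_zip_usage()->void:
	var zips:Array[String] = ["user://mods/patch.zip", "res://content/base.zip"]
	var vfs := create_bulk_readonly_zip_access(
		zips, false, VFileAccess.IMPORTS.DEFAULT_SUPPORTED_FILES,
		func _warn_missing(missing_zip:String)->bool:
			push_warning("Missing zip: %s" % missing_zip)
			return false)
	for file:String in vfs.get_files_at("data/"):
		print("data/ contains: %s" % file)
	vfs.close()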