Add customizable assets via zip loading.
parent 70be9e841f
commit 5b63a0a673

asset_pack.gd (new file, 34 lines)
@@ -0,0 +1,34 @@
class_name AssetPack extends Object

const ASSET_DIR := "./assets.zip"

static func load_sfx()->Dictionary[StringName, AudioStream]:
	var dict := load_user_assets(
		VFileAccess.IMPORTS.AUDIO_FILES, ["ogg","mp3","wav"])
	var ugh:Dictionary[StringName,AudioStream]
	ugh.assign(dict)
	return ugh


static func load_gfx()->Dictionary[StringName, Texture2D]:
	var images:Dictionary = load_user_assets(
		VFileAccess.IMPORTS.IMAGE_FILES, ["png","jpg","jpeg"])
	var output:Dictionary[StringName, Texture2D] = {}
	for id:StringName in images.keys():
		output[id] = ImageTexture.create_from_image(images[id])
	return output


static func load_user_assets(
	formats_supported:Dictionary[String,Callable],
	formats:Array[String]
)->Dictionary:
	var vfs := VFileAccess.CREATE.create_readonly_zip_access(ASSET_DIR, false, formats_supported)
	var load_asset := vfs.load_any_supported.bind(formats)
	var output:Dictionary = {
		&"rest": load_asset.call("rest"),
		&"longrest": load_asset.call("longrest"),
		&"run": load_asset.call("run")
	}
	vfs.close()
	return output
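For anyone authoring an assets.zip by hand: load_user_assets() looks up the ids "rest", "longrest" and "run", each paired with one of the listed audio or image extensions. Below is a sketch of generating a template pack with the write-only accessor added later in this commit; the placeholder source paths are assumptions, not files shipped here.

# Sketch only: writes a template assets.zip containing the entry names
# that load_user_assets() will later look up. Placeholder sources assumed.
static func write_template_pack()->void:
	var out := VFileAccess.CREATE.create_writeonly_zip_access("./assets.zip")
	for id:String in ["rest", "longrest", "run"]:
		out.write_file(id + ".png", FileAccess.get_file_as_bytes("res://assets/placeholder.png"))
		out.write_file(id + ".ogg", FileAccess.get_file_as_bytes("res://assets/placeholder.ogg"))
	out.close()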
asset_pack.gd.uid (new file, 1 line)
@@ -0,0 +1 @@
uid://d17o8drukf01j

assets.zip (new binary file)
Binary file not shown.
export_presets.cfg (modified)
@@ -9,10 +9,11 @@ custom_features=""
 export_filter="all_resources"
 include_filter=""
 exclude_filter=""
-export_path="../dino_tomato_builds/dino_tomato.x86_64"
+export_path="../Builds/dino_tomato.x86_64"
+patches=PackedStringArray()
 encryption_include_filters=""
 encryption_exclude_filters=""
 seed=0
 encrypt_pck=false
 encrypt_directory=false
 script_export_mode=2
@@ -50,10 +51,11 @@ custom_features=""
 export_filter="all_resources"
 include_filter=""
 exclude_filter=""
-export_path="../dino_tomato_builds/dino_tomato.arm64"
+export_path="../Builds/dino_tomato.arm64"
+patches=PackedStringArray()
 encryption_include_filters=""
 encryption_exclude_filters=""
 seed=0
 encrypt_pck=false
 encrypt_directory=false
 script_export_mode=2
@@ -79,3 +81,70 @@ unzip -o -q \"{temp_dir}/{archive_name}\" -d \"{temp_dir}\"
 ssh_remote_deploy/cleanup_script="#!/usr/bin/env bash
 kill $(pgrep -x -f \"{temp_dir}/{exe_name} {cmd_args}\")
 rm -rf \"{temp_dir}\""
+
+[preset.2]
+
+name="Windows Desktop"
+platform="Windows Desktop"
+runnable=true
+advanced_options=false
+dedicated_server=false
+custom_features=""
+export_filter="all_resources"
+include_filter=""
+exclude_filter=""
+export_path="../Builds/dino_tomato.exe"
+patches=PackedStringArray()
+encryption_include_filters=""
+encryption_exclude_filters=""
+seed=0
+encrypt_pck=false
+encrypt_directory=false
+script_export_mode=2
+
+[preset.2.options]
+
+custom_template/debug=""
+custom_template/release=""
+debug/export_console_wrapper=1
+binary_format/embed_pck=false
+texture_format/s3tc_bptc=true
+texture_format/etc2_astc=false
+binary_format/architecture="x86_64"
+codesign/enable=false
+codesign/timestamp=true
+codesign/timestamp_server_url=""
+codesign/digest_algorithm=1
+codesign/description=""
+codesign/custom_options=PackedStringArray()
+application/modify_resources=true
+application/icon=""
+application/console_wrapper_icon=""
+application/icon_interpolation=4
+application/file_version=""
+application/product_version=""
+application/company_name=""
+application/product_name=""
+application/file_description=""
+application/copyright=""
+application/trademarks=""
+application/export_angle=0
+application/export_d3d12=0
+application/d3d12_agility_sdk_multiarch=true
+ssh_remote_deploy/enabled=false
+ssh_remote_deploy/host="user@host_ip"
+ssh_remote_deploy/port="22"
+ssh_remote_deploy/extra_args_ssh=""
+ssh_remote_deploy/extra_args_scp=""
+ssh_remote_deploy/run_script="Expand-Archive -LiteralPath '{temp_dir}\\{archive_name}' -DestinationPath '{temp_dir}'
+$action = New-ScheduledTaskAction -Execute '{temp_dir}\\{exe_name}' -Argument '{cmd_args}'
+$trigger = New-ScheduledTaskTrigger -Once -At 00:00
+$settings = New-ScheduledTaskSettingsSet
+$task = New-ScheduledTask -Action $action -Trigger $trigger -Settings $settings
+Register-ScheduledTask godot_remote_debug -InputObject $task -Force:$true
+Start-ScheduledTask -TaskName godot_remote_debug
+while (Get-ScheduledTask -TaskName godot_remote_debug | ? State -eq running) { Start-Sleep -Milliseconds 100 }
+Unregister-ScheduledTask -TaskName godot_remote_debug -Confirm:$false -ErrorAction:SilentlyContinue"
+ssh_remote_deploy/cleanup_script="Stop-ScheduledTask -TaskName godot_remote_debug -ErrorAction:SilentlyContinue
+Unregister-ScheduledTask -TaskName godot_remote_debug -Confirm:$false -ErrorAction:SilentlyContinue
+Remove-Item -Recurse -Force '{temp_dir}'"
main.gd (14 changed lines)
@@ -8,6 +8,12 @@ const PREFIX_LABELS := "[color=black]%s"
 	&"run": preload("res://assets/lemon_run.tres")
 }
+
+@export var sounds:Dictionary[StringName, AudioStream] = {
+	&"rest": preload("res://assets/miss.wav"),
+	&"longrest": preload("res://assets/miss.wav"),
+	&"run": preload("res://assets/miss.wav")
+}

 @export var time_label:RichTextLabel
 @export var realtime_label:RichTextLabel
 @export var rest_label:RichTextLabel
@@ -18,7 +24,9 @@ var realtime_since_start:float = 0.0
 @onready var sfx:AudioStreamPlayer = $SFX

 @export var edits:Dictionary[StringName,TextEdit] = {}
+var state_id:StringName = &"rest"
+
 ## This probably should be its own script but.
 class PomodoroContext:
 	var current_state := &"run"
 	var workmins:float = 25.0
@@ -58,7 +66,10 @@ class PomodoroContext:
 		current_state = &"run"

-var state_id:StringName = &"rest"
+
 func _ready()->void:
+	images = AssetPack.load_gfx()
+	sounds = AssetPack.load_sfx()
 	pass

+
 func _process(delta: float) -> void:
@@ -69,6 +80,7 @@ func _process(delta: float) -> void:
 	realtime_label.text = "[color=red]%s" % \
 		Time.get_time_string_from_unix_time(realtime_since_start)

+
 func get_from_menus()->PomodoroContext:
 	return PomodoroContext.new(
 		4,
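Since _ready() assigns the zip contents over the exported dictionaries wholesale, a missing or partial assets.zip leaves null entries in images and sounds. A hedged sketch of merging user assets over the preloaded defaults instead follows; this helper is hypothetical and not part of the commit.

# Hypothetical helper: overlay zip-provided assets onto the exported defaults,
# keeping each preloaded fallback when assets.zip has no usable entry for it.
func _apply_user_assets()->void:
	var user_images := AssetPack.load_gfx()
	for id:StringName in user_images.keys():
		if user_images[id] != null:
			images[id] = user_images[id]
	var user_sounds := AssetPack.load_sfx()
	for id:StringName in user_sounds.keys():
		if user_sounds[id] != null:
			sounds[id] = user_sounds[id]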
main.gd.uid (new file, 1 line)
@@ -0,0 +1 @@
uid://c4mjp8t1rmhgx
main.tscn (modified)
@@ -1,6 +1,6 @@
 [gd_scene load_steps=3 format=3 uid="uid://dw6uk60j153g4"]

-[ext_resource type="Script" path="res://main.gd" id="1_fn6k1"]
+[ext_resource type="Script" uid="uid://c4mjp8t1rmhgx" path="res://main.gd" id="1_fn6k1"]
 [ext_resource type="AudioStream" uid="uid://doh10c785c5fm" path="res://assets/miss.wav" id="3_sir8a"]

 [node name="Main" type="Control" node_paths=PackedStringArray("time_label", "realtime_label", "rest_label", "timer", "image", "edits")]
@@ -17,9 +17,9 @@ rest_label = NodePath("Rests")
 timer = NodePath("Timer")
 image = NodePath("FunnyImg")
 edits = {
-&"longrest": NodePath("VBoxContainer/EditMinutesLongBreak")
-&"run": NodePath("VBoxContainer/EditWorkMins")
+&"longrest": NodePath("VBoxContainer/EditMinutesLongBreak"),
 &"rest": NodePath("VBoxContainer/EditMinutesBreak"),
+&"run": NodePath("VBoxContainer/EditWorkMins"),
 }

 [node name="FunnyImg" type="TextureRect" parent="."]
project.godot (modified)
@@ -12,6 +12,8 @@ config_version=5

 config/name="dino_tomato"
 run/main_scene="res://main.tscn"
+config/use_custom_user_dir=true
+config/custom_user_dir_name="dinoleaf/tools/tomato"
 config/features=PackedStringArray("4.4", "GL Compatibility")
 run/max_fps=30
 run/low_processor_mode=true
@@ -32,4 +34,5 @@ window/vsync/vsync_mode=0
 [rendering]

 renderer/rendering_method="mobile"
+textures/vram_compression/import_s3tc_bptc=true
 environment/defaults/default_clear_color=Color(1, 1, 1, 1)
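One caveat worth noting alongside the new custom user dir: AssetPack's ASSET_DIR of "./assets.zip" is resolved against the process working directory, which is not always the folder the exported binary lives in. A hedged sketch of anchoring the pack next to the executable instead; this helper is not part of the commit.

# Hypothetical helper: locate assets.zip beside the exported executable,
# falling back to the project folder while running from the editor.
static func get_asset_pack_path()->String:
	if OS.has_feature("editor"):
		return ProjectSettings.globalize_path("res://assets.zip")
	return OS.get_executable_path().get_base_dir().path_join("assets.zip")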
vfs/create_vfs.gd (new file, 193 lines)
@@ -0,0 +1,193 @@
# Not unlike factory pattern.

# This code's been directly written in due to time and lazy, but
# will be thrown into submodules eventually to clean readability up.

## Recursively creates virtual file systems.
static func create_meta_accessor(file_accessors:Array[VFileAccess])->VFileAccess:
	var vfiler := VFileAccess.new("")

	vfiler._get_stuff = func()->Variant: return file_accessors

	vfiler._file_exists = func(path:String)->bool:
		return file_accessors.any(func(vfs:VFileAccess)->bool:
			return vfs.file_exists(path))

	vfiler._get_files_at = func(path:String)->Array[String]:
		var accum:Array[String] = []
		accum.assign(
			file_accessors.reduce(
				(func _merge_files(total:Array, current:VFileAccess)->Array:
					var res:Array = total.duplicate()
					for file:String in current.get_files_at(path):
						if not file in total:
							res.append(file)
					return res) ,[]))
		return accum

	vfiler._get_buffer = func(path:String)->PackedByteArray:
		for vfs:VFileAccess in file_accessors:
			if vfs.file_exists(path):
				return vfs.get_buffer(path)
		push_error("File '%s' not found in any checked accessors." % path)
		return PackedByteArray()

	vfiler._write_file = func _zip_write(_path:String, _buffer:PackedByteArray)->Error:
		return ERR_FILE_CANT_WRITE

	vfiler._close = func()->void:
		file_accessors.map(func(vfs:VFileAccess)->void: vfs.close())

	return vfiler


## A basic file accessor.
static func create_file_access(
	root:String = "./",
	support_files:Dictionary[String,Callable] = VFileAccess.IMPORTS.DEFAULT_SUPPORTED_FILES
)->VFileAccess:
	var vfiler := VFileAccess.new(root, support_files)

	vfiler._get_buffer = FileAccess.get_file_as_bytes # thanks for being static

	vfiler._file_exists = FileAccess.file_exists # thanks for being static

	vfiler._write_file = func _file_access_write(path:String, buffer:PackedByteArray)->void:
		var abs_path := vfiler.get_absolute_path(path)
		if not DirAccess.dir_exists_absolute(abs_path):
			DirAccess.make_dir_recursive_absolute(abs_path.get_base_dir())
		var f := FileAccess.open(abs_path, FileAccess.WRITE_READ)
		f.store_buffer(buffer)
		f.close()

	vfiler._get_files_at = func(path:String)->Array[String]:
		var paths:Array[String] = []
		#var abs_path := vfiler.get_absolute_path(path)
		if not DirAccess.dir_exists_absolute(path):
			print("Path '%s' not found. Adding..." % path)
			DirAccess.make_dir_recursive_absolute(path)
		paths.assign(DirAccess.get_files_at(path))
		return paths

	return vfiler


static func create_writeonly_zip_access(
	zip_path:String,
	append := ZIPPacker.ZipAppend.APPEND_CREATE,
)->VFileAccess:
	var vfiler := VFileAccess.new()
	if not is_instance_valid(vfiler): return null

	var writer := ZIPPacker.new()
	# Open once to verify it exists and works.
	var _open_err := writer.open(zip_path, append)
	#if open_err != OK:
		#push_error("Could not open zip writer '%s'" % error_string(open_err))
		#return null
	#writer.close()

	vfiler._get_stuff = func()->Variant: return writer

	vfiler._write_file = func _zip_write(path:String, buffer:PackedByteArray)->Error:
		var abs_path := vfiler.get_absolute_path(path)
		writer.start_file(path)
		var write_err := writer.write_file(buffer)
		writer.close_file()
		return write_err

	vfiler._get_buffer = func(_path:String)->PackedByteArray:
		push_error("Attempted to read from write-only zip access! Returning 0 bytes.")
		return PackedByteArray()

	vfiler._get_files_at = func(_path:String)->Array[String]:
		push_error("Attempted to get files from write-only zip access!")
		return []

	vfiler._close = func()->void: writer.close()

	return vfiler



## A single zip file accessor, but with write functions disabled.
## TODO option to keep zips open, for OS to flag files as in-use.
static func create_readonly_zip_access(
	zip_path:String, keep_open:bool = false,
	support_files:Dictionary[String,Callable] = VFileAccess.IMPORTS.DEFAULT_SUPPORTED_FILES
)->VFileAccess:
	var vfiler := create_bulk_readonly_zip_access([zip_path], keep_open, support_files)
	vfiler._get_stuff = func()->Variant: return vfiler.get_stuff()[0]
	return vfiler

## A multi-zip readonly accessor. Allows for multiple zips to load with overrides.
## TODO option to keep zips open, for OS to flag files as in-use.
## [param if_missing_zip(String)] executes if a requested file is not found.
static func create_bulk_readonly_zip_access(
	zip_paths:Array[String], keep_open:bool = false,
	support_files:Dictionary[String,Callable] = VFileAccess.IMPORTS.DEFAULT_SUPPORTED_FILES,
	if_missing_zip := func _ignore(_zip_path:String)->bool: return false,
)->VFileAccess:
	var vfiler := VFileAccess.new("", support_files)

	var readers:Array[ZIPReader] = []
	readers.assign(zip_paths.map(
		func _open_zip_reader(zip_path:String)->ZIPReader:
			var zip := ZIPReader.new()
			if not FileAccess.file_exists(zip_path): # Use VFileAccess recursively so we are more powerful?
				if_missing_zip.call(zip_path)
			var open_err := zip.open(zip_path)
			if open_err:
				push_error("Could not open zip '%s': %s" % [zip_path, error_string(open_err)])
				return null
			return zip))

	readers.assign(readers.filter(is_instance_valid))

	vfiler._get_stuff = func()->Variant: return readers

	# file exists set first so we can bulk-check later
	vfiler._file_exists = func(abs_path:String)->bool:
		return readers.any(
			func(z:ZIPReader)->bool: return z.file_exists(abs_path))

	vfiler._get_buffer = func(abs_path:String)->PackedByteArray:
		var output := PackedByteArray()
		if not vfiler.file_exists(abs_path):
			push_error("Zips don't have file id '%s'" % abs_path)
			return output
		for index:int in readers.size():
			var z := readers[index]
			if not z.file_exists(abs_path): continue
			output = z.read_file(abs_path)
			break
		return output

	vfiler._write_file = func _zip_write(_path:String, _buffer:PackedByteArray)->Error:
		return ERR_FILE_CANT_WRITE
	vfiler._close = func()->void:
		readers.map(func(z:ZIPReader)->void: z.close())

	vfiler._get_files_at = func(path:String)->Array[String]:
		if not path.ends_with("/"): # TODO ensure no // as well because that can be problem.
			path += "/" # sorry for the mutation
		var accum:Array = []
		accum.assign( readers.reduce(
			(func _merge_files(total:Array, current:ZIPReader)->Array:
				var res:Array = total.duplicate()
				var filtered:Array = []
				filtered.assign(current.get_files())
				filtered = filtered.filter(
					func _filter_dir(curpath:String)->bool:
						return curpath.begins_with(path))
				for file:String in filtered:
					var stripped_file:String = file.replace(path, "")
					# remove files with / after the file stripping to prevent directories.
					if not file in total and not stripped_file.contains("/"):
						res.append(file)
				return res) ,[]))
		var out_accum:Array[String] = []
		out_accum.assign(accum)
		return out_accum

	return vfiler
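Taken together, these factories are meant to compose. A small sketch under the assumption that an override zip and a loose ./assets/ folder exist alongside the base pack; none of these paths ship with the commit.

# Sketch: layer an override zip over the base pack, with loose files on disk as a
# final fallback. All three paths are illustrative assumptions.
func _open_layered_assets()->VFileAccess:
	var zip_paths:Array[String] = ["./assets_override.zip", "./assets.zip"]
	var zips := VFileAccess.CREATE.create_bulk_readonly_zip_access(zip_paths)
	var loose := VFileAccess.CREATE.create_file_access("./assets/")
	var sources:Array[VFileAccess] = [zips, loose]
	# The meta accessor asks each source in order, so earlier entries win.
	return VFileAccess.CREATE.create_meta_accessor(sources)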
vfs/vfs.gd (new file, 179 lines)
@@ -0,0 +1,179 @@
## Virtual File Access
## [br]
## File access across different methods sucks.
## Here's something to abstract that away and suck a little less.
## [br]
## Written for Godot 4.4

class_name VFileAccess extends RefCounted

## Built-in VFileAccess factories (Learn to make your own with these)
const CREATE := preload("create_vfs.gd")
## File importer functions.
const IMPORTS := preload("vfs_loaders.gd")

## Path prefix. How this is handled depends on the file loader itself.
var root:String = ""

## Callable(bytes:PackedByteArray, [...])->Variant
var supported_files:Dictionary[String,Callable] = {}

## Get any stuff we might be using in our closures.
var _get_stuff:Callable = func()->Variant: return null
## How can we write to a file?
var _write_file:Callable = func(_abs_path:String, data:Variant)->Error:
	return ERR_CANT_OPEN
## How do we get bytes from this?
var _get_buffer:Callable = func(_abs_path:String)->PackedByteArray:
	return PackedByteArray()
## What determines if a file exists?
var _file_exists:Callable = func(_abs_path:String)->bool:
	return false
## How do we get files at a subdirectory?
var _get_files_at:Callable = func(_abs_path:String)->Array[String]:
	return []
## Shutdown code here.
var _close:Callable = func()->void: pass

## Default
func _init(
	root_dir:String = "./",
	support_files = IMPORTS.DEFAULT_SUPPORTED_FILES
)->void:
	self.root = root_dir
	self.supported_files = support_files.duplicate(true)

#region Static ops

static func copy_file(from:VFileAccess, to:VFileAccess)->bool:
	push_warning("Copy file unimplemented")
	return false

#endregion

## I never won awards for naming things correctly.
func get_stuff()->Variant:
	return _get_stuff.call()

## Add automatic file loaders to this virtual file system.
func add_supported_files(file_loaders:Dictionary[String,Callable])->Dictionary[String,bool]:
	var results:Dictionary[String,bool] = {}
	for key:String in file_loaders.keys():
		var loader := file_loaders[key]
		results[key] = false
		if not IMPORTS.validate_loader(loader): continue
		supported_files[key] = loader
		results[key] = true
	return results


## Get all files supported for autoloading.
func get_supported_files()->Array[String]:
	return supported_files.keys()


func load_supported_bulk(
	paths:Array[String],
	ext_override:String = ""
)->Array[Variant]:
	return paths.map(load_supported.bind(ext_override))


## Load a supported file. If [param path]'s extension matches a supported file,
## it will use that loader and return whatever it's supposed to.
## [param ext_override] allows for selecting a specific loader by key.
## Returns null if no matching loader is found.
func load_supported(path:String, ext_override:String = "")->Variant:
	var abs_path := get_absolute_path(path)
	var ext := abs_path.get_extension() \
		if ext_override.is_empty() else ext_override

	if not ext in supported_files.keys():
		push_error("File extension '%s' not supported by this loader!" % ext)
		return null

	var buffer := get_buffer(path)
	var result:Variant = supported_files[ext].call(buffer)
	if not is_instance_valid(result) and result is not String:
		push_warning("%s loader tried loading '%s' but received null. Does the file exist?"
			% [ext, abs_path])

	return result


## Like load_supported, but expects [param path] to not have an extension.
## Instead, [param extensions] can contain multiple extensions, which will be
## appended and checked to exist. This allows for prioritizing one format
## over another or loosely defining a file id and allowing the engine to
## find a first match.
func load_any_supported(base_path:String, extensions:Array[String] = [])->Variant:
	var _append_ext := func _append_ext(ext:String)->String:
		return (base_path + ext) \
			if ext.begins_with(".") \
			else (base_path + "." + ext)
	var try_paths:Array = extensions.map(_append_ext)
	for path:String in try_paths:
		if not file_exists(path): continue
		return load_supported(path)
	push_error("Could not find any file for '%s'.%s!" % [base_path, extensions])
	return null


## [param parse] Callable(buffer:PackedByteArray)->Variant
func load_and_parse(path:String, parse:Callable)->Variant:
	var abs_path:String = get_absolute_path(path)
	var buffer:PackedByteArray = get_buffer(abs_path)
	var result:Variant = parse.call(buffer)
	return result


## Get bytes from this file loader.
func get_buffer(path:String)->PackedByteArray:
	return get_multiple_buffers([path])[path]


## Get multiple file buffers.
## Returns Dictionary[String,PackedByteArray]
## which uses the path given as the key for the respective data buffer.
func get_multiple_buffers(paths:Array[String])->Dictionary[String,PackedByteArray]:
	var output:Dictionary[String,PackedByteArray] = {}
	for path:String in paths:
		var abs_path := get_absolute_path(path)
		if _file_exists.call(abs_path):
			output[path] = _get_buffer.call(abs_path)
			if output[path].size() == 0:
				push_warning("File '%s' found but is 0 bytes long. Intentional?" % abs_path)
		else:
			push_error("File '%s' does not exist!" % abs_path)
			output[path] = PackedByteArray()
	return output


## Get files in a directory
func get_files_at(path:String)->Array[String]:
	var abs_path := get_absolute_path(path)
	return _get_files_at.call(abs_path)


## Write buffer to file, if possible.
func write_file(path:String, buffer:PackedByteArray)->Error:
	return _write_file.call(path, buffer)


## Deallocate stuff.
func close()->void:
	_close.call()


## Check if this file exists within this loader.
## If [param ignore_root] is true, file root will not be prefixed to [param path]
func file_exists(path:String, ignore_root := false)->bool:
	return _file_exists.call(path) \
		if ignore_root else _file_exists.call(get_absolute_path(path))


## Gets the internal absolute path based on this filesystem's root.
func get_absolute_path(relative_path:String)->String:
	if root.is_empty() or root.ends_with("/"):
		return root + relative_path
	return "%s/%s" % [root, relative_path]
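To make the load_supported/load_any_supported split concrete, here is a short sketch of looking up a file id without committing to one container format; the id "click" and the extension priority are example values only, not assets in this commit.

# Sketch: first extension in the list that exists inside assets.zip wins.
func _load_click_sound()->Variant:
	var vfs := VFileAccess.CREATE.create_readonly_zip_access("./assets.zip")
	var exts:Array[String] = ["ogg", "wav", "mp3"]
	var stream:Variant = vfs.load_any_supported("click", exts)
	vfs.close()
	return stream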
vfs/vfs_loaders.gd (new file, 71 lines)
@@ -0,0 +1,71 @@


static func validate_loader(loader:Callable)->bool:
	if loader.get_unbound_arguments_count() != 1:
		push_error("Cannot add loader for %s as only one String arg can be taken.")
		return false
	elif loader.get_bound_arguments()[0] is not String \
		or loader.get_bound_arguments()[0] is not StringName:
		push_error("Cannot add loader for %s as only one String arg can be taken.")
		return false
	return true


static func load_bin(buffer:PackedByteArray)->PackedByteArray: return buffer

static func load_png(buffer:PackedByteArray)->Image:
	var img := Image.new()
	img.load_png_from_buffer(buffer)
	return img

static func load_jpg(buffer:PackedByteArray)->Image:
	var img := Image.new()
	img.load_jpg_from_buffer(buffer)
	return img

static func load_mp3(buffer:PackedByteArray)->AudioStreamMP3:
	var sfx := AudioStreamMP3.new()
	sfx.data = buffer
	return sfx

static func load_ogg(buffer:PackedByteArray)->AudioStreamOggVorbis:
	return AudioStreamOggVorbis.load_from_buffer(buffer)

static func load_txt(buffer:PackedByteArray)->String:
	var txt:String = buffer.get_string_from_utf8()
	return txt

static func load_wav(buffer:PackedByteArray)->AudioStreamWAV:
	var sfx := AudioStreamWAV.new()
	sfx.load_from_buffer(buffer)
	return sfx

# can't use CONST since Callables are technically instanced dynamically

static var DEFAULT_SUPPORTED_FILES:Dictionary[String,Callable] = {
	"bin": load_bin,
	"txt": load_txt,
	"png": load_png,
	"jpg": load_jpg,
	"jpeg": load_jpg,
	"mp3": load_mp3,
	"ogg": load_ogg,
	"wav": load_wav,
}

static var DATA_FILES:Dictionary[String,Callable] = {
	"bin": load_bin,
	"txt": load_txt,
}

static var IMAGE_FILES:Dictionary[String,Callable] = {
	"png": load_png,
	"jpg": load_jpg,
	"jpeg": load_jpg,
}

static var AUDIO_FILES:Dictionary[String,Callable] = {
	"mp3": load_mp3,
	"ogg": load_ogg,
	"wav": load_wav,
}
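Formats not covered here can be added by handing a custom loader table to one of the factories (add_supported_files plus validate_loader is the runtime path the commit provides for the same purpose). The JSON loader below is an assumption used for illustration; nothing in this commit ships it.

# Hypothetical extension: teach the pack to parse .json entries as well.
static func load_json(buffer:PackedByteArray)->Variant:
	var json := JSON.new()
	if json.parse(buffer.get_string_from_utf8()) != OK:
		push_error("Bad JSON: %s" % json.get_error_message())
		return null
	return json.data

static func open_pack_with_json(zip_path:String)->VFileAccess:
	var loaders:Dictionary[String,Callable] = {}
	loaders.merge(VFileAccess.IMPORTS.DEFAULT_SUPPORTED_FILES)
	loaders["json"] = load_json
	return VFileAccess.CREATE.create_readonly_zip_access(zip_path, false, loaders)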