I dunno what I was doing.
commit c9e753d289
parent 39282b1a38
@@ -207,25 +207,44 @@ func get_runtime_python_executable_godot_path() -> String:
 func get_runtime_python_executable_system_path() -> String:
 	return ProjectSettings.globalize_path(get_runtime_python_executable_godot_path())
 
+func get_cache_status() -> Dictionary:
+	var cache_status = {}
+	var cache_path_godot : String = _get_cache_path_godot()
+	var cache_status_filename : String = cache_path_godot.path_join(".completed_unpack")
+	if FileAccess.file_exists(cache_status_filename):
+		var cache_status_json : String = FileAccess.get_file_as_string(cache_status_filename)
+		cache_status = JSON.parse_string(cache_status_json)
+	return cache_status
+
+func write_cache_status(cache_status : Dictionary):
+	var cache_path_godot : String = _get_cache_path_godot()
+	var cache_status_filename : String = cache_path_godot.path_join(".completed_unpack")
+	var cache_status_json = JSON.stringify(cache_status)
+	var cache_status_file : FileAccess = FileAccess.open(cache_status_filename, FileAccess.WRITE)
+	cache_status_file.store_string(cache_status_json)
+	cache_status_file.close()
+
 func unpack_python(overwrite : bool = false):
 
 	var cache_path_godot : String = _get_cache_path_godot()
 
-	# FIXME: !!! THIS CAN END UP WITH PARTIAL INSTALLS !!!
-	# Check to see if the Python executable already exists. If it does, we might
-	# just skip unpacking.
-	var python_executable_expected_path : String = \
-		get_runtime_python_executable_godot_path()
-	if not overwrite:
-		if FileAccess.file_exists(python_executable_expected_path):
-			return
-
 	# Open archive.
 	var python_archive_path : String = _detect_archive_for_runtime()
 	var reader : KiriTARReader = KiriTARReader.new()
 	var err : Error = reader.open(python_archive_path)
 	assert(err == OK)
 
+	var cache_status_filename : String = cache_path_godot.path_join(".completed_unpack")
+
+	# Check to see if we've marked this as completely unpacked.
+	var tar_hash : String = reader.get_tar_hash()
+	var cache_status : Dictionary = get_cache_status()
+	if not overwrite:
+		if cache_status.has("completed_install_hash"):
+			if cache_status["completed_install_hash"] == tar_hash:
+				# This appears to already be completely unpacked.
+				return
+
 	# Get files.
 	var file_list : PackedStringArray = reader.get_files()
 
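For context on how the new helpers are meant to fit together: unpack_python() now compares the archive's tar hash against the `.completed_unpack` marker that get_cache_status()/write_cache_status() round-trip as JSON. A minimal caller sketch, illustrative only (`python_wrangler` is an assumed reference to this script, not something defined in this commit):

	# Illustrative sketch; `python_wrangler` is an assumed reference to this script.
	func _ensure_python_ready():
		# Skips unpacking when the recorded hash matches the archive's tar hash.
		python_wrangler.unpack_python()

		# The marker is just a Dictionary stored as JSON, e.g.
		#   {"completed_install_hash": "<sha256 hex of the tar>"}
		var status : Dictionary = python_wrangler.get_cache_status()
		print("Unpacked marker present: ", status.has("completed_install_hash"))
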
@@ -233,6 +252,11 @@ func unpack_python(overwrite : bool = false):
 	for relative_filename : String in file_list:
 		reader.unpack_file(cache_path_godot, relative_filename)
 
+	# Mark this as completely unpacked.
+	print("Writing unpacked marker.")
+	cache_status["completed_install_hash"] = tar_hash
+	write_cache_status(cache_status)
+
 # TODO: Clear cache function. Uninstall Python, etc.
 
 func get_extra_scripts_list() -> Array:
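The `# TODO: Clear cache function` above is left unimplemented by this commit. One possible shape, using only the existing helpers plus OS.move_to_trash(); the function name and removal strategy here are assumptions, not part of the change:

	# Hypothetical sketch; not part of this commit.
	func clear_python_cache():
		var cache_path_godot : String = _get_cache_path_godot()
		if DirAccess.dir_exists_absolute(cache_path_godot):
			# Trashing the cache directory also removes the .completed_unpack
			# marker, so the next unpack_python() starts from scratch.
			OS.move_to_trash(ProjectSettings.globalize_path(cache_path_godot))
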
@@ -35,8 +35,10 @@ class TarFileRecord:
 	var type_indicator : String
 
 var _internal_file_list = []
+var _internal_file_list_indices = {} # Map filename -> index in _internal_file_list
 var _reader : ZIPReader = null
 var _tar_file_cache : PackedByteArray = []
+var _tar_file_hash : PackedByteArray = []
 
 func _load_record(record : TarFileRecord) -> PackedByteArray:
 	load_cache()
@@ -121,6 +123,9 @@ func get_files() -> PackedStringArray:
 		ret.append(record.filename)
 	return ret
 
+func get_tar_hash():
+	return _tar_file_hash.hex_encode()
+
 func open(path: String) -> Error:
 
 	assert(not _reader)
@@ -133,6 +138,14 @@ func open(path: String) -> Error:
 
 	load_cache()
 
+	# Hash it.
+	print("Computing tar hash...")
+	var hashing : HashingContext = HashingContext.new()
+	hashing.start(HashingContext.HASH_SHA256)
+	hashing.update(_tar_file_cache)
+	_tar_file_hash = hashing.finish()
+	print("Done computing tar hash.")
+
 	var tar_file_offset = 0
 	var zero_filled_record_count = 0
 	var zero_filled_record : PackedByteArray = []
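open() hashes the whole cached tar with a single update() call. HashingContext also accepts repeated updates before finish(), so the same digest could be computed in chunks if the cache ever stops being held fully in memory. A sketch under that assumption (the chunk size is arbitrary):

	# Illustrative chunked variant of the hashing added above.
	var hashing : HashingContext = HashingContext.new()
	hashing.start(HashingContext.HASH_SHA256)
	var chunk_size : int = 1024 * 1024
	var offset : int = 0
	while offset < _tar_file_cache.size():
		var end : int = min(offset + chunk_size, _tar_file_cache.size())
		hashing.update(_tar_file_cache.slice(offset, end))
		offset = end
	var digest_hex : String = hashing.finish().hex_encode()  # Same value get_tar_hash() returns.
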
@@ -242,6 +255,7 @@ func open(path: String) -> Error:
 			tar_record.link_destination = merged_paxheader["linkpath"]
 
 		# Add it to our record list.
+		_internal_file_list_indices[tar_record.filename] = len(_internal_file_list)
 		_internal_file_list.append(tar_record)
 
 	return OK
@@ -259,6 +273,12 @@ func read_file(path : String, case_sensitive : bool = true) -> PackedByteArray:
 
 	return []
 
+func _convert_permissions(tar_mode_str : String) -> FileAccess.UnixPermissionFlags:
+	# Okay so this turned out to be easier than I thought. Godot's
+	# UnixPermissionFlags line up with the actual permission bits in the tar.
+	return _octal_str_to_int(tar_mode_str)
+
+
 # Extract a file to a specific path. Sets permissions when possible, handles
 # symlinks and directories. Will extract to the dest_path plus the internal
 # relative path.
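_octal_str_to_int() already exists elsewhere in this file and is not shown in this diff. The conversion works because tar stores the mode as an octal ASCII string and Godot's FileAccess.UnixPermissionFlags uses the same bit layout (for example, "0000755" is 0o755: owner rwx, group and other r-x). A hypothetical stand-in with the same contract, for illustration only:

	# Hypothetical equivalent of _octal_str_to_int; the real one lives elsewhere in this file.
	func _octal_str_to_int_sketch(octal_str : String) -> int:
		var value : int = 0
		for c in octal_str.strip_edges():
			if c < "0" or c > "7":
				continue  # Tolerate NUL/space padding found in tar header fields.
			value = value * 8 + (c.unicode_at(0) - "0".unicode_at(0))
		return value
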
@@ -266,67 +286,95 @@ func read_file(path : String, case_sensitive : bool = true) -> PackedByteArray:
 # Example:
 # dest_path: "foo/bar", filename: "butts/whatever/thingy.txt"
 # extracts to: "foo/bar/butts/whatever/thingy.txt"
-func unpack_file(dest_path : String, filename : String, overwrite : bool = false):
+func unpack_file(dest_path : String, filename : String, force_overwrite : bool = false):
 	var full_dest_path : String = dest_path.path_join(filename)
 	DirAccess.make_dir_recursive_absolute(full_dest_path.get_base_dir())
 
-	for record : TarFileRecord in _internal_file_list:
-		if record.filename.is_absolute_path():
-			# hmmmmmmmmmmmmmm
-			assert(false)
-			continue
-
-		if record.filename.simplify_path().begins_with(".."):
-			assert(false)
-			continue
-
-		# FIXME: There are probably a million other ways to do directory
-		# traversal attacks.
-
-		if record.filename == filename:
-
-			# FIXME: Somehow this is slower than just overwriting the file.
-			# Awesome. /s
-			if overwrite == false and FileAccess.file_exists(full_dest_path):
-				continue
-
-			if record.is_link:
-
-				# Okay, look. I know that symbolic links technically exist on
-				# Windows, but they're messy and hardly ever used. FIXME later
-				# if for some reason you need to support that. -Kiri
-				assert(OS.get_name() != "Windows")
-
-				# Fire off a command to make a symbolic link on *normal* OSes.
-				var err = OS.execute("ln", [
-					"-s",
-					record.link_destination,
-					ProjectSettings.globalize_path(full_dest_path)
-				])
-
-				assert(err != -1)
-
-			elif record.is_directory:
-
-				# It's just a directory. Make it.
-				DirAccess.make_dir_recursive_absolute(full_dest_path)
-
-			else:
-
-				# Okay this is an actual file. Extract it.
-				var file_data : PackedByteArray = read_file(record.filename)
-				var out_file = FileAccess.open(full_dest_path, FileAccess.WRITE)
-				out_file.store_buffer(file_data)
-				out_file.close()
-
-			# Set permissions (on normal OSes, not Windows). I don't think this
-			# applies to symlinks, though.
-			if not record.is_link:
-				if OS.get_name() != "Windows":
-					var err = OS.execute("chmod", [
-						record.mode,
-						ProjectSettings.globalize_path(full_dest_path) ])
-					assert(err != -1)
+	assert(_internal_file_list_indices.has(filename))
+	var record : TarFileRecord = _internal_file_list[_internal_file_list_indices[filename]]
+
+	# FIXME: There are probably a million other ways to do directory
+	# traversal attacks than just what we've checked for here.
+	if record.filename.is_absolute_path():
+		assert(false)
+		return
+	if record.filename.simplify_path().begins_with(".."):
+		assert(false)
+		return
+
+	var need_file_made : bool = true
+	var need_permission_update : bool = true
+	var exists_in_some_way : bool = FileAccess.file_exists(full_dest_path) || DirAccess.dir_exists_absolute(full_dest_path)
+
+	# Check to see if we need to make the dir/file/etc.
+	if force_overwrite == false:
+
+		if exists_in_some_way:
+
+			# Link exists. Don't overwrite.
+			if record.is_link:
+				#print("Skip (link exist): ", full_dest_path)
+				# FIXME: Check symlink destination?
+				need_file_made = false
+
+			if record.is_directory:
+				#print("Skip (dir exist): ", full_dest_path)
+				need_file_made = false
+
+			# If the file is there and it's a complete file, then we're probably
+			# done. We can't check or set mtime through Godot's API, though.
+			var f : FileAccess = FileAccess.open(full_dest_path, FileAccess.READ)
+			if f.get_length() == record.file_size:
+				#print("Skip (file exist): ", full_dest_path)
+				need_file_made = false
+			f.close()
+
+		if not record.is_link and OS.get_name() != "Windows":
+			if FileAccess.file_exists(full_dest_path) || DirAccess.dir_exists_absolute(full_dest_path):
+				var existing_permissions : FileAccess.UnixPermissionFlags = FileAccess.get_unix_permissions(full_dest_path)
+				var wanted_permissions : FileAccess.UnixPermissionFlags = _convert_permissions(record.mode)
+				if existing_permissions == wanted_permissions:
+					need_permission_update = false
+					#print("Permission are fine: ", record.mode, " ", existing_permissions, " ", full_dest_path)
+				else:
+					print("Permission update needed on existing file: ", record.mode, " ", existing_permissions, " ", full_dest_path)
+
+	if record.is_link:
+
+		# Okay, look. I know that symbolic links technically exist on
+		# Windows, but they're messy and hardly ever used. FIXME later
+		# if for some reason you need to support that. -Kiri
+		assert(OS.get_name() != "Windows")
+
+		# Fire off a command to make a symbolic link on *normal* OSes.
+		var err = OS.execute("ln", [
+			"-s",
+			record.link_destination,
+			ProjectSettings.globalize_path(full_dest_path)
+		])
+
+		assert(err != -1)
+
+	elif record.is_directory:
+
+		# It's just a directory. Make it.
+		DirAccess.make_dir_recursive_absolute(full_dest_path)
+
+	else:
+
+		# Okay this is an actual file. Extract it.
+		var file_data : PackedByteArray = read_file(record.filename)
+		var out_file = FileAccess.open(full_dest_path, FileAccess.WRITE)
+		out_file.store_buffer(file_data)
+		out_file.close()
+
+	# Set permissions (on normal OSes, not Windows). I don't think this
+	# applies to symlinks, though.
+	if not record.is_link:
+		if need_permission_update:
+			if OS.get_name() != "Windows":
+				var err : Error = FileAccess.set_unix_permissions(
+					full_dest_path, _convert_permissions(record.mode))
+				assert(err != -1)
 
 #endregion
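With the rework above, unpack_file() looks up a single record through _internal_file_list_indices and skips links, directories, and size-matching files that already exist unless force_overwrite is passed. A minimal caller sketch, illustrative only (the paths below are placeholders):

	# Illustrative only; the paths are placeholders.
	var reader : KiriTARReader = KiriTARReader.new()
	if reader.open("res://path/to/python_archive") == OK:
		for relative_filename : String in reader.get_files():
			# Pass true as the third argument to force re-extraction even
			# when the existing file's size already matches the tar record.
			reader.unpack_file("user://python_cache", relative_filename)
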
@@ -11,14 +11,16 @@ Done:
 x Remove xterm dependency, or make it like a debug-only thing.
 x Test on WINE/Windows.
 x First-time setup of requirements (pip, etc).
+x Deal with interrupted setup operations
+  x We check for the python.exe file in a given setup location to see if
+    we need to unpack stuff, but what if that exists but the setup was
+    interrupted and we're missing files?
+  x Deal with bad state after interrupted unpacking operation
 
 The big ones:
 
-- Deal with interrupted setup operations
-  - We check for the python.exe file in a given setup location to see if
-    we need to unpack stuff, but what if that exists but the setup was
-    interrupted and we're missing files?
+- Add some kind of progress bar, or API for progress tracking, for the unpacking.
+- Progress bar or API for progress tracking for pip installs.
+- Maybe we should parse the pip requirements.txt and also set up an API for calling pip install.
 - Documentation.
   - how to use .kiri_export_python
 