Compare commits

...

8 Commits

Author SHA1 Message Date
c9e753d289 I dunno what I was doing. 2024-07-21 19:25:19 -07:00
39282b1a38 After-stream commit. 2024-07-15 07:39:58 -07:00
f0cb6f7c1f More stream work. 2024-07-14 18:40:49 -07:00
08b8cdcf14 Working on un-threading the packetsocket. 2024-07-14 16:23:39 -07:00
53e4be0439 Pre-stream commit. 2024-07-14 14:49:44 -07:00
4cad6a574f More stuff. 2024-07-14 12:58:20 -07:00
bdd56c30d1 Moving stuff around. 2024-07-14 10:23:19 -07:00
5d3b74d798 Work from last night. 2024-07-14 08:54:08 -07:00
34 changed files with 1749 additions and 491 deletions

View File

@ -43,7 +43,7 @@ func _process(_delta):
wrapper_instance.poll()
#print(wrapper_instance.get_status())
wrapper_instance.call_rpc_async("func_to_call", [12345], func(stuff):
wrapper_instance.call_rpc_callback("func_to_call", [12345], func(stuff):
print(stuff.response)
)

View File

@ -1,3 +1,6 @@
# FIXME: Remove this. I think we can nuke this entire class because we don't
# need it anymore.
# KiriJSONRPC
#
# This just wraps JSONRPC and adds a little more sanity-checking, like

View File

View File

@ -0,0 +1,5 @@
mediapipe == 0.10.14
numpy == 1.26.0
psutil == 5.9.7
cv2-enumerate-cameras == 1.1.10

View File

@ -0,0 +1,10 @@
#!/usr/bin/python3
import time
a = 10
while a > 0:
    print("asdf: ", a)
    a -= 1
    time.sleep(1)

View File

@ -0,0 +1,13 @@
#!/usr/bin/python3
import time
def some_function_to_call():
    print("butts")
    a = 5
    while a > 0:
        a -= 1
        print(a)
        time.sleep(1.0)

6
TarTest.tscn Normal file
View File

@ -0,0 +1,6 @@
[gd_scene load_steps=2 format=3 uid="uid://dfgfueotq2kt6"]
[ext_resource type="Script" path="res://TestPythonInExport.gd" id="1_o4sdc"]
[node name="TarTest" type="Node"]
script = ExtResource("1_o4sdc")

67
TestPythonInExport.gd Normal file
View File

@ -0,0 +1,67 @@
extends Node
var pw : KiriPythonWrapperInstance
func _ready():
#pw = KiriPythonWrapperInstance.new(
#"/storage/git2/GodotJSONRPCTest/addons/KiriPythonRPCWrapper/KiriPythonRPCWrapper/test_module/__init__.py")
#pw.setup_python()
#pw.start_process()
#var ret = pw.call_rpc_sync("func_to_call", ["test string whatever blah"])
#print(ret)
#print("Calling the thing")
#butt("nkc sckccsd scaskcmasklcms")
#print("Done calling the thing:" )
#asdfblah.emit()
#pw.stop_process()
#
pw = KiriPythonWrapperInstance.new(
"res://SomePythonThingy/test_a_thing.py")
pw.setup_python()
#print("running command...")
#var utility_script_path = pw.convert_cache_item_to_real_path(
#"res://SomePythonThingy/some_utility_script.py")
#var r1 = pw.run_python_command([utility_script_path], [], true)
#print("done running command... ", r1)
print("running command...")
var requirements_txt_path = pw.convert_cache_item_to_real_path(
"res://SomePythonThingy/requirements.txt")
var output_array = []
var r1 = pw.run_python_command(
["-m", "pip", "install", "-r", requirements_txt_path],
output_array, true, true)
print("done running command... ", r1)
print("OUTPUT WAS:", output_array[0])
pw.start_process(true)
print("Call thingy...")
var ret = await pw.call_rpc_async("some_function_to_call", [])
print("Done thingy...")
print(ret)
func _process(delta):
#if pw.get_status() == \
#KiriPythonWrapperInstance.KiriPythonWrapperStatus.STATUS_RUNNING:
#print("poll")
pw.poll()
#if pw.get_status() == \
#KiriPythonWrapperInstance.KiriPythonWrapperStatus.STATUS_STOPPED:
#print("IT STOPPED")
signal asdfblah
func butt(asdf):
print("Butt1: ", asdf)
await asdfblah
print("Butt2: ", asdf)

View File

@ -0,0 +1,58 @@
# Python build export plugin
#
# This just makes sure that the specific Python build for whatever platform we
# need gets bundled into the build for that platform, so that it can be unpacked
# and used later by KiriPythonBuildWrangler.
@tool
extends EditorExportPlugin
class_name KiriPythonBuildExportPlugin
func _get_name() -> String:
return "KiriPythonBuildExportPlugin"
func _export_begin(
features : PackedStringArray, is_debug : bool,
path : String, flags : int):
var build_wrangler : KiriPythonBuildWrangler = KiriPythonBuildWrangler.new()
var platform_list = []
var arch_list = []
if "linux" in features:
platform_list.append("Linux")
if "windows" in features:
platform_list.append("Windows")
if "x86_64" in features:
arch_list.append("x86_64")
# TODO: Other platforms (macos)
for platform in platform_list:
for arch in arch_list:
var archive_to_export = build_wrangler._detect_archive_for_build(platform, arch)
var file_contents : PackedByteArray = FileAccess.get_file_as_bytes(archive_to_export)
add_file(archive_to_export, file_contents, false)
# Make sure all the RPC wrapper scripts make it in.
var script_path : String = get_script().resource_path
var script_dir : String = script_path.get_base_dir()
# Actually add all the files.
var extra_python_files = build_wrangler.get_extra_scripts_list()
for extra_python_file : String in extra_python_files:
var file_bytes : PackedByteArray = FileAccess.get_file_as_bytes(extra_python_file)
add_file(extra_python_file, file_bytes, false)
# Add the list of Python files as its own manifest file so we know what to
# extract later to make them visible to Python.
var python_wrapper_manifest_str : String = JSON.stringify(extra_python_files, " ")
var python_wrapper_manifest_bytes : PackedByteArray = \
python_wrapper_manifest_str.to_utf8_buffer()
var python_wrapper_manifset_path = script_dir.path_join(
"KiriPythonWrapperPythonFiles.json")
add_file(python_wrapper_manifset_path, python_wrapper_manifest_bytes, false)

View File

@ -0,0 +1,324 @@
# Python build wrangler
#
# This handles extracting and juggling standalone Python builds per-platform.
extends RefCounted
class_name KiriPythonBuildWrangler
# Cached release info so we don't have to constantly reload the .json file.
var _python_release_info : Dictionary = {}
#region releaseinfo file interactions
func _get_python_release_info():
if _python_release_info == {}:
var this_script_path = get_script().resource_path
var this_script_dir = this_script_path.get_base_dir()
var release_info_path = this_script_dir.path_join("StandalonePythonBuilds/python_release_info.json")
_python_release_info = load(release_info_path).data
# If you hit this assert, your python_release_info.json file is probably
# missing and you missed a setup step. Check the README.
assert(_python_release_info != null)
return _python_release_info
func _get_python_version():
var info = _get_python_release_info()
var versions : Array = info["versions"]
# Sort version numbers so that the highest version is the first element.
versions.sort_custom(func(a : String, b : String):
var version_parts_a : PackedStringArray = a.split(".")
var version_parts_b : PackedStringArray = b.split(".")
for i in range(0, 3):
if int(version_parts_a[i]) > int(version_parts_b[i]):
return true
if int(version_parts_a[i]) < int(version_parts_b[i]):
return false
return false)
return versions[0]
func _get_python_release() -> String:
var info = _get_python_release_info()
return info["release"]
#endregion
#region Python archive filename wrangling
# Generate the archive filename based on what we've figured out from the release
# info, the platform, architecture, optimizations, and so on. This is just the
# filename, not including the full path.
#
# Use _generate_python_archive_full_path() to generate the full path (as a
# res:// path).
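# Example output (for version "3.12.3", release "20240415", arch "x86_64",
# os "unknown-linux-gnu", opt "pgo+lto"):
#   cpython-3.12.3+20240415-x86_64-unknown-linux-gnu-pgo+lto-full.tar.zip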
func _generate_python_archive_string(
python_version : String,
python_release : String,
arch : String,
os : String,
opt : String) -> String:
return "cpython-{python_version}+{python_release}-{python_arch}-{python_os}-{python_opt}-full.tar.zip".format({
"python_version" : python_version,
"python_release" : python_release,
"python_arch" : arch,
"python_os" : os,
"python_opt" : opt
})
# Get full path (in Godot) to the archive for a given Python build.
func _generate_python_archive_full_path(
python_version : String,
python_release : String,
arch : String,
os : String,
opt : String) -> String:
var just_the_archive_filename = _generate_python_archive_string(
python_version, python_release, arch, os, opt)
var this_script_path = get_script().resource_path
var this_script_dir = this_script_path.get_base_dir()
var python_archive_path = this_script_dir.path_join(
"StandalonePythonBuilds").path_join(just_the_archive_filename)
return python_archive_path
# os_name as it appears in the Python archive filename.
func _get_python_opt_for_os(os_name : String) -> String:
if os_name == "pc-windows-msvc-shared":
return "pgo"
# TODO: (macos)
# Linux default.
return "pgo+lto"
# Note: arch variable is output of _get_python_architecture, not whatever Godot
# returns. os_name IS what Godot returns from OS.get_name().
func _get_python_platform(os_name : String, arch : String) -> String:
var os_name_mappings : Dictionary = {
"Linux" : "unknown-linux-gnu",
"macOS" : "apple-darwin", # TODO: Test this. (macos)
"Windows" : "pc-windows-msvc-shared"
}
# Special case for armv7 Linux:
if arch == "armv7" and os_name == "Linux":
return "linux-gnueabi"
assert(os_name_mappings.has(os_name))
return os_name_mappings[os_name]
func _get_python_architecture(engine_arch : String) -> String:
var arch_name_mappings : Dictionary = {
"x86_64" : "x86_64",
"x86_32" : "i686",
"arm64" : "aarch64", # FIXME: I dunno if this is correct.
"arm32" : "armv7", # FIXME: I dunno if this is correct.
}
assert(arch_name_mappings.has(engine_arch))
return arch_name_mappings[engine_arch]
func _detect_archive_for_runtime() -> String:
var python_version : String = _get_python_version()
var python_release : String = _get_python_release()
var arch : String = _get_python_architecture(Engine.get_architecture_name())
var os_name : String = _get_python_platform(OS.get_name(), arch)
var opt = _get_python_opt_for_os(os_name)
return _generate_python_archive_full_path(
python_version, python_release,
arch, os_name, opt)
# Params are Godot's names for OSes and architectures (eg "Windows", "Linux",
# etc), not Python archive filename fields. Use things like OS.get_name().
func _detect_archive_for_build(
os_name_from_godot : String,
arch_from_godot : String) -> String:
var python_version : String = _get_python_version()
var python_release : String = _get_python_release()
var arch : String = _get_python_architecture(arch_from_godot)
var os_name : String = _get_python_platform(os_name_from_godot, arch)
var opt = _get_python_opt_for_os(os_name)
return _generate_python_archive_full_path(
python_version, python_release,
arch, os_name, opt)
#endregion
#region Cache path wrangling
# Get the cache path, relative to the user data dir.
# Example return value:
# "_python_dist/20240415/3.12.3"
func _get_cache_path_relative():
return "_python_dist".path_join(_get_python_release()).path_join(_get_python_version())
# Get the full cache path, as understood by the OS.
# Example return value:
# "/home/kiri/.local/share/godot/app_userdata/GodotJSONRPCTest/_python_dist/20240415/3.12.3/packaged_scripts"
func _get_script_cache_path_system() -> String:
return OS.get_user_data_dir().path_join(_get_cache_path_relative()).path_join("packaged_scripts")
# Get the full cache path, as understood by Godot.
# Example return value:
# "user://_python_dist/20240415/3.12.3"
func _get_cache_path_godot() -> String:
return "user://".path_join(_get_cache_path_relative())
#endregion
#region Public API
# Get the expected path to the Python executable. This is where we think it'll
# end up, not where it actually did end up. This can be called without actually
# extracting the archive. In fact, we need it to act that way because we use it
# to determine if there's already a Python install in-place.
#
# Path is a Godot path. Use ProjectSettings.globalize_path() to convert to a
# system path.
#
# Example return:
# "user://_python_dist/20240415/3.12.3/python/install/bin/python3"
func get_runtime_python_executable_godot_path() -> String:
var base_dir = _get_cache_path_godot().path_join("python/install")
if OS.get_name() == "Windows":
return base_dir.path_join("python.exe")
else:
return base_dir.path_join("bin/python3")
# TODO: Other platforms (macos).
# Get system path for the Python executable, which is what we actually need to
# use to execute it in most cases.
#
# Example return:
# "home/<user>/.local/share/godot/app_userdata/<project>/_python_dist/20240415/3.12.3/python/install/bin/python3"
func get_runtime_python_executable_system_path() -> String:
return ProjectSettings.globalize_path(get_runtime_python_executable_godot_path())
func get_cache_status() -> Dictionary:
var cache_status = {}
var cache_path_godot : String = _get_cache_path_godot()
var cache_status_filename : String = cache_path_godot.path_join(".completed_unpack")
if FileAccess.file_exists(cache_status_filename):
var cache_status_json : String = FileAccess.get_file_as_string(cache_status_filename)
cache_status = JSON.parse_string(cache_status_json)
return cache_status
func write_cache_status(cache_status : Dictionary):
var cache_path_godot : String = _get_cache_path_godot()
var cache_status_filename : String = cache_path_godot.path_join(".completed_unpack")
var cache_status_json = JSON.stringify(cache_status)
var cache_status_file : FileAccess = FileAccess.open(cache_status_filename, FileAccess.WRITE)
cache_status_file.store_string(cache_status_json)
cache_status_file.close()
func unpack_python(overwrite : bool = false):
var cache_path_godot : String = _get_cache_path_godot()
# Open archive.
var python_archive_path : String = _detect_archive_for_runtime()
var reader : KiriTARReader = KiriTARReader.new()
var err : Error = reader.open(python_archive_path)
assert(err == OK)
var cache_status_filename : String = cache_path_godot.path_join(".completed_unpack")
# Check to see if we've marked this as completely unpacked.
var tar_hash : String = reader.get_tar_hash()
var cache_status : Dictionary = get_cache_status()
if not overwrite:
if cache_status.has("completed_install_hash"):
if cache_status["completed_install_hash"] == tar_hash:
# This appears to already be completely unpacked.
return
# Get files.
var file_list : PackedStringArray = reader.get_files()
# Extract files.
for relative_filename : String in file_list:
reader.unpack_file(cache_path_godot, relative_filename)
# Mark this as completely unpacked.
print("Writing unpacked marker.")
cache_status["completed_install_hash"] = tar_hash
write_cache_status(cache_status)
# TODO: Clear cache function. Uninstall Python, etc.
func get_extra_scripts_list() -> Array:
var script_path : String = get_script().resource_path
var script_dir : String = script_path.get_base_dir()
var python_wrapper_manifset_path = script_dir.path_join(
"KiriPythonWrapperPythonFiles.json")
# If this is running an actual build, we'll just return the manifest here.
if FileAccess.file_exists(python_wrapper_manifset_path):
return load(python_wrapper_manifset_path).data
# If it's not running an actual build, we need to scan for extra Python
# files.
# First pass: Find all the .kiri_export_python markers in the entire project
# tree.
var extra_python_files : Array = []
var scan_dir_list = ["res://"]
var verified_script_bundles = []
while len(scan_dir_list):
var current_dir : String = scan_dir_list.pop_front()
var da : DirAccess = DirAccess.open(current_dir)
if da.file_exists(".kiri_export_python"):
verified_script_bundles.append(current_dir)
else:
# Add all directories to the scan list.
da.include_navigational = false
var dir_list = da.get_directories()
for dir in dir_list:
if dir == "__pycache__":
continue
scan_dir_list.append(current_dir.path_join(dir))
# Second pass: Add everything under a directory containing a
# .kiri_export_python marker.
scan_dir_list = verified_script_bundles
while len(scan_dir_list):
var current_dir : String = scan_dir_list.pop_front()
var da : DirAccess = DirAccess.open(current_dir)
# Add all directories to the scan list.
da.include_navigational = false
var dir_list = da.get_directories()
for dir in dir_list:
if dir == "__pycache__":
continue
scan_dir_list.append(current_dir.path_join(dir))
# Add all Python files.
var file_list = da.get_files()
for file in file_list:
var full_file = current_dir.path_join(file)
extra_python_files.append(full_file)
## FIXME: Remove this.
#for f in extra_python_files:
#print("Extra file: ", f)
return extra_python_files
#endregion
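A minimal usage sketch of the public API above (the version-check command is illustrative; this mirrors what KiriPythonWrapperInstance.setup_python() and start_process() do later in this diff):

var wrangler := KiriPythonBuildWrangler.new()
wrangler.unpack_python()  # Extract the bundled standalone build under user://.
var python_exe : String = wrangler.get_runtime_python_executable_system_path()
var output := []
# Illustrative sanity check: ask the unpacked interpreter for its version.
OS.execute(python_exe, ["--version"], output)
print(output)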

View File

@ -0,0 +1,15 @@
@tool
extends EditorPlugin

var python_build_export_plugin = null

func _enter_tree():
    assert(not python_build_export_plugin)
    python_build_export_plugin = KiriPythonBuildExportPlugin.new()
    add_export_plugin(python_build_export_plugin)

func _exit_tree():
    assert(python_build_export_plugin)
    remove_export_plugin(python_build_export_plugin)
    python_build_export_plugin = null

View File

@ -1,3 +1,10 @@
# KiriPacketSocket
#
# GDScript version of the KiriPacketSocket Python module. Basically just copied
# the code over and reformatted it. Error handling and some other behaviors
# differ because Python and GDScript handle exceptions (among other things)
# differently.
extends RefCounted
class_name KiriPacketSocket
@ -6,8 +13,8 @@ var _packet_buffer : KiriPacketBuffer = KiriPacketBuffer.new()
var _state : KiriSocketState = KiriSocketState.DISCONNECTED
var _outgoing_packet_queue : Array = []
var _state_lock : Mutex = Mutex.new()
var _worker_thread : Thread = null
var _worker_thread : bool = false
signal _worker_thread_should_continue
var _new_connections_to_server : Array = []
var _error_string : String = ""
@ -58,38 +65,30 @@ enum KiriSocketState {
ERROR = 5
}
func _notification(what):
if what == NOTIFICATION_PREDELETE:
assert(not _worker_thread)
func send_packet(packet_bytes : PackedByteArray):
assert(packet_bytes)
_state_lock.lock()
_outgoing_packet_queue.append(packet_bytes)
_state_lock.unlock()
func poll():
_worker_thread_should_continue.emit()
func get_next_packet():
_state_lock.lock()
poll()
var ret = _packet_buffer.get_next_packet()
_state_lock.unlock()
return ret
func get_next_server_connection():
_state_lock.lock()
poll()
var ret = null
if len(_new_connections_to_server) > 0:
ret = _new_connections_to_server.pop_front()
_state_lock.unlock()
return ret
func get_last_error():
_state_lock.lock()
var ret = _error_string
_state_lock.unlock()
return ret
func is_disconnected_or_error():
_state_lock.lock()
var bad_states = [
KiriSocketState.DISCONNECTED,
KiriSocketState.ERROR
@ -99,14 +98,10 @@ func is_disconnected_or_error():
if _state in bad_states:
ret = true
_state_lock.unlock()
return ret
func get_state():
_state_lock.lock()
var ret = _state
_state_lock.unlock()
return ret
func start_server(address):
@ -114,8 +109,10 @@ func start_server(address):
_set_state(KiriSocketState.SERVER_STARTING)
assert(not _worker_thread)
_worker_thread = Thread.new()
_worker_thread.start(_server_thread_func.bind(address))
_worker_thread = true
# Starts coroutine.
_server_thread_func(address)
func start_client(address):
@ -123,53 +120,51 @@ func start_client(address):
assert(not _worker_thread)
_worker_thread = Thread.new()
_worker_thread.start(_client_thread_func.bind(address))
_worker_thread = true
# Starts coroutine.
_client_thread_func(address)
func stop():
assert(_worker_thread)
if not _worker_thread:
return
_should_quit = true
_worker_thread.wait_to_finish()
_worker_thread = null
while _worker_thread:
_worker_thread_should_continue.emit()
_should_quit = false
func is_running():
return not (_worker_thread == null)
func _normal_communication_loop(sock : StreamPeer, address):
func _normal_communication_loop_iteration(sock : StreamPeer, address):
while not _should_quit:
if sock.poll() != OK:
return FAILED
if sock.poll() != OK:
break
if sock.get_status() != StreamPeerTCP.STATUS_CONNECTED:
return FAILED
if sock.get_status() != StreamPeerTCP.STATUS_CONNECTED:
break
# Get new data.
var available_bytes = sock.get_available_bytes()
if available_bytes > 0:
var incoming_bytes = sock.get_data(available_bytes)
_packet_buffer.add_bytes(PackedByteArray(incoming_bytes[1]))
if incoming_bytes[0] != OK:
return FAILED
# Get new data.
_state_lock.lock()
var available_bytes = sock.get_available_bytes()
if available_bytes > 0:
var incoming_bytes = sock.get_data(available_bytes)
_packet_buffer.add_bytes(PackedByteArray(incoming_bytes[1]))
if incoming_bytes[0] != OK:
break
_state_lock.unlock()
# Send all packets from queue.
while len(self._outgoing_packet_queue):
var next_outgoing_packet = _outgoing_packet_queue.pop_front()
var len_to_send = len(next_outgoing_packet)
sock.put_u8((len_to_send & 0x000000ff) >> 0)
sock.put_u8((len_to_send & 0x0000ff00) >> 8)
sock.put_u8((len_to_send & 0x00ff0000) >> 16)
sock.put_u8((len_to_send & 0xff000000) >> 24)
sock.put_data(next_outgoing_packet)
# Send all packets from queue.
_state_lock.lock()
while len(self._outgoing_packet_queue):
var next_outgoing_packet = _outgoing_packet_queue.pop_front()
var len_to_send = len(next_outgoing_packet)
sock.put_u8((len_to_send & 0x000000ff) >> 0)
sock.put_u8((len_to_send & 0x0000ff00) >> 8)
sock.put_u8((len_to_send & 0x00ff0000) >> 16)
sock.put_u8((len_to_send & 0xff000000) >> 24)
sock.put_data(next_outgoing_packet)
_state_lock.unlock()
OS.delay_usec(1)
return OK
func _client_thread_func(address):
@ -182,7 +177,13 @@ func _client_thread_func(address):
if connect_err == OK:
_set_state(KiriSocketState.CONNECTED)
_normal_communication_loop(sock, address)
while not _should_quit:
await _worker_thread_should_continue
var err = _normal_communication_loop_iteration(sock, address)
if err != OK:
break
# We are now disconnected.
_set_state(KiriSocketState.DISCONNECTED)
@ -191,10 +192,11 @@ func _client_thread_func(address):
else:
_set_state(KiriSocketState.ERROR, "Connection failed")
func _set_state(state : KiriSocketState, error_string=null):
_state_lock.lock()
_state = state
sock.close()
_worker_thread = false
func _set_state(state : KiriSocketState, error_string=null):
_state = state
if _state == KiriSocketState.ERROR:
assert(error_string)
_error_string = error_string
@ -202,12 +204,19 @@ func _set_state(state : KiriSocketState, error_string=null):
assert(not error_string)
_error_string = ""
_state_lock.unlock()
func _server_to_client_thread_func(connection : StreamPeerTCP, address):
print("_server_to_client_thread_func start")
_set_state(KiriSocketState.CONNECTED)
_normal_communication_loop(connection, address)
while not _should_quit:
await _worker_thread_should_continue
var err = _normal_communication_loop_iteration(connection, address)
if err != OK:
break
# FIXME: Missing some error handling here due to exception differences
# between Python and GDScript.
@ -217,6 +226,11 @@ func _server_to_client_thread_func(connection : StreamPeerTCP, address):
if get_state() == KiriSocketState.CONNECTED:
_set_state(KiriSocketState.DISCONNECTED)
connection.disconnect_from_host()
_worker_thread = false
print("_server_to_client_thread_func stop")
func _server_thread_func(address):
while not _should_quit:
@ -237,20 +251,37 @@ func _server_thread_func(address):
while not _should_quit:
await _worker_thread_should_continue
if sock.is_connection_available():
var connection : StreamPeerTCP = sock.take_connection()
var new_client : KiriPacketSocket = KiriPacketSocket.new()
new_client._start_client_connection_from_server(connection, address)
_state_lock.lock()
_new_connections_to_server.append(new_client)
_state_lock.unlock()
OS.delay_usec(1)
sock.stop()
sock = null
# Close all connections that were waiting to be accepted.
for c in _new_connections_to_server:
c.stop()
_new_connections_to_server = []
_worker_thread = false
func _start_client_connection_from_server(connection : StreamPeerTCP, address):
assert(not _worker_thread)
_worker_thread = Thread.new()
_worker_thread.start(_server_to_client_thread_func.bind(connection, address))
_worker_thread = true
# Coroutine call.
_server_to_client_thread_func(connection, address)
func _notification(what):
if what == NOTIFICATION_PREDELETE:
# Well, this is horrible.
if self:
if is_running():
stop()

View File

@ -0,0 +1,156 @@
#!/usr/bin/python3
import importlib.util
import sys
import argparse
import time
import json
import KiriPacketSocket
# This whole thing being in a try/except is just so we can catch
# errors and see them before the terminal window closes.
# try:
if True:
# Parse arguments
arg_parser = argparse.ArgumentParser(
prog="KiriPythonRPCWrapper",
description="Wrapper for Python modules to RPCs from Godot.",
epilog="")
arg_parser.add_argument("--script", type=str, required=True)
arg_parser.add_argument("--port", type=int, required=True)
args = arg_parser.parse_args()
# module_path = "../KiriPacketSocket/__init__.py"
# module_name = "KiriPacketSocket"
module_path = args.script
module_name = ""
# Attempt to load the module.
module_spec = importlib.util.spec_from_file_location(
module_name, module_path)
module = importlib.util.module_from_spec(module_spec)
module_spec.loader.exec_module(module)
# This will be all the functions we find in the module that don't
# start with "_".
known_entrypoints = {}
# Scan the module for "public" functions.
for entrypoint in dir(module):
# Skip anything starting with "_". Probably not meant to be
# exposed.
if entrypoint.startswith("_"):
continue
attr = getattr(module, entrypoint)
# if hasattr(attr, "__call__"):
if callable(attr):
known_entrypoints[entrypoint] = attr
# Connect to server.
packet_socket = KiriPacketSocket.PacketSocket()
packet_socket.start_client(("127.0.0.1", args.port))
while packet_socket.get_state() == packet_socket.SocketState.CONNECTING:
time.sleep(0.001)
if packet_socket.get_state() != packet_socket.SocketState.CONNECTED:
packet_socket.stop()
raise Exception("Failed to connect to RPC host.")
print("Starting packet processing.")
def send_error_response(code, message, request_id):
ret_dict = {
"jsonrpc" : "2.0",
"error" : {
"code" : code,
"message" : message
},
"id" : request_id
}
ret_dict_json = json.dumps(ret_dict)
packet_socket.send_packet(ret_dict_json.encode("utf-8"))
def send_response(result, request_id):
try:
ret_dict = {
"jsonrpc" : "2.0",
"result" : ret,
"id" : request_id
}
ret_dict_json = json.dumps(ret_dict)
packet_socket.send_packet(ret_dict_json.encode("utf-8"))
except Exception as e:
send_error_response(-32603, "Error sending result: " + str(e), request_id)
# Start processing packets.
while True:
# Shutdown when we lose connection to host.
if packet_socket.get_state() != packet_socket.SocketState.CONNECTED:
packet_socket.stop()
raise Exception("Disconnected from RPC host.")
next_packet = packet_socket.get_next_packet()
while next_packet:
this_packet = next_packet
next_packet = packet_socket.get_next_packet()
print("GOT PACKET: ", this_packet)
# FIXME: Handle batches.
# Parse the incoming dict.
try:
request_dict_json = this_packet.decode("utf-8")
request_dict = json.loads(request_dict_json)
except Exception as e:
send_error_response(-32700, "Error parsing packet: " + str(e), request_id)
continue
# Make sure all the fields are there.
try:
method = request_dict["method"]
func_args = request_dict["params"]
request_id = request_dict["id"]
except Exception as e:
send_error_response(-32602, "Missing field: " + str(e), request_id)
continue
# Make sure the method is something we scanned earlier.
try:
func = known_entrypoints[method]
except Exception as e:
send_error_response(-32601, "Method not found: " + str(e), request_id)
continue
# Call the dang function.
try:
ret = func(*func_args)
except Exception as e:
send_error_response(-32603, "Call failed: " + str(e), request_id)
continue
send_response(ret, request_id)
time.sleep(0.0001)
# except Exception as e:
# sys.stderr.write(e)
# time.sleep(5)
# raise e
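For reference, the JSON-RPC payloads carried over the packet socket (built by KiriPythonWrapperInstance.poll() on the Godot side and by send_response()/send_error_response() above) look roughly like this; the method name, id, and result values are illustrative:

# Request sent from Godot:
var example_request = {
    "jsonrpc": "2.0",
    "method": "some_function_to_call",
    "params": [],
    "id": 0
}
# Response sent back by this wrapper:
var example_response = {
    "jsonrpc": "2.0",
    "result": null,
    "id": 0
}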

View File

@ -11,3 +11,4 @@ def func_to_call(asdf):
def other_func_to_call():
print("jksdmckjsdncjksncs")

View File

@ -0,0 +1,313 @@
extends RefCounted
class_name KiriPythonWrapperInstance
enum KiriPythonWrapperStatus {
STATUS_RUNNING,
STATUS_STOPPED
}
class KiriPythonWrapperActiveRequest:
enum KiriPythonWrapperActiveRequestState {
STATE_WAITING_TO_SEND,
STATE_SENT,
STATE_RESPONSE_RECEIVED
}
var id : int
var method_name : String
var arguments : Variant # Dictionary or Array
var callback # Callable or null
var state : KiriPythonWrapperActiveRequestState
var response # Return value from the call
var error_response = ""
var _active_request_queue = {}
var _request_counter = 0
var _server_packet_socket : KiriPacketSocket = null
var communication_packet_socket : KiriPacketSocket = null
var python_script_path : String = ""
var _build_wrangler : KiriPythonBuildWrangler = null
var _external_process_pid = -1
signal _rpc_async_response_received
func _init(python_file_path : String):
_build_wrangler = KiriPythonBuildWrangler.new()
python_script_path = python_file_path
func _get_python_executable():
return _build_wrangler.get_runtime_python_executable_system_path()
func _get_wrapper_script():
# FIXME: Paths will be different for builds.
var script_path = self.get_script().get_path()
var script_dirname = script_path.get_base_dir()
return ProjectSettings.globalize_path( \
script_dirname + "/KiriPythonRPCWrapper_start.py")
func _get_wrapper_cache_path() -> String:
return _build_wrangler._get_cache_path_godot().path_join("packaged_scripts")
func _get_wrapper_script_cache_path() -> String:
return _get_wrapper_cache_path().path_join("addons/KiriPythonRPCWrapper/KiriPythonRPCWrapper/__init__.py")
func setup_python():
# Unpack base Python build.
_build_wrangler.unpack_python(false)
# Unpack Python wrapper.
var extra_scripts = _build_wrangler.get_extra_scripts_list()
print(extra_scripts)
for extra_script : String in extra_scripts:
# Chop off the "res://".
var extra_script_relative : String = extra_script.substr(len("res://"))
# Some other path wrangling.
var extraction_path : String = _get_wrapper_cache_path().path_join(extra_script_relative)
var extraction_path_dir : String = extraction_path.get_base_dir()
# Make the dir.
DirAccess.make_dir_recursive_absolute(extraction_path_dir)
# Extract the file.
var bytes : PackedByteArray = FileAccess.get_file_as_bytes(extra_script)
FileAccess.open(extraction_path, FileAccess.WRITE).store_buffer(bytes)
func get_status():
if _external_process_pid == -1:
return KiriPythonWrapperStatus.STATUS_STOPPED
if not OS.is_process_running(_external_process_pid):
return KiriPythonWrapperStatus.STATUS_STOPPED
return KiriPythonWrapperStatus.STATUS_RUNNING
func run_python_command(
args : PackedStringArray,
output : Array = [],
read_stderr : bool = false,
open_console : bool = false):
var python_exe_path : String = _get_python_executable()
# Do a little switcheroo on Linux to open a console.
if open_console:
if OS.get_name() == "Linux":
args = PackedStringArray(["-e", python_exe_path]) + args
python_exe_path = "xterm"
return OS.execute(python_exe_path, args, output, read_stderr, open_console)
func convert_cache_item_to_real_path(path : String):
var real_python_script_path = path
if real_python_script_path.begins_with("res://"):
real_python_script_path = _build_wrangler._get_script_cache_path_system().path_join(
real_python_script_path.substr(len("res://")))
else:
real_python_script_path = ProjectSettings.globalize_path(
real_python_script_path)
return real_python_script_path
func start_process(open_terminal : bool = false):
# FIXME: Make sure we don't have one running.
var open_port = 9500
var real_python_script_path = convert_cache_item_to_real_path(
python_script_path)
assert(not _server_packet_socket)
_server_packet_socket = KiriPacketSocket.new()
while true:
_server_packet_socket.start_server(["127.0.0.1", open_port])
# Wait for the server to start.
while _server_packet_socket.get_state() == KiriPacketSocket.KiriSocketState.SERVER_STARTING:
OS.delay_usec(1)
# If we're successfully listening, then we found a port to use and we
# don't need to loop anymore.
if _server_packet_socket.get_state() == KiriPacketSocket.KiriSocketState.SERVER_LISTENING:
break
# This port is busy. Try the next one.
_server_packet_socket.stop()
open_port += 1
var python_exe_path : String = _get_python_executable()
var wrapper_script_path : String = \
ProjectSettings.globalize_path(_get_wrapper_script_cache_path())
var startup_command : Array = [
python_exe_path,
wrapper_script_path,
"--script", real_python_script_path,
"--port", open_port]
if open_terminal:
if OS.get_name() == "Linux":
startup_command = ["xterm", "-e"] + startup_command
#print("startup command: ", startup_command)
_external_process_pid = OS.create_process(
startup_command[0], startup_command.slice(1),
open_terminal)
#print("external process: ", _external_process_pid)
func stop_process():
if _external_process_pid != -1:
OS.kill(_external_process_pid)
_external_process_pid = -1
# Clean up server and communication sockets.
if _server_packet_socket:
_server_packet_socket.stop()
_server_packet_socket = null
if communication_packet_socket:
communication_packet_socket.stop()
communication_packet_socket = null
func call_rpc_callback(method : String, args : Variant, callback = null) -> int:
assert((args is Dictionary) or (args is Array))
assert((callback == null) or (callback is Callable))
var new_request = KiriPythonWrapperActiveRequest.new()
new_request.id = _request_counter
_request_counter += 1
new_request.method_name = method
new_request.arguments = args
new_request.callback = callback
_active_request_queue[new_request.id] = new_request
return new_request.id
func call_rpc_async(method : String, args : Variant):
var request_id = call_rpc_callback(method, args, func(request_ob):
_rpc_async_response_received.emit(request_ob)
)
# Wait (block) until we get a response.
while true:
var rpc_response = await _rpc_async_response_received
if not rpc_response:
push_error("Error happened while waiting for RPC response in async call.")
break
if rpc_response.id == request_id:
return rpc_response.response
return null
func call_rpc_sync(method : String, args : Variant):
# Kinda hacky. We're using arrays because we can change the contents.
# Without the array or something else mutable we'd just end up with the
# internal pointer pointing to different values without affecting these
# ones.
var done_array = [false]
var response_list = []
var request_id = call_rpc_callback(method, args, func(request_ob):
done_array[0] = true
response_list.append(request_ob.response)
)
# Wait (block) until we get a response.
while not done_array[0]:
# Bail out if something happened to our instance or connection to it.
if communication_packet_socket:
if communication_packet_socket.is_disconnected_or_error():
push_error("Disconnected from RPC client while waiting for response.")
break
poll()
OS.delay_usec(1)
if len(response_list):
return response_list[0]
return null
func poll():
# Hand-off between listening socket and actual communications socket.
if _server_packet_socket:
communication_packet_socket = _server_packet_socket.get_next_server_connection()
if communication_packet_socket:
_server_packet_socket.stop()
_server_packet_socket = null
if communication_packet_socket:
if communication_packet_socket.is_disconnected_or_error():
# Tell any awaiting async calls that they're never getting an
# answer. So sad.
_rpc_async_response_received.emit(null)
stop_process()
push_error("poll(): Disconnected from RPC client.")
return
# Send all waiting requests
for request_id in _active_request_queue:
var request : KiriPythonWrapperActiveRequest = _active_request_queue[request_id]
if request.state == request.KiriPythonWrapperActiveRequestState.STATE_WAITING_TO_SEND:
var request_dict = {
"jsonrpc": "2.0",
"method": request.method_name,
"params": request.arguments,
"id": request.id
}
var request_dict_json = JSON.stringify(request_dict)
communication_packet_socket.send_packet(request_dict_json.to_utf8_buffer())
request.state = request.KiriPythonWrapperActiveRequestState.STATE_SENT
# Check for responses.
var packet = communication_packet_socket.get_next_packet()
while packet != null:
var packet_dict = JSON.parse_string(packet.get_string_from_utf8())
if packet_dict:
if packet_dict.has("id"):
var request_id = packet_dict["id"]
# floats aren't even allowed in JSON RPC as an id. Probably
# meant it to be an int.
if request_id is float:
request_id = int(request_id)
if _active_request_queue.has(request_id):
var request : KiriPythonWrapperActiveRequest = \
_active_request_queue[request_id]
if "result" in packet_dict:
request.response = packet_dict["result"]
elif "error" in packet_dict:
push_error(packet_dict["error"])
else:
request.error_response = "Couldn't find result on packet."
if request.callback:
request.callback.call(request)
# Clean up request.
_active_request_queue.erase(request_id)
packet = communication_packet_socket.get_next_packet()

View File

@ -0,0 +1,380 @@
# TARReader
#
# Read .tar.zip files. Interface mostly identical to ZIPReader.
#
# Why .tar.zip instead of .tar.bz2, .tar.gz, .tar.zst, or something normal?
# Godot supports loading GZip- and Zstandard-compressed files, but only ones it
# has saved itself (with its own header/footer), so it can't load normal .tar.gz
# or .tar.zst files. It can load zips, though.
# DO NOT USE THIS ON UNTRUSTED DATA.
extends RefCounted
class_name KiriTARReader
#region Internal data
class TarFileRecord:
extends RefCounted
var filename : String
var offset : int
var file_size : int
# Unix file permissions.
#
# Technically this is an int, but we're just going to leave it as an octal
# string because that's what we can feed right into chmod.
var mode : String
# Symlinks.
var is_link : bool
var link_destination : String
var is_directory : bool
var type_indicator : String
var _internal_file_list = []
var _internal_file_list_indices = {} # Map filename -> index in _internal_file_list
var _reader : ZIPReader = null
var _tar_file_cache : PackedByteArray = []
var _tar_file_hash : PackedByteArray = []
func _load_record(record : TarFileRecord) -> PackedByteArray:
load_cache()
return _tar_file_cache.slice(record.offset, record.offset + record.file_size)
#endregion
#region Cache wrangling
# We have to load the entire .tar file into memory with the way the ZIPReader
# API works, but we'll at least include an option to nuke the cache to free up
# memory if you want to just leave the file open.
#
# This lets us avoid re-opening and decompressing the entire .tar every time we
# need something out of it, while still letting us manually free the memory when
# we won't need it for a while.
func clear_cache():
_tar_file_cache = []
func load_cache() -> Error:
assert(_reader)
if len(_tar_file_cache):
# Cache already in-memory.
return OK
var zip_file_list = _reader.get_files()
if len(zip_file_list) != 1:
return ERR_FILE_UNRECOGNIZED
_tar_file_cache = _reader.read_file(zip_file_list[0])
return OK
#endregion
#region Number wrangling
func _octal_str_to_int(s : String) -> int:
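# Example: _octal_str_to_int("755") == 493 (i.e. 0o755).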
var ret : int = 0;
var digit_multiplier = 1;
while len(s):
var lsb = s.substr(len(s) - 1, 1)
s = s.substr(0, len(s) - 1)
ret += digit_multiplier * lsb.to_int()
digit_multiplier *= 8
return ret
func _pad_to_512(x : int) -> int:
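# Rounds up to the next multiple of 512 (the tar block size).
# Examples: 1 -> 512, 512 -> 512, 600 -> 1024.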
var x_lowbits = x & 511
var x_hibits = x & ~511
if x_lowbits:
x_hibits += 512
return x_hibits
#endregion
#region Public API
func close() -> Error:
_internal_file_list = []
_reader.close()
_reader = null
clear_cache()
return OK
func file_exists(path: String, case_sensitive: bool = true) -> bool:
for record : TarFileRecord in _internal_file_list:
if case_sensitive:
if record.filename == path:
return true
else:
if record.filename.nocasecmp_to(path) == 0:
return true
return false
func get_files() -> PackedStringArray:
var ret : PackedStringArray = []
for record : TarFileRecord in _internal_file_list:
ret.append(record.filename)
return ret
func get_tar_hash():
return _tar_file_hash.hex_encode()
func open(path: String) -> Error:
assert(not _reader)
_reader = ZIPReader.new()
var err = _reader.open(path)
if err != OK:
_reader.close()
_reader = null
return err
load_cache()
# Hash it.
print("Computing tar hash...")
var hashing : HashingContext = HashingContext.new()
hashing.start(HashingContext.HASH_SHA256)
hashing.update(_tar_file_cache)
_tar_file_hash = hashing.finish()
print("Done computing tar hash.")
var tar_file_offset = 0
var zero_filled_record_count = 0
var zero_filled_record : PackedByteArray = []
zero_filled_record.resize(512)
zero_filled_record.fill(0)
var paxheader_next_file = {}
var paxheader_global = {}
while tar_file_offset < len(_tar_file_cache):
var chunk = _tar_file_cache.slice(tar_file_offset, tar_file_offset + 512)
if chunk == zero_filled_record:
zero_filled_record_count += 1
if zero_filled_record_count >= 2:
break
tar_file_offset += 512
continue
var tar_record : TarFileRecord = TarFileRecord.new()
var tar_chunk_name = chunk.slice(0, 100)
var tar_chunk_size = chunk.slice(124, 124+12)
var tar_chunk_mode = chunk.slice(100, 100+8)
var tar_chunk_link_indicator = chunk.slice(156, 156+1)
var tar_chunk_link_file = chunk.slice(157, 157+100)
# FIXME: Technically "ustar\0" but we'll skip the \0
var tar_ustar_indicator = chunk.slice(257, 257+5)
var tar_ustar_file_prefix = chunk.slice(345, 345+155)
# Pluck out the relevant bits we need for the record.
tar_record.filename = tar_chunk_name.get_string_from_utf8()
tar_record.file_size = _octal_str_to_int(tar_chunk_size.get_string_from_utf8())
tar_record.mode = tar_chunk_mode.get_string_from_utf8()
tar_record.is_link = (tar_chunk_link_indicator[0] != 0 and tar_chunk_link_indicator.get_string_from_utf8()[0] == "2")
tar_record.link_destination = tar_chunk_link_file.get_string_from_utf8()
tar_record.is_directory = (tar_chunk_link_indicator[0] != 0 and tar_chunk_link_indicator.get_string_from_utf8()[0] == "5")
if tar_chunk_link_indicator[0] != 0:
tar_record.type_indicator = tar_chunk_link_indicator.get_string_from_utf8()
else:
tar_record.type_indicator = ""
# Append prefix if this is the "ustar" format.
# TODO: Test this.
if tar_ustar_indicator.get_string_from_utf8() == "ustar":
tar_record.filename = \
tar_ustar_file_prefix.get_string_from_utf8() + \
tar_record.filename
# TODO: Things we skipped:
# - owner id (108, 108+8)
# - group id (116, 116+8)
# - modification time (136, 136+12)
# - checksum (148, 148+8)
# - mostly related to USTAR format
# Skip header.
tar_file_offset += 512
# Record start offset.
tar_record.offset = tar_file_offset
# Skip file contents.
tar_file_offset += _pad_to_512(tar_record.file_size)
if tar_record.filename.get_file() == "@PaxHeader":
# This is a special file entry that just has some extended data
# about the next file or all the following files. It's not an actual
# file.
var paxheader_data : PackedByteArray = _tar_file_cache.slice(
tar_record.offset,
tar_record.offset + tar_record.file_size)
var paxheader_str : String = paxheader_data.get_string_from_utf8()
# FIXME: Do some error checking here.
var paxheader_lines = paxheader_str.split("\n", false)
for line in paxheader_lines:
var length_and_the_rest = line.split(" ")
var key_and_value = length_and_the_rest[1].split("=")
var key = key_and_value[0]
var value = key_and_value[1]
if tar_record.type_indicator == "x":
paxheader_next_file[key] = value
elif tar_record.type_indicator == "g":
paxheader_global[key] = value
else:
# Apply paxheader. We're just using "path" for now.
# See here for other available fields:
# https://pubs.opengroup.org/onlinepubs/009695399/utilities/pax.html
var merged_paxheader : Dictionary = paxheader_global.duplicate()
merged_paxheader.merge(paxheader_next_file, true)
paxheader_next_file = {}
if merged_paxheader.has("path"):
tar_record.filename = merged_paxheader["path"]
if merged_paxheader.has("linkpath"):
tar_record.link_destination = merged_paxheader["linkpath"]
# Add it to our record list.
_internal_file_list_indices[tar_record.filename] = len(_internal_file_list)
_internal_file_list.append(tar_record)
return OK
# Extract a file into memory as a PackedByteArray.
func read_file(path : String, case_sensitive : bool = true) -> PackedByteArray:
for record : TarFileRecord in _internal_file_list:
if case_sensitive:
if record.filename == path:
return _load_record(record)
else:
if record.filename.nocasecmp_to(path) == 0:
return _load_record(record)
return []
func _convert_permissions(tar_mode_str : String) -> FileAccess.UnixPermissionFlags:
# Okay so this turned out to be easier than I thought. Godot's
# UnixPermissionFlags line up with the actual permission bits in the tar.
return _octal_str_to_int(tar_mode_str)
# Extract a file to a specific path. Sets permissions when possible, handles
# symlinks and directories. Will extract to the dest_path plus the internal
# relative path.
#
# Example:
# dest_path: "foo/bar", filename: "butts/whatever/thingy.txt"
# extracts to: "foo/bar/butts/whatever/thingy.txt"
func unpack_file(dest_path : String, filename : String, force_overwrite : bool = false):
var full_dest_path : String = dest_path.path_join(filename)
DirAccess.make_dir_recursive_absolute(full_dest_path.get_base_dir())
assert(_internal_file_list_indices.has(filename))
var record : TarFileRecord = _internal_file_list[_internal_file_list_indices[filename]]
# FIXME: There are probably a million other ways to do directory
# traversal attacks than just what we've checked for here.
if record.filename.is_absolute_path():
assert(false)
return
if record.filename.simplify_path().begins_with(".."):
assert(false)
return
var need_file_made : bool = true
var need_permission_update : bool = true
var exists_in_some_way : bool = FileAccess.file_exists(full_dest_path) || DirAccess.dir_exists_absolute(full_dest_path)
# Check to see if we need to make the dir/file/etc.
if force_overwrite == false:
if exists_in_some_way:
# Link exists. Don't overwrite.
if record.is_link:
#print("Skip (link exist): ", full_dest_path)
# FIXME: Check symlink destination?
need_file_made = false
if record.is_directory:
#print("Skip (dir exist): ", full_dest_path)
need_file_made = false
# If the file is there and it's a complete file, then we're probably
# done. We can't check or set mtime through Godot's API, though.
var f : FileAccess = FileAccess.open(full_dest_path, FileAccess.READ)
if f.get_length() == record.file_size:
#print("Skip (file exist): ", full_dest_path)
need_file_made = false
f.close()
if not record.is_link and OS.get_name() != "Windows":
if FileAccess.file_exists(full_dest_path) || DirAccess.dir_exists_absolute(full_dest_path):
var existing_permissions : FileAccess.UnixPermissionFlags = FileAccess.get_unix_permissions(full_dest_path)
var wanted_permissions : FileAccess.UnixPermissionFlags = _convert_permissions(record.mode)
if existing_permissions == wanted_permissions:
need_permission_update = false
#print("Permission are fine: ", record.mode, " ", existing_permissions, " ", full_dest_path)
else:
print("Permission update needed on existing file: ", record.mode, " ", existing_permissions, " ", full_dest_path)
if record.is_link:
# Okay, look. I know that symbolic links technically exist on
# Windows, but they're messy and hardly ever used. FIXME later
# if for some reason you need to support that. -Kiri
assert(OS.get_name() != "Windows")
# Fire off a command to make a symbolic link on *normal* OSes.
var err = OS.execute("ln", [
"-s",
record.link_destination,
ProjectSettings.globalize_path(full_dest_path)
])
assert(err != -1)
elif record.is_directory:
# It's just a directory. Make it.
DirAccess.make_dir_recursive_absolute(full_dest_path)
else:
# Okay this is an actual file. Extract it.
var file_data : PackedByteArray = read_file(record.filename)
var out_file = FileAccess.open(full_dest_path, FileAccess.WRITE)
out_file.store_buffer(file_data)
out_file.close()
# Set permissions (on normal OSes, not Windows). I don't think this
# applies to symlinks, though.
if not record.is_link:
if need_permission_update:
if OS.get_name() != "Windows":
var err : Error = FileAccess.set_unix_permissions(
full_dest_path, _convert_permissions(record.mode))
assert(err != -1)
#endregion
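A minimal usage sketch of the reader above (archive and destination paths are illustrative):

var tar := KiriTARReader.new()
var err : Error = tar.open("res://StandalonePythonBuilds/example.tar.zip")
assert(err == OK)
for filename in tar.get_files():
    tar.unpack_file("user://example_unpack", filename)
tar.close()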

View File

@ -0,0 +1,4 @@
TODO
- How to use
- How to package Python stuff

View File

@ -0,0 +1,69 @@
# What
This is where we store standalone Python builds for distributing with the built application. These will get unpacked into a temp directory on desktop platforms so that we can have an extremely specific, isolated Python environment.
Standalone Python builds from:
https://github.com/indygreg/python-build-standalone/releases
# .tar.zst vs .tar.zip vs .tar.bz2 vs tar.gz, etc
We're going to unpack these at runtime, so they need to be stored in a format that Godot can load. It's fairly simple for us to write our own .tar format parser, but for the compression layer (zst, zip, gz, bz2, etc.) it's better to rely on the engine's built-in decompression code.
Of these formats, the only one Godot can read (without the Godot-specific header it attaches when saving compressed files itself) is .zip. Unfortunately, the .zip format doesn't preserve a lot of the file permissions that the original .tar.whatever archive includes.
So we're splitting the difference in a slightly unusual way: Use .zip as the compression around the .tar file instead of bzip2, gzip, zstd, or whatever else, then write a .tar parser to load the internal .tar at runtime. What we get from this is slightly worse compression in a format that Godot can actually read at runtime, one which preserves file permissions and other attributes the way a .tar would.
For format reference on .tar:
https://www.gnu.org/software/tar/manual/html_node/Standard.html
https://www.ibm.com/docs/en/zos/2.4.0?topic=formats-tar-format-tar-archives
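A rough sketch of the read path described above, assuming Godot 4's built-in ZIPReader (KiriTARReader in this changeset is the actual implementation):

var zip := ZIPReader.new()
if zip.open("res://StandalonePythonBuilds/example.tar.zip") == OK:
    # The .zip wraps exactly one entry: the uncompressed .tar.
    var tar_bytes : PackedByteArray = zip.read_file(zip.get_files()[0])
    zip.close()
    # From here, walk tar_bytes in 512-byte header blocks (see KiriTARReader).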
# The Process
## Automated way
Run `update.bsh`.
## Obsolete, manual way
### 1. Grab latest archives
To update the archives here, grab the latest archive from:
https://github.com/indygreg/python-build-standalone/releases
There's a huge list of files there, so here's how to determine the latest version for this project:
cpython-PYTHONVERSION+RELEASE-ARCHITECTURE-PLATFORM-OPTIMIZATIONS.tar.zst
- PYTHONVERSION: The Python version. Unless there's a good reason to, you probably want the latest version of this.
- RELEASE: Should correspond to the latest release. Formatted as a date (YYYYMMDD).
- ARCHITECTURE: CPU architecture. This is going to be funky for Macs, but for desktop Linux/Windows PCs we usually just want `x86_64`. The `x86_64_v2` and higher variants use instruction-set extensions only found on newer CPUs. This may change if we start supporting stuff like Linux on ARM or whatever.
- PLATFORM:
- For Windows we want `windows-msvc-shared`.
- For Linux we want `unknown-linux-gnu`.
- OPTIMIZATIONS: `pgo+lto` for fully optimized builds.
Examples:
- Linux Python 3.12.3, release 20240415: `cpython-3.12.3+20240415-x86_64-unknown-linux-gnu-pgo+lto-full.tar.zst`
- Windows Python 3.12.3, release 20240415: `cpython-3.12.3+20240415-x86_64-pc-windows-msvc-shared-pgo-full.tar.zst`
See here for more info:
https://gregoryszorc.com/docs/python-build-standalone/20240415/running.html
### 2. Stick them in this directory
### 3. Run the conversion script
Run `./convert_zsts.bsh` in this directory.
### 4. Add them to git

View File

@ -0,0 +1,21 @@
#!/bin/bash
# Why do we do this? Because Godot can read zip files but not zst
# files. But we still want to preserve the file attributes in the .tar
# archive.
cd "$(dirname "$0")"
# Decompress zsts...
for ZSTFILE in *.tar.zst; do
if [ \! -e "$(basename ${ZSTFILE} .zst)" ]; then
zstd -d "${ZSTFILE}"
fi
done
# Recompress zips...
for TARFILE in *.tar; do
zip "${TARFILE}.zip" "${TARFILE}"
done

View File

@ -0,0 +1,75 @@
#!/bin/bash
# I am writing this with an ocular migraine and it's hard as shit to
# read the goddamn code, so please excuse any obvious mistakes. I
# literally cannot see them right now.
PYTHON_VERSIONS="3.12.3"
# TODO: Add more to this list if we want to support more platforms.
PYTHON_PLATFORM_CONFIGS="x86_64-pc-windows-msvc-shared-pgo-full x86_64-unknown-linux-gnu-pgo+lto-full"
set -e
cd "$(dirname "$0")"
# wget \
# https://raw.githubusercontent.com/indygreg/python-build-standalone/latest-release/latest-release.json \
# -o latest-release.json
RELEASE_PARTS=$(curl https://raw.githubusercontent.com/indygreg/python-build-standalone/latest-release/latest-release.json | \
python3 -c "import json; import sys; d = json.loads(sys.stdin.read()); print(d[\"tag\"]); print(d[\"asset_url_prefix\"]);")
RELEASE_TAG="$(echo $RELEASE_PARTS | cut -d" " -f 1)"
RELEASE_BASE_URL="$(echo $RELEASE_PARTS | cut -d" " -f 2)"
echo $RELEASE_TAG
echo $RELEASE_BASE_URL
echo "Fetching new files from release..."
for PYTHON_VERSION in $PYTHON_VERSIONS; do
for CONFIG in $PYTHON_PLATFORM_CONFIGS; do
FILENAME="cpython-${PYTHON_VERSION}+${RELEASE_TAG}-$CONFIG.tar.zst"
if [ \! -e "${FILENAME}" ] ; then
wget "${RELEASE_BASE_URL}/${FILENAME}"
fi
done
done
echo "Decompressing zsts..."
# Decompress zsts...
for ZSTFILE in *.tar.zst; do
if [ \! -e "$(basename ${ZSTFILE} .zst)" ]; then
zstd -d "${ZSTFILE}"
fi
done
echo "Compressing zips..."
# Recompress zips...
for TARFILE in *.tar; do
if [ \! -e "${TARFILE}.zip" ]; then
zip "${TARFILE}.zip" "${TARFILE}"
fi
done
# Write version data.
# FIXME: Extremely dirty and hacky.
echo "{ \"release\" : \"${RELEASE_TAG}\", \"versions\" : [" > python_release_info.json
FIRST="1"
for PYTHON_VERSION in $PYTHON_VERSIONS; do
if [ "$FIRST" == "0" ]; then
echo "," >> python_release_info.json
fi
FIRST=0
echo "\"${PYTHON_VERSION}\"" >> python_release_info.json
done
echo "]" >> python_release_info.json
echo "}" >> python_release_info.json

View File

@ -0,0 +1,26 @@
Done:
x Handle bundling of the actual Python modules we want to use.
x Remove dependency on psutil.
x Clean up removal of psutil.
x remove parent_pid from wrapper script
x remove KiriPythonRPCWrapper_start.py
x remove test_rpc.py
x Un-thread the GDScript side of PacketSocket.
x Fix whatever this is: <stuff was here>
x example Python module from OUTSIDE the addon
x Remove xterm dependency, or make it like a debug-only thing.
x Test on WINE/Windows.
x First-time setup of requirements (pip, etc).
x Deal with interrupted setup operations
x We check for the python.exe file in a given setup location to see if
we need to unpack stuff, but what if that exists but the setup was
interrupted and we're missing files?
x Deal with bad state after interrupted unpacking operation
The big ones:
- Add some kind of progress bar, or API for progress tracking, for the unpacking.
- Progress bar or API for progress tracking for pip installs.
- Maybe we should parse the pip requirements.txt and also set up an API for calling pip install.
- Documentation.
- how to use .kiri_export_python

View File

@ -1,11 +0,0 @@
@tool
extends EditorPlugin

func _enter_tree():
    # Initialization of the plugin goes here.
    pass

func _exit_tree():
    # Clean-up of the plugin goes here.
    pass

View File

@ -1,154 +0,0 @@
#!/usr/bin/python3
import importlib.util
import sys
import argparse
import time
import psutil
import json
import KiriPacketSocket
# Parse arguments
arg_parser = argparse.ArgumentParser(
prog="KiriPythonRPCWrapper",
description="Wrapper for Python modules to RPCs from Godot.",
epilog="")
arg_parser.add_argument("--script", type=str, required=True)
arg_parser.add_argument("--port", type=int, required=True)
arg_parser.add_argument("--parent_pid", type=int, required=True)
args = arg_parser.parse_args()
# module_path = "../KiriPacketSocket/__init__.py"
# module_name = "KiriPacketSocket"
module_path = args.script
module_name = ""
# Attempt to load the module.
module_spec = importlib.util.spec_from_file_location(
module_name, module_path)
module = importlib.util.module_from_spec(module_spec)
module_spec.loader.exec_module(module)
# This will be all the functions we find in the module that don't
# start with "_".
known_entrypoints = {}
# Scan the module for "public" functions.
for entrypoint in dir(module):
# Skip anything starting with "_". Probably not meant to be
# exposed.
if entrypoint.startswith("_"):
continue
attr = getattr(module, entrypoint)
# if hasattr(attr, "__call__"):
if callable(attr):
known_entrypoints[entrypoint] = attr
# Connect to server.
packet_socket = KiriPacketSocket.PacketSocket()
packet_socket.start_client(("127.0.0.1", args.port))
while packet_socket.get_state() == packet_socket.SocketState.CONNECTING:
time.sleep(0.001)
if packet_socket.get_state() != packet_socket.SocketState.CONNECTED:
packet_socket.stop()
raise Exception("Failed to connect to RPC host.")
print("Starting packet processing.")
def send_error_response(code, message, request_id):
ret_dict = {
"jsonrpc" : "2.0",
"error" : {
"code" : code,
"message" : message
},
"id" : request_id
}
ret_dict_json = json.dumps(ret_dict)
packet_socket.send_packet(ret_dict_json.encode("utf-8"))
def send_response(result, request_id):
try:
ret_dict = {
"jsonrpc" : "2.0",
"result" : ret,
"id" : request_id
}
ret_dict_json = json.dumps(ret_dict)
packet_socket.send_packet(ret_dict_json.encode("utf-8"))
except Exception as e:
send_error_response(-32603, "Error sending result: " + str(e), request_id)
# Start processing packets.
while True:

    # Shutdown when we lose connection to host.
    if packet_socket.get_state() != packet_socket.SocketState.CONNECTED:
        packet_socket.stop()
        raise Exception("Disconnected from RPC host.")

    # Watch parent PID so we can clean up when needed.
    if not psutil.pid_exists(args.parent_pid):
        packet_socket.stop()
        raise Exception("RPC host process died")

    next_packet = packet_socket.get_next_packet()
    while next_packet:
        this_packet = next_packet
        next_packet = packet_socket.get_next_packet()

        print("GOT PACKET: ", this_packet)

        # FIXME: Handle batches.

        # Parse the incoming dict.
        try:
            request_dict_json = this_packet.decode("utf-8")
            request_dict = json.loads(request_dict_json)
        except Exception as e:
            # The packet didn't even parse, so there's no usable request id.
            # Send null back instead.
            send_error_response(-32700, "Error parsing packet: " + str(e), None)
            continue

        # Make sure all the fields are there. Grab the id first (with a None
        # fallback) so error responses reference the right request.
        request_id = request_dict.get("id", None)
        try:
            method = request_dict["method"]
            func_args = request_dict["params"]
        except Exception as e:
            send_error_response(-32602, "Missing field: " + str(e), request_id)
            continue
        # Make sure the method is something we scanned earlier.
        try:
            func = known_entrypoints[method]
        except Exception as e:
            send_error_response(-32601, "Method not found: " + str(e), request_id)
            continue

        # Call the dang function.
        try:
            ret = func(*func_args)
        except Exception as e:
            send_error_response(-32603, "Call failed: " + str(e), request_id)
            continue

        send_response(ret, request_id)

    time.sleep(0.0001)
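
For reference, the packets this loop consumes and produces are plain JSON-RPC 2.0 payloads. A minimal sketch of one exchange on the wire (the method name, params, and id here are made up; in the real setup the request is built on the GDScript side shown further down in this diff):

import json

# Request packet, as the GDScript caller builds it:
request = {
    "jsonrpc": "2.0",
    "method": "example_method",  # must match one of the scanned entrypoints
    "params": [12345],
    "id": 0
}
request_bytes = json.dumps(request).encode("utf-8")

# Successful response packet, as send_response() above builds it:
response = {
    "jsonrpc": "2.0",
    "result": "whatever example_method returned",
    "id": 0
}
response_bytes = json.dumps(response).encode("utf-8")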

View File

@ -1,10 +0,0 @@
#!/usr/bin/python3

import time

try:
    import KiriPythonRPCWrapper
except Exception as e:
    print(e)

time.sleep(5)

View File

@ -1,219 +0,0 @@
extends RefCounted
class_name KiriPythonWrapperInstance

var external_process_pid = -1

var server_packet_socket : KiriPacketSocket = null
var communication_packet_socket : KiriPacketSocket = null

var python_script_path : String = ""

enum KiriPythonWrapperStatus {
	STATUS_RUNNING,
	STATUS_STOPPED
}

func _init(python_file_path : String):
	python_script_path = python_file_path

func _get_python_executable():
	# FIXME: Adjust per-OS. Maybe test a few locations.
	return "/usr/bin/python3"

func _get_wrapper_script():
	# FIXME: Paths will be different for builds.
	var script_path = self.get_script().get_path()
	var script_dirname = script_path.get_base_dir()
	return ProjectSettings.globalize_path( \
		script_dirname + "/KiriPythonRPCWrapper_start.py")

func get_status():
	if external_process_pid == -1:
		return KiriPythonWrapperStatus.STATUS_STOPPED
	if not OS.is_process_running(external_process_pid):
		return KiriPythonWrapperStatus.STATUS_STOPPED
	return KiriPythonWrapperStatus.STATUS_RUNNING

func start_process():
	# FIXME: Make sure we don't have one running.

	var open_port = 9500

	assert(not server_packet_socket)
	server_packet_socket = KiriPacketSocket.new()
	while true:
		server_packet_socket.start_server(["127.0.0.1", open_port])

		# Wait for the server to start.
		while server_packet_socket.get_state() == KiriPacketSocket.KiriSocketState.SERVER_STARTING:
			OS.delay_usec(1)

		# If we're successfully listening, then we found a port to use and we
		# don't need to loop anymore.
		if server_packet_socket.get_state() == KiriPacketSocket.KiriSocketState.SERVER_LISTENING:
			break

		# This port is busy. Try the next one.
		server_packet_socket.stop()
		open_port += 1

	print("Port: ", open_port)

	var python_exe_path : String = _get_python_executable()
	var wrapper_script_path : String = _get_wrapper_script()

	var startup_command : Array = [
		"xterm", "-e",
		python_exe_path,
		wrapper_script_path,
		"--script", python_script_path,
		"--port", open_port,
		"--parent_pid", OS.get_process_id()]

	print("startup command: ", startup_command)

	external_process_pid = OS.create_process(
		startup_command[0], startup_command.slice(1), true)

	print("external process: ", external_process_pid)

func stop_process():
	if external_process_pid != -1:
		OS.kill(external_process_pid)
		external_process_pid = -1

	# Clean up server and communication sockets.
	if server_packet_socket:
		server_packet_socket.stop()
		server_packet_socket = null

	if communication_packet_socket:
		communication_packet_socket.stop()
		communication_packet_socket = null
class KiriPythonWrapperActiveRequest:

	enum KiriPythonWrapperActiveRequestState {
		STATE_WAITING_TO_SEND,
		STATE_SENT,
		STATE_RESPONSE_RECEIVED
	}

	var id : int
	var method_name : String
	var arguments : Variant # Dictionary or Array
	var callback # Callable or null
	var state : KiriPythonWrapperActiveRequestState
	var response # Return value from the call
	var error_response = ""

var _active_request_queue = {}
var _request_counter = 0

func call_rpc_async(method : String, args : Variant, callback = null) -> int:

	assert((args is Dictionary) or (args is Array))
	assert((callback == null) or (callback is Callable))

	var new_request = KiriPythonWrapperActiveRequest.new()
	new_request.id = _request_counter
	_request_counter += 1
	new_request.method_name = method
	new_request.arguments = args
	new_request.callback = callback

	_active_request_queue[new_request.id] = new_request

	return new_request.id
func call_rpc_sync(method : String, args : Variant):

	# Kinda hacky. We're using arrays here because they're mutable and the
	# lambda captures them by reference, so the callback below can fill in
	# values that this function can see. Plain captured locals wouldn't
	# reflect the callback's changes out here.
	var done_array = [false]
	var response_list = []

	var request_id = call_rpc_async(method, args, func(request_ob):
		done_array[0] = true
		response_list.append(request_ob.response)
	)

	# Wait (block) until we get a response.
	while not done_array[0]:

		# Bail out if something happened to our instance or connection to it.
		if communication_packet_socket:
			if communication_packet_socket.is_disconnected_or_error():
				break

		poll()
		OS.delay_usec(1)

	if len(response_list):
		return response_list[0]
	return null
func poll():

	# Hand-off between listening socket and actual communications socket.
	if server_packet_socket:
		communication_packet_socket = server_packet_socket.get_next_server_connection()
		if communication_packet_socket:
			server_packet_socket.stop()
			server_packet_socket = null

	if communication_packet_socket:

		# Send all waiting requests
		for request_id in _active_request_queue:
			var request : KiriPythonWrapperActiveRequest = _active_request_queue[request_id]
			if request.state == request.KiriPythonWrapperActiveRequestState.STATE_WAITING_TO_SEND:
				var request_dict = {
					"jsonrpc": "2.0",
					"method": request.method_name,
					"params": request.arguments,
					"id": request.id
				}
				var request_dict_json = JSON.stringify(request_dict)
				communication_packet_socket.send_packet(request_dict_json.to_utf8_buffer())
				request.state = request.KiriPythonWrapperActiveRequestState.STATE_SENT

		# Check for responses.
		var packet = communication_packet_socket.get_next_packet()
		while packet != null:
			var packet_dict = JSON.parse_string(packet.get_string_from_utf8())
			if packet_dict:
				if packet_dict.has("id"):
					var request_id = packet_dict["id"]
					# Godot's JSON parser hands the id back as a float, but we
					# sent it as an int, so convert it back before looking up
					# the request.
					if request_id is float:
						request_id = int(request_id)
					if _active_request_queue.has(request_id):
						var request : KiriPythonWrapperActiveRequest = \
							_active_request_queue[request_id]
						if "result" in packet_dict:
							request.response = packet_dict["result"]
						else:
							request.error_response = "Couldn't find result on packet."
						if request.callback:
							request.callback.call(request)

						# Clean up request.
						_active_request_queue.erase(request_id)

			packet = communication_packet_socket.get_next_packet()

View File

@ -1,28 +0,0 @@
#!/usr/bin/python3

import time

import KiriPacketSocket

ps = KiriPacketSocket.PacketSocket()
ps.start_server(("127.0.0.1", 9506))

connections = []

while True:
    psc = ps.get_next_server_connection()
    if psc:
        print("Got connection.")
        connections.append(psc)
        psc.send_packet(b'ABCDEFGHIJ')

    for conn in connections:
        p = conn.get_next_packet()
        while p:
            print(p)
            conn.send_packet(p + b'1')
            p = conn.get_next_packet()

    time.sleep(0.0001)
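
A matching client for this echo test might look something like the sketch below. It only uses PacketSocket calls that already appear elsewhere in this diff, but the script itself is hypothetical and simply prints whatever the server sends:

#!/usr/bin/python3

import time

import KiriPacketSocket

ps = KiriPacketSocket.PacketSocket()
ps.start_client(("127.0.0.1", 9506))

# Wait for the connection attempt to resolve.
while ps.get_state() == ps.SocketState.CONNECTING:
    time.sleep(0.001)

while ps.get_state() == ps.SocketState.CONNECTED:
    p = ps.get_next_packet()
    while p:
        # Just the b'ABCDEFGHIJ' greeting here, since this client never
        # sends anything for the server to echo back.
        print(p)
        p = ps.get_next_packet()
    time.sleep(0.0001)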

102
export_presets.cfg Normal file
View File

@ -0,0 +1,102 @@
[preset.0]
name="Linux/X11"
platform="Linux/X11"
runnable=true
dedicated_server=false
custom_features=""
export_filter="all_resources"
include_filter=""
exclude_filter=""
export_path="./GodotJSONRPCTest.x86_64"
encryption_include_filters=""
encryption_exclude_filters=""
encrypt_pck=false
encrypt_directory=false
[preset.0.options]
custom_template/debug=""
custom_template/release=""
debug/export_console_wrapper=1
binary_format/embed_pck=false
texture_format/bptc=true
texture_format/s3tc=true
texture_format/etc=false
texture_format/etc2=false
binary_format/architecture="x86_64"
ssh_remote_deploy/enabled=false
ssh_remote_deploy/host="user@host_ip"
ssh_remote_deploy/port="22"
ssh_remote_deploy/extra_args_ssh=""
ssh_remote_deploy/extra_args_scp=""
ssh_remote_deploy/run_script="#!/usr/bin/env bash
export DISPLAY=:0
unzip -o -q \"{temp_dir}/{archive_name}\" -d \"{temp_dir}\"
\"{temp_dir}/{exe_name}\" {cmd_args}"
ssh_remote_deploy/cleanup_script="#!/usr/bin/env bash
kill $(pgrep -x -f \"{temp_dir}/{exe_name} {cmd_args}\")
rm -rf \"{temp_dir}\""
[preset.1]
name="Windows Desktop"
platform="Windows Desktop"
runnable=true
dedicated_server=false
custom_features=""
export_filter="all_resources"
include_filter=""
exclude_filter=""
export_path="./GodotJSONRPCTest.exe"
encryption_include_filters=""
encryption_exclude_filters=""
encrypt_pck=false
encrypt_directory=false
[preset.1.options]
custom_template/debug=""
custom_template/release=""
debug/export_console_wrapper=1
binary_format/embed_pck=false
texture_format/bptc=true
texture_format/s3tc=true
texture_format/etc=false
texture_format/etc2=false
binary_format/architecture="x86_64"
codesign/enable=false
codesign/timestamp=true
codesign/timestamp_server_url=""
codesign/digest_algorithm=1
codesign/description=""
codesign/custom_options=PackedStringArray()
application/modify_resources=true
application/icon=""
application/console_wrapper_icon=""
application/icon_interpolation=4
application/file_version=""
application/product_version=""
application/company_name=""
application/product_name=""
application/file_description=""
application/copyright=""
application/trademarks=""
application/export_angle=0
ssh_remote_deploy/enabled=false
ssh_remote_deploy/host="user@host_ip"
ssh_remote_deploy/port="22"
ssh_remote_deploy/extra_args_ssh=""
ssh_remote_deploy/extra_args_scp=""
ssh_remote_deploy/run_script="Expand-Archive -LiteralPath '{temp_dir}\\{archive_name}' -DestinationPath '{temp_dir}'
$action = New-ScheduledTaskAction -Execute '{temp_dir}\\{exe_name}' -Argument '{cmd_args}'
$trigger = New-ScheduledTaskTrigger -Once -At 00:00
$settings = New-ScheduledTaskSettingsSet
$task = New-ScheduledTask -Action $action -Trigger $trigger -Settings $settings
Register-ScheduledTask godot_remote_debug -InputObject $task -Force:$true
Start-ScheduledTask -TaskName godot_remote_debug
while (Get-ScheduledTask -TaskName godot_remote_debug | ? State -eq running) { Start-Sleep -Milliseconds 100 }
Unregister-ScheduledTask -TaskName godot_remote_debug -Confirm:$false -ErrorAction:SilentlyContinue"
ssh_remote_deploy/cleanup_script="Stop-ScheduledTask -TaskName godot_remote_debug -ErrorAction:SilentlyContinue
Unregister-ScheduledTask -TaskName godot_remote_debug -Confirm:$false -ErrorAction:SilentlyContinue
Remove-Item -Recurse -Force '{temp_dir}'"

View File

@ -11,6 +11,7 @@ config_version=5
[application]
config/name="GodotJSONRPCTest"
run/main_scene="res://TarTest.tscn"
config/features=PackedStringArray("4.2", "GL Compatibility")
run/max_fps=60
config/icon="res://icon.svg"
@ -21,7 +22,7 @@ window/vsync/vsync_mode=0
[editor_plugins]
enabled=PackedStringArray("res://addons/kiripythonrpcwrapper/plugin.cfg")
enabled=PackedStringArray("res://addons/KiriPythonRPCWrapper/plugin.cfg")
[rendering]