mirror of https://github.com/FULU-Foundation/OrcaSlicer-bambulab.git
synced 2026-05-14 00:42:32 -07:00
Initial release
30  scripts/.dockerignore  Normal file
@@ -0,0 +1,30 @@
Build
Build.bat
/build/
deps/build
MYMETA.json
MYMETA.yml
_build
blib
xs/buildtmp
*.o
*.log
MANIFEST.bak
xs/MANIFEST.bak
xs/assertlib*
.init_bundle.ini
.vs/*
local-lib
/src/TAGS
/.vscode/
build-linux/*
deps/build-linux/*
**/.DS_Store
install_*
build_*/
!build_linux.sh
SVG
scripts/Dockerfile
scripts/DockerBuild.sh
scripts/DockerRun.sh
scripts/.dockerignore
17  scripts/DockerBuild.sh  Executable file
@@ -0,0 +1,17 @@
#!/bin/bash
SCRIPT_DIR=$(cd -P -- "$(dirname -- "$0")" && printf '%s\n' "$(pwd -P)")
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"

set -x
# Wishlist hint: For developers, creating a Docker Compose
# setup with persistent volumes for the build & deps directories
# would speed up recompile times significantly. For end users,
# the simplicity of a single Docker image and a one-time compilation
# seems better.
docker build -t orcaslicer \
    --build-arg USER="$USER" \
    --build-arg UID="$(id -u)" \
    --build-arg GID="$(id -g)" \
    --build-arg NCORES="$NCORES" \
    -f "$SCRIPT_DIR/Dockerfile" \
    "$PROJECT_ROOT"
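The wishlist above can be prototyped in a few lines. A minimal sketch, assuming the orcaslicer image already built by this script; the compose file path, service name, and volume names are hypothetical, and the entrypoint override is needed because the image's ENTRYPOINT is the orca-slicer binary:

# Hypothetical Docker Compose sketch for the developer wishlist above:
# named volumes keep /OrcaSlicer/build and /OrcaSlicer/deps/build between
# runs, so repeated `docker compose run` invocations recompile incrementally.
cat > scripts/docker-compose.yml <<'EOF'
services:
  orcaslicer-dev:
    image: orcaslicer                        # built once by DockerBuild.sh
    working_dir: /OrcaSlicer
    entrypoint: ["./build_linux.sh", "-sr"]  # override the orca-slicer ENTRYPOINT
    volumes:
      - orca-build:/OrcaSlicer/build
      - orca-deps:/OrcaSlicer/deps/build
volumes:
  orca-build:
  orca-deps:
EOF
docker compose -f scripts/docker-compose.yml run --rm orcaslicer-dev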
28  scripts/DockerRun.sh  Executable file
@@ -0,0 +1,28 @@
#!/bin/bash
set -x
# Just in case, here are some other things that might help:
# Force the container's hostname to be the same as your workstation's
# -h $HOSTNAME \
# If there are problems with the X display, try this
# -v /tmp/.X11-unix:/tmp/.X11-unix \
# If you get an error like "Authorization required, but no authorization protocol specified," run the xhost command below in your terminal before rerunning this program
# xhost +local:docker
docker run \
    `# Use the host's networking for printer wifi and dbus communication` \
    --net=host \
    `# Some X installs will not have permissions to talk to sockets for shared memory` \
    --ipc host \
    `# Run as your workstation's username to keep permissions the same` \
    -u "$USER" \
    `# Bind mount your home directory into the container for loading/saving files` \
    -v "$HOME:/home/$USER" \
    `# Pass the X display number to the container` \
    -e DISPLAY="$DISPLAY" \
    `# It seems that libGL and dbus things need privileged mode` \
    --privileged=true \
    `# Attach a tty for running Orca Slicer with command line options` \
    -ti \
    `# Clean up after yourself` \
    --rm \
    `# Pass all parameters from this script to the orca-slicer ENTRYPOINT binary` \
    orcaslicer "$@"
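Taken together, a typical first session with these two scripts looks like the following (the xhost call is only needed if the X authorization error described above appears):

./scripts/DockerBuild.sh    # one-time image build and compile
xhost +local:docker         # only if X complains about authorization
./scripts/DockerRun.sh      # any extra arguments are passed to orca-slicer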
100  scripts/Dockerfile  Normal file
@@ -0,0 +1,100 @@
FROM docker.io/ubuntu:24.04
LABEL maintainer="DeftDawg <DeftDawg@gmail.com>"

# Disable interactive package configuration
RUN apt-get update && \
    echo 'debconf debconf/frontend select Noninteractive' | debconf-set-selections

# Add a deb-src
RUN echo deb-src http://archive.ubuntu.com/ubuntu \
    $(cat /etc/*release | grep VERSION_CODENAME | cut -d= -f2) main universe >> /etc/apt/sources.list

RUN apt-get update && apt-get install -y \
    autoconf \
    build-essential \
    cmake \
    curl \
    eglexternalplatform-dev \
    extra-cmake-modules \
    file \
    git \
    gstreamer1.0-plugins-bad \
    gstreamer1.0-libav \
    libcairo2-dev \
    libcurl4-openssl-dev \
    libdbus-1-dev \
    libglew-dev \
    libglu1-mesa-dev \
    libgstreamer1.0-dev \
    libgstreamerd-3-dev \
    libgstreamer-plugins-base1.0-dev \
    libgstreamer-plugins-good1.0-dev \
    libgtk-3-dev \
    libsecret-1-dev \
    libsoup2.4-dev \
    libssl3 \
    libssl-dev \
    libtool \
    libudev-dev \
    libwayland-dev \
    libwebkit2gtk-4.1-dev \
    libxkbcommon-dev \
    locales \
    locales-all \
    m4 \
    pkgconf \
    sudo \
    wayland-protocols \
    wget

# Change your locale here if you want. See the output
# of `locale -a` to pick the correct string formatting.
ENV LC_ALL=en_US.utf8
RUN locale-gen $LC_ALL

# Set this so that Orca Slicer doesn't complain about
# the CA cert path on every startup
ENV SSL_CERT_FILE=/etc/ssl/certs/ca-certificates.crt

COPY ./ OrcaSlicer

WORKDIR OrcaSlicer

# These could run as one step, but we run them separately for podman caching.
# Update system dependencies
RUN ./build_linux.sh -u

# Build dependencies in ./deps
RUN ./build_linux.sh -dr

# Build slic3r
RUN ./build_linux.sh -sr

# Build AppImage
ENV container=podman
RUN ./build_linux.sh -ir

# It's easier to run Orca Slicer with the same username,
# UID, and GID as your workstation. Since we bind mount
# your home directory into the container, it's handy
# to keep permissions the same. Just in case, the defaults
# are root.
SHELL ["/bin/bash", "-l", "-c"]
ARG USER=root
ARG UID=0
ARG GID=0
RUN if [[ "$UID" != "0" ]]; then \
        # Create the group if it doesn't exist \
        groupadd -f -g $GID $USER; \
        # Check whether a user with this UID already exists \
        if getent passwd $UID > /dev/null 2>&1; then \
            echo "User with UID $UID already exists, skipping user creation"; \
        else \
            useradd -u $UID -g $GID $USER; \
        fi \
    fi

# Using an ENTRYPOINT instead of CMD because the binary
# accepts several command line arguments.
ENTRYPOINT ["/OrcaSlicer/build/package/bin/orca-slicer"]
37  scripts/HintsToPot.py  Normal file
@@ -0,0 +1,37 @@
# Helps convert hints.ini into POT

import sys

from configparser import ConfigParser
from pathlib import Path


def write_to_pot(path: Path, data: dict[str, dict[str, str]]):
    with open(path, "a+t") as pot_file:
        for key in data.keys():
            print(
                f"\n#: resources/data/hints.ini: [{ key }]\nmsgid \"{ data[key]['text'] }\"\nmsgstr \"\"",
                file=pot_file,
            )


def main():
    if len(sys.argv) != 3:
        print("HINTS_TO_POT FAILED: WRONG NUM OF ARGS")
        exit(-1)
    path_to_ini = Path(sys.argv[1]).parent / "resources" / "data" / "hints.ini"
    path_to_pot = Path(sys.argv[2]).parent / "i18n" / "OrcaSlicer.pot"
    if not path_to_ini.exists():
        print("HINTS_TO_POT FAILED: PATH TO INI DOES NOT EXIST")
        print(str(path_to_ini))
        exit(-1)
    config = ConfigParser()
    with open(path_to_ini) as hints_file:
        config.read_file(hints_file)
    # _sections is a private ConfigParser attribute mapping section name -> option dict
    write_to_pot(path_to_pot, config._sections)
    print("HINTS_TO_POT SUCCESS")
    exit(0)


if __name__ == "__main__":
    main()
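Invocation sketch: both arguments are used only for their parent directories, so any file path inside the right trees works. The concrete paths below are hypothetical:

# parent of argv[1] must contain resources/data/hints.ini;
# parent of argv[2] must contain i18n/ (the .pot is opened in append mode,
# so delete a stale OrcaSlicer.pot first if you want a clean file)
python3 scripts/HintsToPot.py ./src/dummy ./localization/dummy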
57  scripts/appimage_lib_policy.sh  Executable file
@@ -0,0 +1,57 @@
#!/usr/bin/env bash

# Libraries that are safer to resolve from the host than bundle into the AppImage.
# Keep this list focused on the glibc/runtime loader and host-specific graphics/audio stacks.
appimage_is_host_library() {
    local lib_name
    lib_name="$(basename "$1")"

    case "$lib_name" in
        linux-vdso.so.*|linux-gate.so.*|ld-linux*.so*|ld64.so*|ld-musl-*.so*|libc.so*|libpthread.so*|libm.so*|libdl.so*|librt.so*|libresolv.so*|libutil.so*|libanl.so*|libnsl.so*|libBrokenLocale.so*|libcrypt.so*|libnss_*.so*|\
        libGL.so*|libOpenGL.so*|libGLX*.so*|libGLU.so*|libEGL.so*|libGLES*.so*|libGLdispatch.so*|libdrm.so*|libdrm_*.so*|libgbm.so*|libwayland-*.so*|libxcb*.so*|libX11.so*|libX11-xcb.so*|libXau.so*|libXdmcp.so*|libXext.so*|libXdamage.so*|libXfixes.so*|libXcomposite.so*|libXrender.so*|libXrandr.so*|libXcursor.so*|libXi.so*|libXinerama.so*|libxshmfence.so*|libxkbcommon.so*|libxkbcommon-x11.so*|libSM.so*|libICE.so*|libudev.so*|libasound.so*|libpulse.so*|libpulsecommon*.so*|libjack.so*|libpipewire-*.so*|libvulkan.so*|libva.so*|libva-*.so*|\
        libgtk-*.so*|libgdk-*.so*|libpango*.so*|libatk-bridge-*.so*|libatk*.so*|libatspi.so*|libcairo*.so*|libgdk_pixbuf-*.so*|libgio-2.0.so*|libgmodule-2.0.so*|libgobject-2.0.so*|libglib-2.0.so*|\
        libgstreamer-1.0.so*|libgst*.so*|libsoup-*.so*|libwebkit2gtk-*.so*|libjavascriptcoregtk-*.so*|libsecret-1.so*|libmanette-0.2.so*|libenchant-2.so*|libhyphen.so*|libtasn1.so*|\
        libfontconfig.so*|libfreetype.so*|libharfbuzz*.so*|libfribidi.so*|libgraphite2.so*|libthai.so*|libdatrie.so*|libepoxy.so*|libpixman-1.so*|\
        libstdc++.so*|libgcc_s.so*|libatomic.so*|libdbus-1.so*|libuuid.so*|libffi.so*|libselinux.so*|libmount.so*|libblkid.so*|libpcre2-*.so*|libsystemd.so*|libcap.so*|libseccomp.so*|\
        liborc-0.4.so*|libgudev-1.0.so*)
            return 0
            ;;
        *)
            return 1
            ;;
    esac
}

appimage_is_elf_file() {
    file -b "$1" 2>/dev/null | grep -q '^ELF '
}

appimage_list_direct_dependencies() {
    local target="$1"
    local line dep target_real target_dir ldd_library_path

    target_real="$(readlink -f "$target" 2>/dev/null || printf '%s' "$target")"
    target_dir="$(dirname "$target_real")"
    ldd_library_path="$target_dir"
    if [[ -n "${LD_LIBRARY_PATH:-}" ]]; then
        ldd_library_path+="${ldd_library_path:+:}${LD_LIBRARY_PATH}"
    fi

    while IFS= read -r line; do
        if [[ "$line" == *"=> not found"* ]]; then
            echo "MISSING:${line%% *}"
            continue
        fi

        dep=""
        if [[ "$line" == *"=>"* ]]; then
            dep="$(printf '%s\n' "$line" | sed -n 's/.*=> \(\/[^ ]*\).*/\1/p')"
        elif [[ "$line" =~ ^[[:space:]]/ ]]; then
            dep="$(printf '%s\n' "$line" | awk '{print $1}')"
        fi

        if [[ -n "$dep" ]]; then
            echo "$dep"
        fi
    done < <(LD_LIBRARY_PATH="$ldd_library_path" ldd "$target" 2>/dev/null || true)
}
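A usage sketch, assuming the file is sourced as a helper library; the binary path below is taken from the Dockerfile's ENTRYPOINT and is illustrative:

#!/usr/bin/env bash
# Classify a binary's direct dependencies into host-resolved vs. bundled.
source scripts/appimage_lib_policy.sh

while IFS= read -r dep; do
    if [[ "$dep" == MISSING:* ]]; then
        echo "unresolved: ${dep#MISSING:}"
    elif appimage_is_host_library "$dep"; then
        echo "host:       $dep"
    else
        echo "bundle:     $dep"
    fi
done < <(appimage_list_direct_dependencies build/package/bin/orca-slicer)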
713  scripts/apply_pjarczak_linux_bridge_orca.py  Normal file
@@ -0,0 +1,713 @@
#!/usr/bin/env python3
from pathlib import Path
import sys

def read(p: Path) -> str:
    return p.read_text(encoding='utf-8')

def write(p: Path, s: str) -> None:
    p.write_text(s, encoding='utf-8')

def replace_once(s: str, old: str, new: str, path: Path) -> str:
    if old not in s:
        raise RuntimeError(f"missing expected snippet in {path}: {old[:120]!r}")
    return s.replace(old, new, 1)

def replace_function_body(s: str, signature: str, new_body: str, path: Path) -> str:
    # Find the function's opening brace, then scan forward with a brace
    # counter until the matching closing brace; splice in the new body.
    idx = s.find(signature)
    if idx < 0:
        raise RuntimeError(f"signature not found in {path}: {signature}")
    brace = s.find('{', idx)
    if brace < 0:
        raise RuntimeError(f"opening brace not found in {path}: {signature}")
    depth = 0
    end = brace
    while end < len(s):
        ch = s[end]
        if ch == '{':
            depth += 1
        elif ch == '}':
            depth -= 1
            if depth == 0:
                end += 1
                break
        end += 1
    if depth != 0:
        raise RuntimeError(f"unterminated function body in {path}: {signature}")
    return s[:brace] + new_body + s[end:]

BBL_INIT = r'''{
    clear_load_error();

    std::string library;
    std::string data_dir_str = data_dir();
    boost::filesystem::path data_dir_path(data_dir_str);
    auto plugin_folder = data_dir_path / "plugins";

    if (using_backup) {
        plugin_folder = plugin_folder / "backup";
    }

    const bool pj_bridge = Slic3r::PJarczakLinuxBridge::enabled();

    if (pj_bridge) {
#if defined(_MSC_VER) || defined(_WIN32)
        _putenv_s("PJARCZAK_BAMBU_PLUGIN_DIR", plugin_folder.string().c_str());
        _putenv_s("PJARCZAK_EXPECTED_BAMBU_NETWORK_VERSION", version.c_str());
#else
        setenv("PJARCZAK_BAMBU_PLUGIN_DIR", plugin_folder.string().c_str(), 1);
        setenv("PJARCZAK_EXPECTED_BAMBU_NETWORK_VERSION", version.c_str(), 1);
#endif
    }

    if (version.empty()) {
        BOOST_LOG_TRIVIAL(error) << __FUNCTION__ << ": version is required but not provided";
        set_load_error(
            "Network library version not specified",
            "A version must be specified to load the network library",
            ""
        );
        return -1;
    }

#if defined(_MSC_VER) || defined(_WIN32)
    if (pj_bridge) {
        library = Slic3r::PJarczakLinuxBridge::bridge_network_library_path(plugin_folder);
        wchar_t lib_wstr[512];
        memset(lib_wstr, 0, sizeof(lib_wstr));
        ::MultiByteToWideChar(CP_UTF8, 0, library.c_str(), int(library.size()) + 1, lib_wstr, int(sizeof(lib_wstr) / sizeof(lib_wstr[0])));
        m_networking_module = LoadLibrary(lib_wstr);
    } else {
        library = plugin_folder.string() + "\\" + std::string(BAMBU_NETWORK_LIBRARY) + "_" + version + ".dll";
        wchar_t lib_wstr[256];
        memset(lib_wstr, 0, sizeof(lib_wstr));
        ::MultiByteToWideChar(CP_UTF8, NULL, library.c_str(), strlen(library.c_str()) + 1, lib_wstr, sizeof(lib_wstr) / sizeof(lib_wstr[0]));
        m_networking_module = LoadLibrary(lib_wstr);
        if (!m_networking_module) {
            std::string library_path = get_libpath_in_current_directory(std::string(BAMBU_NETWORK_LIBRARY));
            if (library_path.empty()) {
                set_load_error(
                    "Network library not found",
                    "Could not locate versioned library: " + library,
                    library
                );
                return -1;
            }
            memset(lib_wstr, 0, sizeof(lib_wstr));
            ::MultiByteToWideChar(CP_UTF8, NULL, library_path.c_str(), strlen(library_path.c_str()) + 1, lib_wstr, sizeof(lib_wstr) / sizeof(lib_wstr[0]));
            m_networking_module = LoadLibrary(lib_wstr);
        }
    }
#else
    if (pj_bridge) {
        library = Slic3r::PJarczakLinuxBridge::bridge_network_library_path(plugin_folder);
        m_networking_module = dlopen(library.c_str(), RTLD_LAZY);
    } else {
#if defined(__WXMAC__)
        std::string lib_ext = ".dylib";
#else
        std::string lib_ext = ".so";
#endif
        library = plugin_folder.string() + "/" + std::string("lib") + std::string(BAMBU_NETWORK_LIBRARY) + "_" + version + lib_ext;
        m_networking_module = dlopen(library.c_str(), RTLD_LAZY);
        if (!m_networking_module) {
            char* dll_error = dlerror();
            BOOST_LOG_TRIVIAL(error) << __FUNCTION__ << ": dlopen failed: " << (dll_error ? dll_error : "unknown error");
            set_load_error(
                "Failed to load network library",
                dll_error ? std::string(dll_error) : "Unknown dlopen error",
                library
            );
        }
    }
#endif

    if (!m_networking_module) {
        if (!m_load_error.has_error) {
            set_load_error(
                "Network library failed to load",
                "LoadLibrary/dlopen returned null",
                library
            );
        }
        return -1;
    }

    InitFTModule(m_networking_module);

    load_all_function_pointers();

    m_use_legacy_network = NetworkAgent::use_legacy_network;

    std::string loaded_version;
    if (m_get_version) {
        loaded_version = m_get_version();
    }

    BOOST_LOG_TRIVIAL(info) << "BBLNetworkPlugin::initialize: legacy_mode="
                            << (m_use_legacy_network ? "true" : "false")
                            << ", bridge_mode=" << (pj_bridge ? "true" : "false")
                            << ", library=" << library
                            << ", version=" << (loaded_version.empty() ? "unknown" : loaded_version)
                            << ", send_message=" << (m_send_message ? "loaded" : "null")
                            << ", start_print=" << (m_start_print ? "loaded" : "null")
                            << ", start_local_print=" << (m_start_local_print ? "loaded" : "null");

    return 0;
}'''

BBL_UNLOAD = r'''{
    UnloadFTModule();

#if defined(_MSC_VER) || defined(_WIN32)
    const bool same_handles = m_source_module && (m_source_module == m_networking_module);
    if (m_source_module && !same_handles) {
        FreeLibrary(m_source_module);
        m_source_module = NULL;
    }
    if (m_networking_module) {
        FreeLibrary(m_networking_module);
        m_networking_module = NULL;
    }
#else
    const bool same_handles = m_source_module && (m_source_module == m_networking_module);
    if (m_source_module && !same_handles) {
        dlclose(m_source_module);
        m_source_module = NULL;
    }
    if (m_networking_module) {
        dlclose(m_networking_module);
        m_networking_module = NULL;
    }
#endif

    m_source_module = NULL;
    clear_all_function_pointers();

    return 0;
}'''

BBL_SOURCE = r'''{
    if ((m_source_module) || (!m_networking_module))
        return m_source_module;

    if (Slic3r::PJarczakLinuxBridge::enabled() && Slic3r::PJarczakLinuxBridge::source_module_is_network_module()) {
        m_source_module = m_networking_module;
        return m_source_module;
    }

    std::string library;
    std::string data_dir_str = data_dir();
    boost::filesystem::path data_dir_path(data_dir_str);
    auto plugin_folder = data_dir_path / "plugins";

#if defined(_MSC_VER) || defined(_WIN32)
    wchar_t lib_wstr[128];

    library = plugin_folder.string() + "/" + std::string(BAMBU_SOURCE_LIBRARY) + ".dll";
    memset(lib_wstr, 0, sizeof(lib_wstr));
    ::MultiByteToWideChar(CP_UTF8, NULL, library.c_str(), strlen(library.c_str())+1, lib_wstr, sizeof(lib_wstr) / sizeof(lib_wstr[0]));
    m_source_module = LoadLibrary(lib_wstr);
    if (!m_source_module) {
        std::string library_path = get_libpath_in_current_directory(std::string(BAMBU_SOURCE_LIBRARY));
        if (library_path.empty()) {
            return m_source_module;
        }
        memset(lib_wstr, 0, sizeof(lib_wstr));
        ::MultiByteToWideChar(CP_UTF8, NULL, library_path.c_str(), strlen(library_path.c_str()) + 1, lib_wstr, sizeof(lib_wstr) / sizeof(lib_wstr[0]));
        m_source_module = LoadLibrary(lib_wstr);
    }
#else
#if defined(__WXMAC__)
    library = plugin_folder.string() + "/" + std::string("lib") + std::string(BAMBU_SOURCE_LIBRARY) + ".dylib";
#else
    library = plugin_folder.string() + "/" + std::string("lib") + std::string(BAMBU_SOURCE_LIBRARY) + ".so";
#endif
    m_source_module = dlopen(library.c_str(), RTLD_LAZY);
#endif

    return m_source_module;
}'''

GUI_DOWNLOAD = r'''{
    int result = 0;
    std::string err_msg;

    AppConfig* app_config = wxGetApp().app_config;
    if (!app_config) {
        return -1;
    }

    m_networking_cancel_update = false;
    fs::path target_file_path = (fs::temp_directory_path() / package_name);
    fs::path tmp_path = target_file_path;
    tmp_path += format(".%1%%2%", get_current_pid(), ".tmp");

    const bool pj_force_linux_payload = Slic3r::PJarczakLinuxBridge::should_force_linux_plugin_payload(name);
    std::map<std::string, std::string> saved_headers = Slic3r::Http::get_extra_headers();
    bool changed_headers = false;

    auto restore_headers = [&]() {
        if (changed_headers) {
            Slic3r::Http::set_extra_headers(saved_headers);
            changed_headers = false;
        }
    };

    if (pj_force_linux_payload) {
        auto headers = saved_headers;
        headers["X-BBL-OS-Type"] = Slic3r::PJarczakLinuxBridge::forced_download_os_type();
        Slic3r::Http::set_extra_headers(headers);
        changed_headers = true;
    }

    std::string url = get_plugin_url(name, app_config->get_country_code());
    std::string download_url;
    Slic3r::Http http_url = Slic3r::Http::get(url);
    BOOST_LOG_TRIVIAL(info) << "[download_plugin]: check the plugin from " << url;
    http_url.timeout_connect(TIMEOUT_CONNECT)
        .timeout_max(TIMEOUT_RESPONSE)
        .on_complete(
            [&download_url](std::string body, unsigned status) {
                try {
                    json j = json::parse(body);
                    std::string message = j["message"].get<std::string>();
                    if (message == "success") {
                        json resource = j.at("resources");
                        if (resource.is_array()) {
                            for (auto iter = resource.begin(); iter != resource.end(); iter++) {
                                for (auto sub_iter = iter.value().begin(); sub_iter != iter.value().end(); sub_iter++) {
                                    if (boost::iequals(sub_iter.key(), "url"))
                                        download_url = sub_iter.value();
                                }
                            }
                        }
                    }
                } catch (...) {}
            })
        .on_error(
            [&result, &err_msg](std::string body, std::string error, unsigned int status) {
                BOOST_LOG_TRIVIAL(error) << "[download_plugin 1] on_error: " << error << ", body = " << body;
                err_msg += "[download_plugin 1] on_error: " + error + ", body = " + body;
                result = -1;
            })
        .perform_sync();

    restore_headers();

    bool cancel = false;
    if (result < 0) {
        if (pro_fn) pro_fn(InstallStatusDownloadFailed, 0, cancel);
        return result;
    }

    if (download_url.empty()) {
        if (pro_fn) pro_fn(InstallStatusDownloadFailed, 0, cancel);
        return -1;
    } else if (pro_fn) {
        pro_fn(InstallStatusNormal, 5, cancel);
    }

    if (m_networking_cancel_update || cancel) {
        return -1;
    }

    BOOST_LOG_TRIVIAL(info) << "[download_plugin] get_url = " << download_url;

    Slic3r::Http http = Slic3r::Http::get(download_url);
    int reported_percent = 0;
    http.on_progress(
        [this, &pro_fn, cancel_fn, &result, &reported_percent, &err_msg](Slic3r::Http::Progress progress, bool& cancel) {
            int percent = 0;
            if (progress.dltotal != 0)
                percent = progress.dlnow * 50 / progress.dltotal;
            bool was_cancel = false;
            if (pro_fn && ((percent - reported_percent) >= 10)) {
                pro_fn(InstallStatusNormal, percent, was_cancel);
                reported_percent = percent;
                BOOST_LOG_TRIVIAL(info) << "[download_plugin 2] progress: " << reported_percent;
            }
            cancel = m_networking_cancel_update || was_cancel;
            if (cancel_fn && cancel_fn())
                cancel = true;
            if (cancel) {
                err_msg += "[download_plugin] cancel";
                result = -1;
            }
        })
        .on_complete([&pro_fn, tmp_path, target_file_path](std::string body, unsigned status) {
            bool cancel = false;
            fs::fstream file(tmp_path, std::ios::out | std::ios::binary | std::ios::trunc);
            file.write(body.c_str(), body.size());
            file.close();
            fs::rename(tmp_path, target_file_path);
            if (pro_fn) pro_fn(InstallStatusDownloadCompleted, 80, cancel);
        })
        .on_error([&pro_fn, &result, &err_msg](std::string body, std::string error, unsigned int status) {
            bool cancel = false;
            if (pro_fn) pro_fn(InstallStatusDownloadFailed, 0, cancel);
            BOOST_LOG_TRIVIAL(error) << "[download_plugin 2] on_error: " << error << ", body = " << body;
            err_msg += "[download_plugin 2] on_error: " + error + ", body = " + body;
            result = -1;
        });

    http.perform_sync();
    return result;
}'''

GUI_INSTALL = r'''{
    bool cancel = false;
    std::string target_file_path = (fs::temp_directory_path() / package_name).string();

    BOOST_LOG_TRIVIAL(info) << "[install_plugin] enter";
    std::string data_dir_str = data_dir();
    boost::filesystem::path data_dir_path(data_dir_str);
    auto plugin_folder = data_dir_path / name;
    auto backup_folder = plugin_folder / "backup";
    if (!boost::filesystem::exists(plugin_folder))
        boost::filesystem::create_directory(plugin_folder);
    if (!boost::filesystem::exists(backup_folder))
        boost::filesystem::create_directory(backup_folder);

    if (m_networking_cancel_update)
        return -1;
    if (pro_fn)
        pro_fn(InstallStatusNormal, 50, cancel);

    mz_zip_archive archive;
    mz_zip_zero_struct(&archive);
    if (!open_zip_reader(&archive, target_file_path)) {
        if (pro_fn) pro_fn(InstallStatusDownloadFailed, 0, cancel);
        return InstallStatusUnzipFailed;
    }

    const bool pj_force_linux_payload = Slic3r::PJarczakLinuxBridge::should_force_linux_plugin_payload(name);
    const std::string manifest_name = Slic3r::PJarczakLinuxBridge::linux_payload_manifest_file_name();

    mz_uint num_entries = mz_zip_reader_get_num_files(&archive);
    mz_zip_archive_file_stat stat;
    for (mz_uint i = 0; i < num_entries; i++) {
        if (m_networking_cancel_update || cancel) {
            close_zip_reader(&archive);
            return -1;
        }
        if (!mz_zip_reader_file_stat(&archive, i, &stat))
            continue;
        if (stat.m_uncomp_size == 0)
            continue;

        std::string dest_file;
        if (stat.m_is_utf8) {
            dest_file = stat.m_filename;
        } else {
            std::string extra(1024, 0);
            size_t n = mz_zip_reader_get_extra(&archive, stat.m_file_index, extra.data(), extra.size());
            dest_file = decode(extra.substr(0, n), stat.m_filename);
        }

        boost::filesystem::path relative(dest_file);
        if (pj_force_linux_payload) {
            const std::string file_name = relative.filename().string();
            if (!(file_name == manifest_name || Slic3r::PJarczakLinuxBridge::is_linux_payload_filename(file_name)))
                continue;
            relative = boost::filesystem::path(file_name);
        }

        auto dest_path = plugin_folder / relative;
        boost::filesystem::create_directories(dest_path.parent_path());
        std::string dest_zip_file = encode_path(dest_path.string().c_str());

        try {
            if (fs::exists(dest_path))
                fs::remove(dest_path);
            mz_bool res = 0;
#ifndef WIN32
            if (S_ISLNK(stat.m_external_attr >> 16)) {
                std::string link(stat.m_uncomp_size + 1, 0);
                res = mz_zip_reader_extract_to_mem(&archive, stat.m_file_index, link.data(), stat.m_uncomp_size, 0);
                try {
                    boost::filesystem::create_symlink(link, dest_path);
                } catch (const std::exception &) {}
            } else {
#endif
            res = mz_zip_reader_extract_to_file(&archive, stat.m_file_index, dest_zip_file.c_str(), 0);
#ifndef WIN32
            }
#endif
            if (res == 0) {
#ifdef WIN32
                std::wstring new_dest_zip_file = boost::locale::conv::utf_to_utf<wchar_t>(dest_path.generic_string());
                res = mz_zip_reader_extract_to_file_w(&archive, stat.m_file_index, new_dest_zip_file.c_str(), 0);
#endif
                if (res == 0) {
                    close_zip_reader(&archive);
                    if (pro_fn) pro_fn(InstallStatusUnzipFailed, 0, cancel);
                    return InstallStatusUnzipFailed;
                }
            }

            if (pj_force_linux_payload && relative.filename().string() != manifest_name) {
                std::string validate_reason;
                if (!Slic3r::PJarczakLinuxBridge::validate_linux_payload_file(dest_path.string(), &validate_reason)) {
                    BOOST_LOG_TRIVIAL(error) << "[install_plugin] linux payload validation failed for " << dest_path.string() << ": " << validate_reason;
                    close_zip_reader(&archive);
                    if (pro_fn) pro_fn(InstallStatusUnzipFailed, 0, cancel);
                    return InstallStatusUnzipFailed;
                }
            }
        } catch (const std::exception &) {
            close_zip_reader(&archive);
            if (pro_fn) pro_fn(InstallStatusUnzipFailed, 0, cancel);
            return InstallStatusUnzipFailed;
        }
    }

    close_zip_reader(&archive);

    if (pj_force_linux_payload) {
        std::string validate_reason;
        const auto manifest_path = plugin_folder / manifest_name;
        if (boost::filesystem::exists(manifest_path) &&
            !Slic3r::PJarczakLinuxBridge::validate_linux_payload_set_against_manifest(plugin_folder, &validate_reason)) {
            BOOST_LOG_TRIVIAL(error) << "[install_plugin] manifest validation failed: " << validate_reason;
            if (pro_fn) pro_fn(InstallStatusUnzipFailed, 0, cancel);
            return InstallStatusUnzipFailed;
        }
    }

    {
        fs::path dir_path(plugin_folder);
        if (fs::exists(dir_path) && fs::is_directory(dir_path)) {
            int file_count = 0, file_index = 0;
            for (fs::directory_iterator it(dir_path); it != fs::directory_iterator(); ++it) {
                if (it->path().string() == backup_folder.string())
                    continue;
                if (fs::is_regular_file(it->status()))
                    ++file_count;
            }
            for (fs::directory_iterator it(dir_path); it != fs::directory_iterator(); ++it) {
                if (it->path().string() == backup_folder.string())
                    continue;
                auto dest_path = backup_folder.string() + "/" + it->path().filename().string();
                if (fs::is_regular_file(it->status())) {
                    try {
                        if (file_count > 0 && pro_fn)
                            pro_fn(InstallStatusNormal, 50 + file_index / file_count, cancel);
                        ++file_index;
                        if (fs::exists(dest_path))
                            fs::remove(dest_path);
                        std::string error_message;
                        CopyFileResult cfr = copy_file(it->path().string(), dest_path, error_message, false);
                        if (cfr != CopyFileResult::SUCCESS)
                            BOOST_LOG_TRIVIAL(error) << "Copying to backup failed(" << cfr << "): " << error_message;
                    } catch (const std::exception &) {}
                } else {
                    copy_framework(it->path().string(), dest_path);
                }
            }
        }
    }

    if (pro_fn)
        pro_fn(InstallStatusInstallCompleted, 100, cancel);
    if (name == "plugins")
        app_config->set_bool("installed_networking", true);
    BOOST_LOG_TRIVIAL(info) << "[install_plugin] success";
    return 0;
}'''

GUI_COPY = r'''{
    if (app_config->get("update_network_plugin") != "true")
        return;

    std::string data_dir_str = data_dir();
    boost::filesystem::path data_dir_path(data_dir_str);
    auto plugin_folder = data_dir_path / "plugins";
    auto cache_folder = data_dir_path / "ota";
    std::string changelog_file = cache_folder.string() + "/network_plugins.json";

    std::string cached_version;
    if (boost::filesystem::exists(changelog_file)) {
        try {
            boost::nowide::ifstream ifs(changelog_file);
            json j;
            ifs >> j;
            if (j.contains("version"))
                cached_version = j["version"];
        } catch (nlohmann::detail::parse_error&) {}
    }

    if (!boost::filesystem::exists(plugin_folder))
        boost::filesystem::create_directory(plugin_folder);

    const bool pj_force_linux_payload = Slic3r::PJarczakLinuxBridge::enabled();
    std::string error_message;

    auto copy_one = [&](const boost::filesystem::path& src, const boost::filesystem::path& dst) -> bool {
        CopyFileResult cfr = copy_file(src.string(), dst.string(), error_message, false);
        if (cfr != CopyFileResult::SUCCESS) {
            BOOST_LOG_TRIVIAL(error) << __FUNCTION__ << ": Copying failed(" << cfr << "): " << error_message;
            return false;
        }
        static constexpr const auto perms = fs::owner_read | fs::owner_write | fs::group_read | fs::others_read;
        fs::permissions(dst, perms);
        return true;
    };

    if (pj_force_linux_payload) {
        for (const auto& file_name : {
                 Slic3r::PJarczakLinuxBridge::linux_network_library_name(),
                 Slic3r::PJarczakLinuxBridge::linux_source_library_name(),
                 "liblive555.so",
                 Slic3r::PJarczakLinuxBridge::linux_payload_manifest_file_name() }) {
            const auto src = cache_folder / file_name;
            if (!boost::filesystem::exists(src))
                continue;
            if (file_name != Slic3r::PJarczakLinuxBridge::linux_payload_manifest_file_name()) {
                std::string validate_reason;
                if (!Slic3r::PJarczakLinuxBridge::validate_linux_payload_file(src.string(), &validate_reason)) {
                    BOOST_LOG_TRIVIAL(error) << __FUNCTION__ << ": invalid linux payload " << src.string() << ": " << validate_reason;
                    continue;
                }
            }
            if (!copy_one(src, plugin_folder / file_name))
                return;
            fs::remove(src);
        }

        const auto manifest = plugin_folder / Slic3r::PJarczakLinuxBridge::linux_payload_manifest_file_name();
        if (boost::filesystem::exists(manifest)) {
            std::string validate_reason;
            if (!Slic3r::PJarczakLinuxBridge::validate_linux_payload_set_against_manifest(plugin_folder, &validate_reason)) {
                BOOST_LOG_TRIVIAL(error) << __FUNCTION__ << ": manifest validation failed after copy: " << validate_reason;
                return;
            }
        }

        if (!cached_version.empty()) {
            app_config->set(SETTING_NETWORK_PLUGIN_VERSION, cached_version);
            app_config->save();
        }
        if (boost::filesystem::exists(changelog_file))
            fs::remove(changelog_file);
        app_config->set("update_network_plugin", "false");
        return;
    }

    if (cached_version.empty()) {
        app_config->set("update_network_plugin", "false");
        return;
    }

    std::string network_library, player_library, live555_library, network_library_dst, player_library_dst, live555_library_dst;
#if defined(_MSC_VER) || defined(_WIN32)
    network_library = cache_folder.string() + "/bambu_networking.dll";
    player_library = cache_folder.string() + "/BambuSource.dll";
    live555_library = cache_folder.string() + "/live555.dll";
    network_library_dst = plugin_folder.string() + "/" + std::string(BAMBU_NETWORK_LIBRARY) + "_" + cached_version + ".dll";
    player_library_dst = plugin_folder.string() + "/BambuSource.dll";
    live555_library_dst = plugin_folder.string() + "/live555.dll";
#elif defined(__WXMAC__)
    network_library = cache_folder.string() + "/libbambu_networking.dylib";
    player_library = cache_folder.string() + "/libBambuSource.dylib";
    live555_library = cache_folder.string() + "/liblive555.dylib";
    network_library_dst = plugin_folder.string() + "/lib" + std::string(BAMBU_NETWORK_LIBRARY) + "_" + cached_version + ".dylib";
    player_library_dst = plugin_folder.string() + "/libBambuSource.dylib";
    live555_library_dst = plugin_folder.string() + "/liblive555.dylib";
#else
    network_library = cache_folder.string() + "/libbambu_networking.so";
    player_library = cache_folder.string() + "/libBambuSource.so";
    live555_library = cache_folder.string() + "/liblive555.so";
    network_library_dst = plugin_folder.string() + "/lib" + std::string(BAMBU_NETWORK_LIBRARY) + "_" + cached_version + ".so";
    player_library_dst = plugin_folder.string() + "/libBambuSource.so";
    live555_library_dst = plugin_folder.string() + "/liblive555.so";
#endif

    if (boost::filesystem::exists(network_library)) {
        if (!copy_one(network_library, network_library_dst))
            return;
        fs::remove(network_library);
        app_config->set(SETTING_NETWORK_PLUGIN_VERSION, cached_version);
        app_config->save();
    }

    if (boost::filesystem::exists(player_library)) {
        if (!copy_one(player_library, player_library_dst))
            return;
        fs::remove(player_library);
    }

    if (boost::filesystem::exists(live555_library)) {
        if (!copy_one(live555_library, live555_library_dst))
            return;
        fs::remove(live555_library);
    }

    if (boost::filesystem::exists(changelog_file))
        fs::remove(changelog_file);
    app_config->set("update_network_plugin", "false");
}'''

def patch_cmake(repo: Path):
    path = repo / "src/slic3r/CMakeLists.txt"
    s = read(path)
    if 'add_subdirectory(Utils/PJarczakLinuxBridge)' not in s:
        s = replace_once(
            s,
            'add_subdirectory(GUI/DeviceCore)\nadd_subdirectory(GUI/DeviceTab)\n',
            'add_subdirectory(GUI/DeviceCore)\nadd_subdirectory(GUI/DeviceTab)\nadd_subdirectory(Utils/PJarczakLinuxBridge)\n',
            path
        )
    if 'Utils/PJarczakLinuxBridge/PJarczakLinuxBridgeConfig.cpp' not in s:
        s = replace_once(
            s,
            ' Utils/bambu_networking.hpp\n',
            ' Utils/bambu_networking.hpp\n Utils/PJarczakLinuxBridge/PJarczakLinuxBridgeConfig.cpp\n',
            path
        )
    write(path, s)

def patch_plugin_cpp(repo: Path):
    path = repo / "src/slic3r/Utils/BBLNetworkPlugin.cpp"
    s = read(path)
    if '#include "PJarczakLinuxBridge/PJarczakLinuxBridgeConfig.hpp"\n' not in s:
        s = replace_once(
            s,
            '#include "NetworkAgent.hpp"\n',
            '#include "NetworkAgent.hpp"\n#include "PJarczakLinuxBridge/PJarczakLinuxBridgeConfig.hpp"\n',
            path
        )
    s = replace_function_body(s, 'int BBLNetworkPlugin::initialize(bool using_backup, const std::string& version)\n', BBL_INIT, path)
    s = replace_function_body(s, 'int BBLNetworkPlugin::unload()\n', BBL_UNLOAD, path)
    s = replace_function_body(s, '#if defined(_MSC_VER) || defined(_WIN32)\nHMODULE BBLNetworkPlugin::get_source_module()\n#else\nvoid* BBLNetworkPlugin::get_source_module()\n#endif\n', BBL_SOURCE, path)
    write(path, s)

def patch_gui_app(repo: Path):
    path = repo / "src/slic3r/GUI/GUI_App.cpp"
    s = read(path)
    if '#include "slic3r/Utils/PJarczakLinuxBridge/PJarczakLinuxBridgeConfig.hpp"\n' not in s:
        s = replace_once(
            s,
            '#include "slic3r/Utils/bambu_networking.hpp"\n',
            '#include "slic3r/Utils/bambu_networking.hpp"\n#include "slic3r/Utils/PJarczakLinuxBridge/PJarczakLinuxBridgeConfig.hpp"\n',
            path
        )
    s = replace_function_body(s, 'int GUI_App::download_plugin(std::string name, std::string package_name, InstallProgressFn pro_fn, WasCancelledFn cancel_fn)\n', GUI_DOWNLOAD, path)
    s = replace_function_body(s, 'int GUI_App::install_plugin(std::string name, std::string package_name, InstallProgressFn pro_fn, WasCancelledFn cancel_fn)\n', GUI_INSTALL, path)
    s = replace_function_body(s, 'void GUI_App::copy_network_if_available()\n', GUI_COPY, path)
    write(path, s)

def main():
    if len(sys.argv) != 2:
        print("usage: apply_pjarczak_linux_bridge_orca.py /path/to/OrcaSlicer")
        raise SystemExit(2)

    repo = Path(sys.argv[1]).resolve()
    patch_cmake(repo)
    patch_plugin_cpp(repo)
    patch_gui_app(repo)
    print("patched:", repo)

if __name__ == "__main__":
    main()
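The patcher is intended to be rerunnable: the `not in s` guards skip includes and CMake entries that are already present, and replace_function_body rewrites each body wholesale, so running it again on an already patched tree should be a no-op as long as the expected signatures still match. Usage, as printed by the script itself (the checkout path below is illustrative):

python3 scripts/apply_pjarczak_linux_bridge_orca.py ~/src/OrcaSlicer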
277  scripts/auto-close-duplicates.ts  Normal file
@@ -0,0 +1,277 @@
#!/usr/bin/env bun

declare global {
  var process: {
    env: Record<string, string | undefined>;
  };
}

interface GitHubIssue {
  number: number;
  title: string;
  user: { id: number };
  created_at: string;
}

interface GitHubComment {
  id: number;
  body: string;
  created_at: string;
  user: { type: string; id: number };
}

interface GitHubReaction {
  user: { id: number };
  content: string;
}

async function githubRequest<T>(endpoint: string, token: string, method: string = 'GET', body?: any): Promise<T> {
  const response = await fetch(`https://api.github.com${endpoint}`, {
    method,
    headers: {
      Authorization: `Bearer ${token}`,
      Accept: "application/vnd.github.v3+json",
      "User-Agent": "auto-close-duplicates-script",
      ...(body && { "Content-Type": "application/json" }),
    },
    ...(body && { body: JSON.stringify(body) }),
  });

  if (!response.ok) {
    throw new Error(
      `GitHub API request failed: ${response.status} ${response.statusText}`
    );
  }

  return response.json();
}

function extractDuplicateIssueNumber(commentBody: string): number | null {
  // Try to match #123 format first
  let match = commentBody.match(/#(\d+)/);
  if (match) {
    return parseInt(match[1], 10);
  }

  // Try to match GitHub issue URL format: https://github.com/owner/repo/issues/123
  match = commentBody.match(/github\.com\/[^\/]+\/[^\/]+\/issues\/(\d+)/);
  if (match) {
    return parseInt(match[1], 10);
  }

  return null;
}


async function closeIssueAsDuplicate(
  owner: string,
  repo: string,
  issueNumber: number,
  duplicateOfNumber: number,
  token: string
): Promise<void> {
  await githubRequest(
    `/repos/${owner}/${repo}/issues/${issueNumber}`,
    token,
    'PATCH',
    {
      state: 'closed',
      state_reason: 'duplicate',
      labels: ['duplicate']
    }
  );

  await githubRequest(
    `/repos/${owner}/${repo}/issues/${issueNumber}/comments`,
    token,
    'POST',
    {
      body: `This issue has been automatically closed as a duplicate of #${duplicateOfNumber}.

If this is incorrect, please re-open this issue or create a new one.

🤖 Generated with [Claude Code](https://claude.ai/code)`
    }
  );

}

async function autoCloseDuplicates(): Promise<void> {
  console.log("[DEBUG] Starting auto-close duplicates script");

  const token = process.env.GITHUB_TOKEN;
  if (!token) {
    throw new Error("GITHUB_TOKEN environment variable is required");
  }
  console.log("[DEBUG] GitHub token found");

  const owner = process.env.GITHUB_REPOSITORY_OWNER || "OrcaSlicer";
  const repo = process.env.GITHUB_REPOSITORY_NAME || "OrcaSlicer";
  console.log(`[DEBUG] Repository: ${owner}/${repo}`);

  const threeDaysAgo = new Date();
  threeDaysAgo.setDate(threeDaysAgo.getDate() - 3);
  console.log(
    `[DEBUG] Checking for duplicate comments older than: ${threeDaysAgo.toISOString()}`
  );

  console.log("[DEBUG] Fetching open issues created more than 3 days ago...");
  const allIssues: GitHubIssue[] = [];
  let page = 1;
  const perPage = 100;

  while (true) {
    const pageIssues: GitHubIssue[] = await githubRequest(
      `/repos/${owner}/${repo}/issues?state=open&per_page=${perPage}&page=${page}`,
      token
    );

    if (pageIssues.length === 0) break;

    // Filter for issues created more than 3 days ago
    const oldEnoughIssues = pageIssues.filter(issue =>
      new Date(issue.created_at) <= threeDaysAgo
    );

    allIssues.push(...oldEnoughIssues);
    page++;

    // Safety limit to avoid infinite loops
    if (page > 20) break;
  }

  const issues = allIssues;
  console.log(`[DEBUG] Found ${issues.length} open issues`);

  let processedCount = 0;
  let candidateCount = 0;

  for (const issue of issues) {
    processedCount++;
    console.log(
      `[DEBUG] Processing issue #${issue.number} (${processedCount}/${issues.length}): ${issue.title}`
    );

    console.log(`[DEBUG] Fetching comments for issue #${issue.number}...`);
    const comments: GitHubComment[] = await githubRequest(
      `/repos/${owner}/${repo}/issues/${issue.number}/comments`,
      token
    );
    console.log(
      `[DEBUG] Issue #${issue.number} has ${comments.length} comments`
    );

    const dupeComments = comments.filter(
      (comment) =>
        comment.body.includes("Found") &&
        comment.body.includes("possible duplicate") &&
        comment.user.type === "Bot"
    );
    console.log(
      `[DEBUG] Issue #${issue.number} has ${dupeComments.length} duplicate detection comments`
    );

    if (dupeComments.length === 0) {
      console.log(
        `[DEBUG] Issue #${issue.number} - no duplicate comments found, skipping`
      );
      continue;
    }

    const lastDupeComment = dupeComments[dupeComments.length - 1];
    const dupeCommentDate = new Date(lastDupeComment.created_at);
    console.log(
      `[DEBUG] Issue #${
        issue.number
      } - most recent duplicate comment from: ${dupeCommentDate.toISOString()}`
    );

    if (dupeCommentDate > threeDaysAgo) {
      console.log(
        `[DEBUG] Issue #${issue.number} - duplicate comment is too recent, skipping`
      );
      continue;
    }
    console.log(
      `[DEBUG] Issue #${
        issue.number
      } - duplicate comment is old enough (${Math.floor(
        (Date.now() - dupeCommentDate.getTime()) / (1000 * 60 * 60 * 24)
      )} days)`
    );

    const commentsAfterDupe = comments.filter(
      (comment) => new Date(comment.created_at) > dupeCommentDate
    );
    console.log(
      `[DEBUG] Issue #${issue.number} - ${commentsAfterDupe.length} comments after duplicate detection`
    );

    if (commentsAfterDupe.length > 0) {
      console.log(
        `[DEBUG] Issue #${issue.number} - has activity after duplicate comment, skipping`
      );
      continue;
    }

    console.log(
      `[DEBUG] Issue #${issue.number} - checking reactions on duplicate comment...`
    );
    const reactions: GitHubReaction[] = await githubRequest(
      `/repos/${owner}/${repo}/issues/comments/${lastDupeComment.id}/reactions`,
      token
    );
    console.log(
      `[DEBUG] Issue #${issue.number} - duplicate comment has ${reactions.length} reactions`
    );

    const authorThumbsDown = reactions.some(
      (reaction) =>
        reaction.user.id === issue.user.id && reaction.content === "-1"
    );
    console.log(
      `[DEBUG] Issue #${issue.number} - author thumbs down reaction: ${authorThumbsDown}`
    );

    if (authorThumbsDown) {
      console.log(
        `[DEBUG] Issue #${issue.number} - author disagreed with duplicate detection, skipping`
      );
      continue;
    }

    const duplicateIssueNumber = extractDuplicateIssueNumber(lastDupeComment.body);
    if (!duplicateIssueNumber) {
      console.log(
        `[DEBUG] Issue #${issue.number} - could not extract duplicate issue number from comment, skipping`
      );
      continue;
    }

    candidateCount++;
    const issueUrl = `https://github.com/${owner}/${repo}/issues/${issue.number}`;

    try {
      console.log(
        `[INFO] Auto-closing issue #${issue.number} as duplicate of #${duplicateIssueNumber}: ${issueUrl}`
      );
      await closeIssueAsDuplicate(owner, repo, issue.number, duplicateIssueNumber, token);
      console.log(
        `[SUCCESS] Successfully closed issue #${issue.number} as duplicate of #${duplicateIssueNumber}`
      );
    } catch (error) {
      console.error(
        `[ERROR] Failed to close issue #${issue.number} as duplicate: ${error}`
      );
    }
  }

  console.log(
    `[DEBUG] Script completed. Processed ${processedCount} issues, found ${candidateCount} candidates for auto-close`
  );
}

autoCloseDuplicates().catch(console.error);

// Make it a module
export {};
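A run sketch under Bun, using only the environment variables the script actually reads (the token value is a placeholder):

GITHUB_TOKEN=ghp_... \
GITHUB_REPOSITORY_OWNER=OrcaSlicer \
GITHUB_REPOSITORY_NAME=OrcaSlicer \
bun run scripts/auto-close-duplicates.ts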
213  scripts/backfill-duplicate-comments.ts  Normal file
@@ -0,0 +1,213 @@
#!/usr/bin/env bun

declare global {
  var process: {
    env: Record<string, string | undefined>;
  };
}

interface GitHubIssue {
  number: number;
  title: string;
  state: string;
  state_reason?: string;
  user: { id: number };
  created_at: string;
  closed_at?: string;
}

interface GitHubComment {
  id: number;
  body: string;
  created_at: string;
  user: { type: string; id: number };
}

async function githubRequest<T>(endpoint: string, token: string, method: string = 'GET', body?: any): Promise<T> {
  const response = await fetch(`https://api.github.com${endpoint}`, {
    method,
    headers: {
      Authorization: `Bearer ${token}`,
      Accept: "application/vnd.github.v3+json",
      "User-Agent": "backfill-duplicate-comments-script",
      ...(body && { "Content-Type": "application/json" }),
    },
    ...(body && { body: JSON.stringify(body) }),
  });

  if (!response.ok) {
    throw new Error(
      `GitHub API request failed: ${response.status} ${response.statusText}`
    );
  }

  return response.json();
}

async function triggerDedupeWorkflow(
  owner: string,
  repo: string,
  issueNumber: number,
  token: string,
  dryRun: boolean = true
): Promise<void> {
  if (dryRun) {
    console.log(`[DRY RUN] Would trigger dedupe workflow for issue #${issueNumber}`);
    return;
  }

  await githubRequest(
    `/repos/${owner}/${repo}/actions/workflows/claude-dedupe-issues.yml/dispatches`,
    token,
    'POST',
    {
      ref: 'main',
      inputs: {
        issue_number: issueNumber.toString()
      }
    }
  );
}

async function backfillDuplicateComments(): Promise<void> {
  console.log("[DEBUG] Starting backfill duplicate comments script");

  const token = process.env.GITHUB_TOKEN;
  if (!token) {
    throw new Error(`GITHUB_TOKEN environment variable is required

Usage:
  GITHUB_TOKEN=your_token bun run scripts/backfill-duplicate-comments.ts

Environment Variables:
  GITHUB_TOKEN - GitHub personal access token with repo and actions permissions (required)
  DRY_RUN - Set to "false" to actually trigger workflows (default: true for safety)
  MIN_ISSUE_NUMBER - Only process issues with numbers at or above this value (default: 1)
  MAX_ISSUE_NUMBER - Only process issues with numbers less than this value (default: 11000)`);
  }
  console.log("[DEBUG] GitHub token found");

  const owner = "OrcaSlicer";
  const repo = "OrcaSlicer";
  const dryRun = process.env.DRY_RUN !== "false";
  const maxIssueNumber = parseInt(process.env.MAX_ISSUE_NUMBER || "11000", 10);
  const minIssueNumber = parseInt(process.env.MIN_ISSUE_NUMBER || "1", 10);

  console.log(`[DEBUG] Repository: ${owner}/${repo}`);
  console.log(`[DEBUG] Dry run mode: ${dryRun}`);
  console.log(`[DEBUG] Looking at issues between #${minIssueNumber} and #${maxIssueNumber}`);

  console.log(`[DEBUG] Fetching issues between #${minIssueNumber} and #${maxIssueNumber}...`);
  const allIssues: GitHubIssue[] = [];
  let page = 1;
  const perPage = 100;

  while (true) {
    const pageIssues: GitHubIssue[] = await githubRequest(
      `/repos/${owner}/${repo}/issues?state=all&per_page=${perPage}&page=${page}&sort=created&direction=desc`,
      token
    );

    if (pageIssues.length === 0) break;

    // Filter to only include issues within the specified range
    const filteredIssues = pageIssues.filter(issue =>
      issue.number >= minIssueNumber && issue.number < maxIssueNumber
    );
    allIssues.push(...filteredIssues);

    // If the oldest issue in this page is still above our minimum, we need to continue,
    // but if the oldest issue is below our minimum, we can stop
    const oldestIssueInPage = pageIssues[pageIssues.length - 1];
    if (oldestIssueInPage && oldestIssueInPage.number >= maxIssueNumber) {
      console.log(`[DEBUG] Oldest issue in page #${page} is #${oldestIssueInPage.number}, continuing...`);
    } else if (oldestIssueInPage && oldestIssueInPage.number < minIssueNumber) {
      console.log(`[DEBUG] Oldest issue in page #${page} is #${oldestIssueInPage.number}, below minimum, stopping`);
      break;
    } else if (filteredIssues.length === 0 && pageIssues.length > 0) {
      console.log(`[DEBUG] No issues in page #${page} are in range #${minIssueNumber}-#${maxIssueNumber}, continuing...`);
    }

    page++;

    // Safety limit to avoid infinite loops
    if (page > 200) {
      console.log("[DEBUG] Reached page limit, stopping pagination");
      break;
    }
  }

  console.log(`[DEBUG] Found ${allIssues.length} issues between #${minIssueNumber} and #${maxIssueNumber}`);

  let processedCount = 0;
  let candidateCount = 0;
  let triggeredCount = 0;

  for (const issue of allIssues) {
    processedCount++;
    console.log(
      `[DEBUG] Processing issue #${issue.number} (${processedCount}/${allIssues.length}): ${issue.title}`
    );

    console.log(`[DEBUG] Fetching comments for issue #${issue.number}...`);
    const comments: GitHubComment[] = await githubRequest(
      `/repos/${owner}/${repo}/issues/${issue.number}/comments`,
      token
    );
    console.log(
      `[DEBUG] Issue #${issue.number} has ${comments.length} comments`
    );

    // Look for existing duplicate detection comments (from the dedupe bot)
    const dupeDetectionComments = comments.filter(
      (comment) =>
        comment.body.includes("Found") &&
        comment.body.includes("possible duplicate") &&
        comment.user.type === "Bot"
    );

    console.log(
      `[DEBUG] Issue #${issue.number} has ${dupeDetectionComments.length} duplicate detection comments`
    );

    // Skip if there's already a duplicate detection comment
    if (dupeDetectionComments.length > 0) {
      console.log(
        `[DEBUG] Issue #${issue.number} already has duplicate detection comment, skipping`
      );
      continue;
    }

    candidateCount++;
    const issueUrl = `https://github.com/${owner}/${repo}/issues/${issue.number}`;

    try {
      console.log(
        `[INFO] ${dryRun ? '[DRY RUN] ' : ''}Triggering dedupe workflow for issue #${issue.number}: ${issueUrl}`
      );
      await triggerDedupeWorkflow(owner, repo, issue.number, token, dryRun);

      if (!dryRun) {
        console.log(
          `[SUCCESS] Successfully triggered dedupe workflow for issue #${issue.number}`
        );
      }
      triggeredCount++;
    } catch (error) {
      console.error(
        `[ERROR] Failed to trigger workflow for issue #${issue.number}: ${error}`
      );
    }

    // Add a delay between workflow triggers to avoid overwhelming the system
    await new Promise(resolve => setTimeout(resolve, 1000));
  }

  console.log(
    `[DEBUG] Script completed. Processed ${processedCount} issues, found ${candidateCount} candidates without duplicate comments, ${dryRun ? 'would trigger' : 'triggered'} ${triggeredCount} workflows`
  );
}

backfillDuplicateComments().catch(console.error);

// Make it a module
export {};
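Because DRY_RUN defaults to true, a safe pattern is to preview first and only then dispatch for real; the token and issue-number window below are placeholders:

# preview which issues would get the dedupe workflow
GITHUB_TOKEN=ghp_... bun run scripts/backfill-duplicate-comments.ts
# then actually trigger, restricted to an issue-number window
DRY_RUN=false MIN_ISSUE_NUMBER=4000 MAX_ISSUE_NUMBER=11000 \
GITHUB_TOKEN=ghp_... bun run scripts/backfill-duplicate-comments.ts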
231  scripts/build_flatpak_with_docker.sh  Executable file
@@ -0,0 +1,231 @@
#!/usr/bin/env bash
#
# Build OrcaSlicer Flatpak locally using Docker with the same container image
# as the CI (build_all.yml).
#
# Usage:
#   ./scripts/build_flatpak_with_docker.sh [--arch <x86_64|aarch64>] [--no-debug-info] [--pull]
#
# Requirements:
#   - Docker (or Podman with docker compatibility)
#
# The resulting .flatpak bundle is placed in the project root.

set -euo pipefail
SECONDS=0

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"

# ---------- defaults ----------
ARCH="$(uname -m)"
NO_DEBUG_INFO=false
FORCE_PULL=false
FORCE_CLEAN=true
CONTAINER_IMAGE="ghcr.io/flathub-infra/flatpak-github-actions:gnome-49"

normalize_arch() {
  case "$1" in
    arm64|aarch64)
      echo "aarch64"
      ;;
    x86_64|amd64)
      echo "x86_64"
      ;;
    *)
      echo "$1"
      ;;
  esac
}

# ---------- parse args ----------
while [[ $# -gt 0 ]]; do
  case "$1" in
    --arch)
      ARCH="$2"; shift 2 ;;
    --no-debug-info)
      NO_DEBUG_INFO=true; shift ;;
    --pull)
      FORCE_PULL=true; shift ;;
    --no-pull)
      FORCE_PULL=false; shift ;; # kept for backward compat (now default)
    --keep-build)
      FORCE_CLEAN=false; shift ;;
    --image)
      CONTAINER_IMAGE="$2"; shift 2 ;;
    -h|--help)
      echo "Usage: $0 [--arch <x86_64|aarch64>] [--no-debug-info] [--pull] [--keep-build] [--image <image>]"
      echo " --pull Force pull the container image (default: use cached, auto-pull if missing)"
      echo " --no-pull Do not force pull (default, kept for backward compat)"
      exit 0 ;;
    *)
      echo "Unknown option: $1" >&2; exit 1 ;;
  esac
done

ARCH="$(normalize_arch "$ARCH")"

case "$ARCH" in
  x86_64|aarch64)
    ;;
  *)
    echo "Unsupported architecture: $ARCH. Supported: x86_64, aarch64" >&2
    exit 1
    ;;
esac

# ---------- version & commit ----------
cd "$PROJECT_ROOT"

VER_PURE=$(grep 'set(SoftFever_VERSION' version.inc | cut -d '"' -f2)
if [ -z "$VER_PURE" ]; then
  echo "Error: could not extract version from version.inc" >&2
  exit 1
fi
VER="V${VER_PURE}"
GIT_COMMIT_HASH=$(git rev-parse HEAD)
BUNDLE_NAME="OrcaSlicer-Linux-flatpak_${VER}_${ARCH}.flatpak"

echo "=== OrcaSlicer Flatpak Build ==="
echo " Version: ${VER} (${VER_PURE})"
echo " Commit: ${GIT_COMMIT_HASH}"
echo " Arch: ${ARCH}"
echo " Image: ${CONTAINER_IMAGE}"
echo " Bundle: ${BUNDLE_NAME}"
echo " Debug info: $([ "$NO_DEBUG_INFO" = true ] && echo "disabled" || echo "enabled")"
echo " Pull mode: $([ "$FORCE_PULL" = true ] && echo "force" || echo "auto (cached if available)")"
echo " ccache: enabled"
echo ""

# ---------- prepare manifest ----------
MANIFEST_SRC="scripts/flatpak/com.orcaslicer.OrcaSlicer.yml"
MANIFEST_DOCKER="scripts/flatpak/com.orcaslicer.OrcaSlicer.docker.yml"
# Ensure cleanup on exit (success or failure)
trap 'rm -f "$PROJECT_ROOT/$MANIFEST_DOCKER"' EXIT

# Build Docker-specific manifest with customizations (piped to avoid sed -i portability)
{
  if [ "$NO_DEBUG_INFO" = true ]; then
    sed '/^build-options:/a\
  no-debuginfo: true\
  strip: true
'
  else
    cat
  fi
} < "$MANIFEST_SRC" | \
sed "/name: OrcaSlicer/{
n
s|^\([[:space:]]*\)buildsystem: simple|\1buildsystem: simple\\
\1build-options:\\
\1  env:\\
\1    git_commit_hash: \"$GIT_COMMIT_HASH\"|
}" > "$MANIFEST_DOCKER"

# ---------- run build in Docker ----------
DOCKER="${DOCKER:-docker}"

if [ "$FORCE_PULL" = true ]; then
  echo "=== Pulling container image (--pull requested) ==="
  "$DOCKER" pull "$CONTAINER_IMAGE"
elif ! "$DOCKER" image inspect "$CONTAINER_IMAGE" &>/dev/null; then
  echo "=== Pulling container image (not found locally) ==="
  "$DOCKER" pull "$CONTAINER_IMAGE"
else
  echo "=== Using cached container image (use --pull to update) ==="
fi

FORCE_CLEAN_FLAG=""
if [ "$FORCE_CLEAN" = true ]; then
  FORCE_CLEAN_FLAG="--force-clean"
fi

DOCKER_RUN_ARGS=(run --rm -i --privileged)

# Pass build parameters as env vars so the inner script doesn't need
# variable expansion from the outer shell (avoids quoting issues).
echo "=== Starting Flatpak build inside container ==="
"$DOCKER" "${DOCKER_RUN_ARGS[@]}" \
  -v "$PROJECT_ROOT":/src:Z \
  -w /src \
  -e "BUILD_ARCH=$ARCH" \
  -e "BUNDLE_NAME=$BUNDLE_NAME" \
  -e "FORCE_CLEAN_FLAG=$FORCE_CLEAN_FLAG" \
  "$CONTAINER_IMAGE" \
  bash -s <<'EOF'
set -euo pipefail

format_duration() {
  local total_seconds="$1"
  local hours=$((total_seconds / 3600))
  local minutes=$(((total_seconds % 3600) / 60))
  local seconds=$((total_seconds % 60))

  printf "%02d:%02d:%02d" "$hours" "$minutes" "$seconds"
}

overall_start=$(date +%s)
install_start=$overall_start

# The workspace and .flatpak-builder cache are bind-mounted from the host.
# Git inside the container may reject cached source repos as unsafe due to
# ownership mismatch, which breaks flatpak-builder when it reuses git sources.
git config --global --add safe.directory /src
git config --global --add safe.directory '/src/.flatpak-builder/git/*'

# Install required SDK extensions (not pre-installed in the container image)
flatpak install -y --noninteractive --arch="$BUILD_ARCH" flathub \
  org.gnome.Platform//49 \
  org.gnome.Sdk//49 \
  org.freedesktop.Sdk.Extension.llvm21//25.08 || true

install_end=$(date +%s)
install_duration=$((install_end - install_start))

builder_start=$(date +%s)
flatpak-builder $FORCE_CLEAN_FLAG \
  --verbose \
  --ccache \
  --disable-rofiles-fuse \
  --state-dir=.flatpak-builder \
  --arch="$BUILD_ARCH" \
  --repo=flatpak-repo \
  flatpak-build \
  scripts/flatpak/com.orcaslicer.OrcaSlicer.docker.yml
builder_end=$(date +%s)
builder_duration=$((builder_end - builder_start))

bundle_start=$(date +%s)
flatpak build-bundle \
  --arch="$BUILD_ARCH" \
  flatpak-repo \
  "$BUNDLE_NAME" \
  com.orcaslicer.OrcaSlicer
bundle_end=$(date +%s)
bundle_duration=$((bundle_end - bundle_start))

# Fix ownership so output files are not root-owned on the host
owner="$(stat -c %u:%g /src)"
chown -R "$owner" .flatpak-builder flatpak-build flatpak-repo "$BUNDLE_NAME" 2>/dev/null || true

overall_end=$(date +%s)
overall_duration=$((overall_end - overall_start))

echo ""
echo "=== Build complete ==="
echo "=== Build Stats ==="
echo " Runtime install: $(format_duration "$install_duration")"
echo " flatpak-builder: $(format_duration "$builder_duration")"
echo " Bundle export: $(format_duration "$bundle_duration")"
echo " Overall: $(format_duration "$overall_duration")"
EOF

echo ""
echo "=== Flatpak bundle ready ==="
echo " ${PROJECT_ROOT}/${BUNDLE_NAME}"
echo ""
echo "Install with:"
echo " flatpak install --user ${BUNDLE_NAME}"

elapsed=$SECONDS
printf "\nBuild completed in %dh %dm %ds\n" $((elapsed/3600)) $((elapsed%3600/60)) $((elapsed%60))
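For reference, a typical invocation from the project root using the flags defined above (the architecture value here is only an example):

./scripts/build_flatpak_with_docker.sh --arch x86_64 --no-debug-info

The resulting bundle lands in the project root under the OrcaSlicer-Linux-flatpak_V<version>_<arch>.flatpak name derived from version.inc.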
99
scripts/check_appimage_libs.sh
Executable file
@@ -0,0 +1,99 @@
#!/usr/bin/env bash

set -euo pipefail

SCRIPT_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)"
# shellcheck source=/dev/null
source "${SCRIPT_DIR}/appimage_lib_policy.sh"

usage() {
  echo "Usage: $0 <appdir> [entrypoint]"
}

if [[ $# -lt 1 || $# -gt 2 ]]; then
  usage
  exit 1
fi

APPDIR="$1"
ENTRYPOINT="${2:-}"

if [[ ! -d "$APPDIR" ]]; then
  echo "Error: AppDir does not exist: $APPDIR"
  exit 1
fi

APPDIR="$(cd -- "$APPDIR" && pwd)"

if [[ -n "$ENTRYPOINT" ]]; then
  if [[ ! -e "$ENTRYPOINT" ]]; then
    echo "Error: entrypoint does not exist: $ENTRYPOINT"
    exit 1
  fi
  ENTRYPOINT="$(cd -- "$(dirname -- "$ENTRYPOINT")" && pwd)/$(basename -- "$ENTRYPOINT")"
fi

declare -a lib_paths=(
  "$APPDIR/lib/orca-runtime"
  "$APPDIR/lib"
  "$APPDIR/bin"
)

for candidate in \
  "$APPDIR/lib/gstreamer-1.0" \
  "$APPDIR/lib/gio/modules" \
  "$APPDIR/lib/gdk-pixbuf-2.0/2.10.0/loaders"; do
  if [[ -d "$candidate" ]]; then
    lib_paths+=("$candidate")
  fi
done

audit_ld_library_path="$(IFS=:; printf '%s' "${lib_paths[*]}")"

declare -a targets=()
declare -A seen_unresolved=()
declare -A seen_host=()

if [[ -n "$ENTRYPOINT" ]]; then
  targets+=("$ENTRYPOINT")
fi

while IFS= read -r -d '' file; do
  if appimage_is_elf_file "$file"; then
    targets+=("$file")
  fi
done < <(find "$APPDIR" -type f -print0)

for target in "${targets[@]}"; do
  while IFS= read -r dep; do
    if [[ "$dep" == MISSING:* ]]; then
      seen_unresolved["$target -> ${dep#MISSING:}"]=1
      continue
    fi

    dep="$(readlink -f "$dep" 2>/dev/null || printf '%s' "$dep")"
    if [[ "$dep" == "$APPDIR"* ]]; then
      continue
    fi

    if appimage_is_host_library "$dep"; then
      continue
    fi

    seen_host["$target -> $dep"]=1
  done < <(LD_LIBRARY_PATH="$audit_ld_library_path${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}" appimage_list_direct_dependencies "$target")
done

if (( ${#seen_unresolved[@]} > 0 )); then
  echo "AppImage dependency audit failed: unresolved runtime libraries detected"
  printf '%s\n' "${!seen_unresolved[@]}" | LC_ALL=C sort
  exit 1
fi

if (( ${#seen_host[@]} > 0 )); then
  echo "AppImage dependency audit failed: unexpected host libraries are still required"
  printf '%s\n' "${!seen_host[@]}" | LC_ALL=C sort
  exit 1
fi

echo "AppImage dependency audit passed: $APPDIR"
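The appimage_lib_policy.sh helpers sourced above (appimage_is_elf_file, appimage_list_direct_dependencies, appimage_is_host_library) are not included in this diff. A minimal sketch of what such helpers could look like follows; the bodies here are assumptions, not the real implementations:

#!/bin/bash
# Hypothetical sketches; names taken from the calls in check_appimage_libs.sh.

appimage_is_elf_file() {
  # A file is ELF when it starts with the 4-byte magic \x7fELF.
  [[ "$(head -c 4 "$1" 2>/dev/null)" == $'\x7fELF' ]]
}

appimage_list_direct_dependencies() {
  # Print one resolved path per required library of "$1", or
  # "MISSING:<soname>" when the loader cannot resolve it. Note that
  # ldd reports the transitive closure; a stricter variant would read
  # DT_NEEDED entries via objdump -p instead.
  ldd "$1" 2>/dev/null | awk '
    /=> not found/ { print "MISSING:" $1; next }
    /=>/           { print $3 }'
}

appimage_is_host_library() {
  # Allow-list of libraries expected to come from the host system.
  case "$(basename -- "$1")" in
    libc.so*|libm.so*|libdl.so*|libpthread.so*|ld-linux*|libGL.so*) return 0 ;;
    *) return 1 ;;
  esac
}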
8
scripts/disable_validation.entitlements
Normal file
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <key>com.apple.security.cs.disable-library-validation</key>
    <true/>
</dict>
</plist>
3
scripts/flatpak/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
builddir
.flatpak-builder
*.docker.yml
55
scripts/flatpak/com.orcaslicer.OrcaSlicer.metainfo.xml
Normal file
@@ -0,0 +1,55 @@
<?xml version="1.0" encoding="UTF-8"?>
<component type="desktop">
  <id>com.orcaslicer.OrcaSlicer</id>
  <launchable type="desktop-id">com.orcaslicer.OrcaSlicer.desktop</launchable>
  <name>OrcaSlicer</name>
  <summary>Get even more perfect prints!</summary>
  <developer id="com.orcaslicer">
    <name>SoftFever</name>
  </developer>
  <url type="homepage">https://www.orcaslicer.com</url>
  <url type="help">https://www.orcaslicer.com/wiki</url>
  <url type="bugtracker">https://github.com/OrcaSlicer/OrcaSlicer/issues/</url>
  <url type="donation">https://ko-fi.com/SoftFever</url>
  <url type="vcs-browser">https://github.com/OrcaSlicer/OrcaSlicer</url>
  <metadata_license>0BSD</metadata_license>
  <project_license>AGPL-3.0-only</project_license>
  <content_rating type="oars-1.1" />
  <requires>
    <display_length compare="gt">768</display_length>
  </requires>
  <recommends>
    <control>keyboard</control>
    <control>pointing</control>
  </recommends>
  <screenshots>
    <screenshot type="default">
      <image>https://raw.githubusercontent.com/OrcaSlicer/OrcaSlicer/v2.3.2/scripts/flatpak/images/1.png</image>
      <caption>A model ready to be sliced on a buildplate.</caption>
    </screenshot>
    <screenshot>
      <image>https://raw.githubusercontent.com/OrcaSlicer/OrcaSlicer/v2.3.2/scripts/flatpak/images/2.png</image>
      <caption>A calibration test ready to be printed out.</caption>
    </screenshot>
  </screenshots>
  <description>
    <p>OrcaSlicer is a powerful, free and open-source 3D printer slicer with cutting-edge
    features for FDM printing. It supports a wide range of printers from manufacturers
    including Bambu Lab, Prusa, Voron, Creality, and many more.</p>
    <p>Key features include advanced calibration tools, adaptive layer heights, tree supports,
    multi-material support, and an intuitive interface for both beginners and experts.
    OrcaSlicer also provides built-in network printing capabilities for compatible printers.</p>
  </description>
  <branding>
    <color type="primary" scheme_preference="light">#009688</color>
    <color type="primary" scheme_preference="dark">#00695C</color>
  </branding>
  <releases>
    <release version="2.3.2" date="2025-03-23">
      <url type="details">https://github.com/OrcaSlicer/OrcaSlicer/releases/tag/v2.3.2</url>
      <description>
        <p>See the release page for detailed changelog.</p>
      </description>
    </release>
  </releases>
</component>
382
scripts/flatpak/com.orcaslicer.OrcaSlicer.yml
Normal file
@@ -0,0 +1,382 @@
app-id: com.orcaslicer.OrcaSlicer
runtime: org.gnome.Platform
runtime-version: "49"
sdk: org.gnome.Sdk
sdk-extensions:
  - org.freedesktop.Sdk.Extension.llvm21
command: entrypoint
separate-locales: true
rename-icon: OrcaSlicer
build-options:
  append-path: /usr/lib/sdk/llvm21/bin
  prepend-ld-library-path: /usr/lib/sdk/llvm21/lib
  env:
    CC: clang
    CXX: clang++
    LDFLAGS: "-fuse-ld=lld"
finish-args:
  - --share=ipc
  - --socket=x11
  - --socket=wayland
  - --share=network
  - --device=all
  - --filesystem=home
  - --filesystem=xdg-run/gvfs
  - --filesystem=/run/media
  - --filesystem=/media
  - --filesystem=/run/spnav.sock:ro
  # Allow read-only access to OrcaSlicer's legacy config and cache directories (if they exist) for migration purposes.
  - --filesystem=~/.var/app/io.github.orcaslicer.OrcaSlicer:ro
  # Allow OrcaSlicer to own and talk to instance-check D-Bus names (InstanceCheck.cpp)
  - --talk-name=com.orcaslicer.OrcaSlicer.InstanceCheck.*
  - --own-name=com.orcaslicer.OrcaSlicer.InstanceCheck.*
  - --system-talk-name=org.freedesktop.UDisks2
  - --env=SPNAV_SOCKET=/run/spnav.sock

modules:

  # JPEG codec for the liveview
  - name: gst-plugins-good
    buildsystem: meson
    config-opts:
      - -Dauto_features=disabled
      - -Djpeg=enabled
      - -Ddoc=disabled
      - -Dexamples=disabled
      - -Dtests=disabled
    sources:
      - type: archive
        url: https://gstreamer.freedesktop.org/src/gst-plugins-good/gst-plugins-good-1.22.8.tar.xz
        sha256: e305b9f07f52743ca481da0a4e0c76c35efd60adaf1b0694eb3bb021e2137e39

  - name: glu
    build-options:
      cxxflags: -Wno-register
    config-opts:
      - --disable-static
    sources:
      - type: archive
        url: https://ftp.osuosl.org/pub/blfs/conglomeration/glu/glu-9.0.2.tar.xz
        sha256: 6e7280ff585c6a1d9dfcdf2fca489251634b3377bfc33c29e4002466a38d02d4
    cleanup:
      - /include
      - /lib/*.a
      - /lib/*.la
      - /lib/pkgconfig

  - name: kde-extra-cmake-modules
    buildsystem: cmake-ninja
    sources:
      - type: git
        url: https://github.com/KDE/extra-cmake-modules
        tag: v5.249.0
        commit: 008ae77d0cd2a97c346228ab30b99279643e5022
    cleanup:
      - /

  - name: libspnav
    sources:
      - type: archive
        url: https://github.com/FreeSpacenav/libspnav/releases/download/v1.2/libspnav-1.2.tar.gz
        sha256: 093747e7e03b232e08ff77f1ad7f48552c06ac5236316a5012db4269951c39db

  # wxWidgets built as a separate module for Flathub (no network at build time)
  # Config-opts mirror deps/wxWidgets/wxWidgets.cmake with FLATPAK=ON, DEP_WX_GTK3=ON
  - name: wxWidgets
    buildsystem: cmake-ninja
    build-options:
      env:
        CMAKE_POLICY_VERSION_MINIMUM: "3.5"
    config-opts:
      - -DCMAKE_BUILD_TYPE=Release
      - -DwxBUILD_PRECOMP=ON
      - -DwxBUILD_TOOLKIT=gtk3
      - -DCMAKE_DEBUG_POSTFIX:STRING=d
      - -DwxBUILD_DEBUG_LEVEL=0
      - -DwxBUILD_SAMPLES=OFF
      - -DwxBUILD_SHARED=ON
      - -DBUILD_SHARED_LIBS=ON
      - -DwxUSE_MEDIACTRL=ON
      - -DwxUSE_DETECT_SM=OFF
      - -DwxUSE_PRIVATE_FONTS=ON
      - -DwxUSE_OPENGL=ON
      - -DwxUSE_GLCANVAS_EGL=ON
      - -DwxUSE_WEBREQUEST=ON
      - -DwxUSE_WEBVIEW=ON
      - -DwxUSE_WEBVIEW_EDGE=OFF
      - -DwxUSE_WEBVIEW_IE=OFF
      - -DwxUSE_REGEX=builtin
      - -DwxUSE_LIBSDL=OFF
      - -DwxUSE_XTEST=OFF
      - -DwxUSE_STC=OFF
      - -DwxUSE_AUI=ON
      - -DwxUSE_LIBPNG=sys
      - -DwxUSE_ZLIB=sys
      - -DwxUSE_LIBJPEG=sys
      - -DwxUSE_LIBTIFF=OFF
      - -DwxUSE_EXPAT=sys
      - -DCMAKE_EXE_LINKER_FLAGS=-fuse-ld=lld
      - -DCMAKE_SHARED_LINKER_FLAGS=-fuse-ld=lld
      - -DCMAKE_MODULE_LINKER_FLAGS=-fuse-ld=lld
    sources:
      # Use git instead of archive: wxWidgets 3.3 relies on multiple git
      # submodules (PCRE2, etc.) that are not included in GitHub tarballs.
      - type: git
        url: https://github.com/SoftFever/Orca-deps-wxWidgets.git
        tag: orca-3.3.2
        commit: db1005db3dea2c37a46fb455a9a02e37aa360751

  # OrcaSlicer C++ dependencies (built offline with pre-downloaded archives)
  - name: orca_deps
    buildsystem: simple
    build-options:
      env:
        BUILD_DIR: deps/build_flatpak
    build-commands:
      - |
        cmake -S deps -B $BUILD_DIR \
          -DFLATPAK=ON \
          -DDEP_DOWNLOAD_DIR=/run/build/orca_deps/external-packages \
          -DCMAKE_PREFIX_PATH=/app \
          -DDESTDIR=/app \
          -DCMAKE_INSTALL_PREFIX=/app \
          -DCMAKE_EXE_LINKER_FLAGS="-fuse-ld=lld" \
          -DCMAKE_SHARED_LINKER_FLAGS="-fuse-ld=lld" \
          -DCMAKE_MODULE_LINKER_FLAGS="-fuse-ld=lld"
      - cmake --build $BUILD_DIR --parallel
      - rm -rf /run/build/orca_deps/external-packages

    cleanup:
      - /include
      - "*.a"
      - "*.la"

    sources:
      # OrcaSlicer deps/ directory (avoids copying .git from worktree)
      - type: dir
        path: ../../deps
        dest: deps

      # ---------------------------------------------------------------
      # Pre-downloaded dependency archives
      # These are placed in external-packages/<Name>/ so CMake's
      # ExternalProject_Add finds them and skips network downloads.
      # ---------------------------------------------------------------

      # Boost 1.84.0
      - type: file
        url: https://github.com/boostorg/boost/releases/download/boost-1.84.0/boost-1.84.0.tar.gz
        sha256: 4d27e9efed0f6f152dc28db6430b9d3dfb40c0345da7342eaa5a987dde57bd95
        dest: external-packages/Boost

      # TBB v2021.5.0
      - type: file
        url: https://github.com/oneapi-src/oneTBB/archive/refs/tags/v2021.5.0.zip
        sha256: 83ea786c964a384dd72534f9854b419716f412f9d43c0be88d41874763e7bb47
        dest: external-packages/TBB

      # Cereal v1.3.0
      - type: file
        url: https://github.com/USCiLab/cereal/archive/refs/tags/v1.3.0.zip
        sha256: 71642cb54658e98c8f07a0f0d08bf9766f1c3771496936f6014169d3726d9657
        dest: external-packages/Cereal

      # Qhull v8.0.2
      - type: file
        url: https://github.com/qhull/qhull/archive/v8.0.2.zip
        sha256: a378e9a39e718e289102c20d45632f873bfdc58a7a5f924246ea4b176e185f1e
        dest: external-packages/Qhull

      # GLFW 3.4
      - type: file
        url: https://github.com/glfw/glfw/archive/refs/tags/3.4.zip
        sha256: a133ddc3d3c66143eba9035621db8e0bcf34dba1ee9514a9e23e96afd39fd57a
        dest: external-packages/GLFW

      # OpenCSG 1.4.2
      - type: file
        url: https://github.com/floriankirsch/OpenCSG/archive/refs/tags/opencsg-1-4-2-release.zip
        sha256: 51afe0db79af8386e2027d56d685177135581e0ee82ade9d7f2caff8deab5ec5
        dest: external-packages/OpenCSG

      # Blosc 1.17.0 (tamasmeszaros fork)
      - type: file
        url: https://github.com/tamasmeszaros/c-blosc/archive/refs/heads/v1.17.0_tm.zip
        sha256: dcb48bf43a672fa3de6a4b1de2c4c238709dad5893d1e097b8374ad84b1fc3b3
        dest: external-packages/Blosc

      # OpenEXR v2.5.5
      - type: file
        url: https://github.com/AcademySoftwareFoundation/openexr/archive/refs/tags/v2.5.5.zip
        sha256: 0307a3d7e1fa1e77e9d84d7e9a8694583fbbbfd50bdc6884e2c96b8ef6b902de
        dest: external-packages/OpenEXR

      # OpenVDB (custom fork)
      - type: file
        url: https://github.com/tamasmeszaros/openvdb/archive/a68fd58d0e2b85f01adeb8b13d7555183ab10aa5.zip
        sha256: f353e7b99bd0cbfc27ac9082de51acf32a8bc0b3e21ff9661ecca6f205ec1d81
        dest: external-packages/OpenVDB

      # GMP 6.2.1
      - type: file
        url: https://github.com/SoftFever/OrcaSlicer_deps/releases/download/gmp-6.2.1/gmp-6.2.1.tar.bz2
        sha256: eae9326beb4158c386e39a356818031bd28f3124cf915f8c5b1dc4c7a36b4d7c
        dest: external-packages/GMP

      # MPFR 4.2.2
      - type: file
        url: https://ftp.gnu.org/gnu/mpfr/mpfr-4.2.2.tar.bz2
        sha256: 9ad62c7dc910303cd384ff8f1f4767a655124980bb6d8650fe62c815a231bb7b
        dest: external-packages/MPFR

      # CGAL 5.6.3
      - type: file
        url: https://github.com/CGAL/cgal/releases/download/v5.6.3/CGAL-5.6.3.zip
        sha256: 5d577acb4a9918ccb960491482da7a3838f8d363aff47e14d703f19fd84733d4
        dest: external-packages/CGAL

      # NLopt v2.5.0
      - type: file
        url: https://github.com/stevengj/nlopt/archive/v2.5.0.tar.gz
        sha256: c6dd7a5701fff8ad5ebb45a3dc8e757e61d52658de3918e38bab233e7fd3b4ae
        dest: external-packages/NLopt

      # libnoise 1.0
      - type: file
        url: https://github.com/SoftFever/Orca-deps-libnoise/archive/refs/tags/1.0.zip
        sha256: 96ffd6cc47898dd8147aab53d7d1b1911b507d9dbaecd5613ca2649468afd8b6
        dest: external-packages/libnoise

      # Draco 1.5.7
      - type: file
        url: https://github.com/google/draco/archive/refs/tags/1.5.7.zip
        sha256: 27b72ba2d5ff3d0a9814ad40d4cb88f8dc89a35491c0866d952473f8f9416b77
        dest: external-packages/Draco

      # OpenSSL 1.1.1w (GNOME SDK has 3.x; OrcaSlicer requires 1.1.x)
      - type: file
        url: https://github.com/openssl/openssl/archive/OpenSSL_1_1_1w.tar.gz
        sha256: 2130e8c2fb3b79d1086186f78e59e8bc8d1a6aedf17ab3907f4cb9ae20918c41
        dest: external-packages/OpenSSL

      # CURL 7.75.0 (built from source to link against OpenSSL 1.1.x)
      - type: file
        url: https://github.com/curl/curl/archive/refs/tags/curl-7_75_0.zip
        sha256: a63ae025bb0a14f119e73250f2c923f4bf89aa93b8d4fafa4a9f5353a96a765a
        dest: external-packages/CURL

      # OCCT (OpenCASCADE) V7_6_0
      - type: file
        url: https://github.com/Open-Cascade-SAS/OCCT/archive/refs/tags/V7_6_0.zip
        sha256: 28334f0e98f1b1629799783e9b4d21e05349d89e695809d7e6dfa45ea43e1dbc
        dest: external-packages/OCCT

      # OpenCV 4.6.0
      - type: file
        url: https://github.com/opencv/opencv/archive/refs/tags/4.6.0.tar.gz
        sha256: 1ec1cba65f9f20fe5a41fda1586e01c70ea0c9a6d7b67c9e13edf0cfe2239277
        dest: external-packages/OpenCV

      # ---------------------------------------------------------------
      # Fallback archives for deps normally provided by the GNOME SDK.
      # These are only used if find_package() fails to locate them.
      # ---------------------------------------------------------------

      # ZLIB 1.2.13
      - type: file
        url: https://github.com/madler/zlib/archive/refs/tags/v1.2.13.zip
        sha256: c2856951bbf30e30861ace3765595d86ba13f2cf01279d901f6c62258c57f4ff
        dest: external-packages/ZLIB

      # libpng 1.6.35
      - type: file
        url: https://github.com/glennrp/libpng/archive/refs/tags/v1.6.35.zip
        sha256: 3d22d46c566b1761a0e15ea397589b3a5f36ac09b7c785382e6470156c04247f
        dest: external-packages/PNG

      # libjpeg-turbo 3.0.1
      - type: file
        url: https://github.com/libjpeg-turbo/libjpeg-turbo/archive/refs/tags/3.0.1.zip
        sha256: d6d99e693366bc03897677650e8b2dfa76b5d6c54e2c9e70c03f0af821b0a52f
        dest: external-packages/JPEG

      # Freetype 2.12.1
      - type: file
        url: https://github.com/SoftFever/orca_deps/releases/download/freetype-2.12.1.tar.gz/freetype-2.12.1.tar.gz
        sha256: efe71fd4b8246f1b0b1b9bfca13cfff1c9ad85930340c27df469733bbb620938
        dest: external-packages/FREETYPE

  - name: OrcaSlicer
    buildsystem: simple
    build-commands:
      - |
        cmake . -B build_flatpak \
          -DFLATPAK=ON \
          -DCMAKE_BUILD_TYPE=Release \
          -DCMAKE_PREFIX_PATH=/app \
          -DCMAKE_INSTALL_PREFIX=/app \
          -DCMAKE_EXE_LINKER_FLAGS="-fuse-ld=lld" \
          -DCMAKE_SHARED_LINKER_FLAGS="-fuse-ld=lld" \
          -DCMAKE_MODULE_LINKER_FLAGS="-fuse-ld=lld"
      - cmake --build build_flatpak --target OrcaSlicer -j$FLATPAK_BUILDER_N_JOBS
      - ./scripts/run_gettext.sh
      - cmake --build build_flatpak --target install -j$FLATPAK_BUILDER_N_JOBS

    cleanup:
      - /include

    post-install:

      - | # Desktop integration files
        install -Dm644 -t /app/share/icons/hicolor/scalable/apps/ resources/images/OrcaSlicer.svg
        install -Dm644 ${FLATPAK_ID}.metainfo.xml /app/share/metainfo/${FLATPAK_ID}.metainfo.xml
        desktop-file-edit --set-key=Exec --set-value="entrypoint %U" /app/share/applications/${FLATPAK_ID}.desktop
        install -Dm755 entrypoint /app/bin
        install -Dm755 umount /app/bin

      - install -Dm644 LICENSE.txt /app/share/licenses/${FLATPAK_ID}/LICENSE.txt

      - | # Install fonts into fontconfig-scanned directory so Pango finds them
        # before initialization (avoids ensure_faces crash from AddPrivateFont)
        install -Dm644 -t /app/share/fonts/OrcaSlicer/ resources/fonts/*.ttf
        fc-cache -f /app/share/fonts/OrcaSlicer/

    sources:
      # OrcaSlicer source tree (specific dirs to avoid copying .git from worktree)
      - type: dir
        path: ../../cmake
        dest: cmake
      - type: dir
        path: ../../deps_src
        dest: deps_src
      - type: dir
        path: ../../resources
        dest: resources
      - type: dir
        path: ../../src
        dest: src
      - type: dir
        path: ../../localization
        dest: localization

      - type: file
        path: ../../CMakeLists.txt
      - type: file
        path: ../../LICENSE.txt
      - type: file
        path: ../../version.inc
      - type: file
        path: ../run_gettext.sh
        dest: scripts

      # AppData metainfo for GNOME Software & Co.
      - type: file
        path: com.orcaslicer.OrcaSlicer.metainfo.xml

      # Startup script
      - type: file
        path: entrypoint

      # umount wrapper used to redirect umount calls to UDisks2
      - type: file
        path: umount
11
scripts/flatpak/entrypoint
Normal file
@@ -0,0 +1,11 @@
#!/usr/bin/env sh

# Work-around https://gitlab.gnome.org/GNOME/gnome-build-meta/-/issues/754
grep -q org.freedesktop.Platform.GL.nvidia /.flatpak-info && export WEBKIT_DISABLE_DMABUF_RENDERER=1

# Work-around https://github.com/bambulab/BambuStudio/issues/3440
# Use LC_NUMERIC instead of LC_ALL to prevent decimal separator issues
# while preserving the user's language/locale for translations.
export LC_NUMERIC=C

exec /app/bin/orca-slicer "$@"
BIN
scripts/flatpak/images/1.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 280 KiB
BIN
scripts/flatpak/images/2.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 976 KiB
15
scripts/flatpak/setup_env_ubuntu24.04.sh
Executable file
@@ -0,0 +1,15 @@
#!/bin/bash

sudo apt update
sudo apt install build-essential flatpak flatpak-builder gnome-software-plugin-flatpak -y
flatpak remote-add --if-not-exists flathub https://flathub.org/repo/flathub.flatpakrepo
# Runtime versions must match runtime-version "49" and the llvm21 SDK
# extension declared in com.orcaslicer.OrcaSlicer.yml
flatpak install flathub org.gnome.Platform//49 org.gnome.Sdk//49 org.freedesktop.Sdk.Extension.llvm21//25.08


##
# In the OrcaSlicer folder, run the following commands to build Orca
# # First time build
# flatpak-builder --state-dir=.flatpak-builder --keep-build-dirs --user --force-clean build-dir scripts/flatpak/com.orcaslicer.OrcaSlicer.yml

# # Subsequent builds (only rebuilding OrcaSlicer)
# flatpak-builder --state-dir=.flatpak-builder --keep-build-dirs --user build-dir scripts/flatpak/com.orcaslicer.OrcaSlicer.yml --build-only=OrcaSlicer
10
scripts/flatpak/umount
Executable file
@@ -0,0 +1,10 @@
#!/usr/bin/env sh
if [ $# -eq 0 ]; then
  echo "No arguments supplied"
  exit 1
fi

DEVICE=$(basename "$(findmnt -oSOURCE -n "$@")")

exec /usr/bin/gdbus call -y -d org.freedesktop.UDisks2 -o "/org/freedesktop/UDisks2/block_devices/$DEVICE" -m org.freedesktop.UDisks2.Filesystem.Unmount "{'b': <'false'>}" 1> /dev/null
36
scripts/generate_linux_payload_manifest.py
Normal file
@@ -0,0 +1,36 @@
#!/usr/bin/env python3
import argparse, hashlib, json
from pathlib import Path

def sha256(p: Path) -> str:
    h = hashlib.sha256()
    with p.open('rb') as f:
        for chunk in iter(lambda: f.read(1024 * 1024), b''):
            h.update(chunk)
    return h.hexdigest()

def main():
    ap = argparse.ArgumentParser()
    ap.add_argument('plugin_dir', type=Path)
    ap.add_argument('--abi-version', default='02.05.02.58')
    ap.add_argument('--out', type=Path, default=None)
    args = ap.parse_args()
    net = args.plugin_dir / 'libbambu_networking.so'
    src = args.plugin_dir / 'libBambuSource.so'
    if not net.exists() or not src.exists():
        raise SystemExit('missing linux payload files')
    files = [
        {'name': net.name, 'sha256': sha256(net), 'abi_version': args.abi_version},
        {'name': src.name, 'sha256': sha256(src)},
    ]
    for extra_name in ('liblive555.so', 'libagora_rtc_sdk.so', 'libagora-fdkaac.so'):
        extra = args.plugin_dir / extra_name
        if extra.exists():
            files.append({'name': extra.name, 'sha256': sha256(extra)})
    manifest = {'files': files}
    out = args.out or (args.plugin_dir / 'linux_payload_manifest.json')
    out.write_text(json.dumps(manifest, indent=2) + '\n')
    print(out)

if __name__ == '__main__':
    main()
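For reference, a hypothetical invocation (the plugin directory path is illustrative). The script writes linux_payload_manifest.json next to the libraries unless --out is given, and prints the output path:

python3 scripts/generate_linux_payload_manifest.py ~/bambu-plugins --abi-version 02.05.02.58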
161
scripts/generate_presets_vendors.py
Normal file
@@ -0,0 +1,161 @@
# helps manage the static list of vendor names in src/slic3r/GUI/CreatePresetsDialog.cpp

import json
from pathlib import Path
from typing import Dict, List


scripts_dir = Path(__file__).resolve().parent
print(f'Scripts dir: {scripts_dir}')
root_dir = scripts_dir.parent
profiles_dir = root_dir / 'resources' / 'profiles'

printers: Dict[str, List[str]] = {}

# generates the printer vendor list
print(f'Looking in {profiles_dir.resolve()}')
for entry in profiles_dir.glob('*.json'):
    if entry.is_file():
        entry_info = json.loads(entry.read_text())
        vendor_name = entry_info.get('name', None)
        if vendor_name and vendor_name != 'Custom Printer':
            models = [machine.get('name', None) for machine in entry_info.get('machine_model_list', []) if machine.get('name', None)]
            if not models:
                continue
            printers[vendor_name] = models

vendor_names = [f'"{vendor_name}",' for vendor_name in sorted(printers.keys(), key=str.casefold)]
vend_col_width = len(max(vendor_names, key=len))
vendors_formatted = ' {' + '\n '.join(' '.join(f"{vendor_name:{vend_col_width}}" for vendor_name in vendor_names[i:i+5]) for i in range(0, len(vendor_names), 5)).rstrip()[:-1] + '}'
print(vendors_formatted)

# generates the printer model map
models_formatted = ' {'
models_indent = len(models_formatted) + vend_col_width + 2
for vendor_name in sorted(printers.keys(), key=str.casefold):
    vendor_formatted = f'"{vendor_name}",'
    models_formatted += f'{{{vendor_formatted:{vend_col_width}}{{'

    model_names = printers[vendor_name]
    model_names_formatted = [f'"{model_name}",' for model_name in model_names]
    model_col_width = len(max(model_names_formatted, key=len))
    model_names_str = ('\n' + ' ' * models_indent).join(' '.join(f"{model_name:{model_col_width}}" for model_name in model_names_formatted[i:i+5]) for i in range(0, len(model_names), 5)).rstrip()[:-1] + '}'

    models_formatted += model_names_str

    models_formatted += '},\n '

models_formatted = models_formatted.rstrip()[:-1] + '}'
print(models_formatted)


# Generate Filament Vendors
filament_vendors = [
    '3Dgenius',
    '3DJake',
    '3DXTECH',
    '3D BEST-Q',
    '3D Hero',
    '3D-Fuel',
    'Aceaddity',
    'AddNorth',
    'Amazon Basics',
    'AMOLEN',
    'Ankermake',
    'Anycubic',
    'Atomic',
    'AzureFilm',
    'BASF',
    'Bblife',
    'BCN3D',
    'Beyond Plastic',
    'California Filament',
    'Capricorn',
    'CC3D',
    'CERPRiSE',
    'colorFabb',
    'Comgrow',
    'Cookiecad',
    'Creality',
    'Das Filament',
    'DO3D',
    'DOW',
    'DREMC',
    'DSM',
    'Duramic',
    'ELEGOO',
    'Eryone',
    'Essentium',
    'eSUN',
    'Extrudr',
    'Fiberforce',
    'Fiberlogy',
    'FilaCube',
    'Filamentive',
    'FilamentOne',
    'Fillamentum',
    'Fil X',
    'FLASHFORGE',
    'Formfutura',
    'Francofil',
    'FusRock',
    'GEEETECH',
    'Giantarm',
    'Gizmo Dorks',
    'GreenGate3D',
    'HATCHBOX',
    'Hello3D',
    'IC3D',
    'IEMAI',
    'IIID Max',
    'INLAND',
    'iProspect',
    'iSANMATE',
    'Justmaker',
    'Keene Village Plastics',
    'Kexcelled',
    'LDO',
    'MakerBot',
    'MatterHackers',
    'MIKA3D',
    'NinjaTek',
    'Nobufil',
    'Novamaker',
    'OVERTURE',
    'OVVNYXE',
    'Polymaker',
    'Priline',
    'Printed Solid',
    'Protopasta',
    'Prusament',
    'Push Plastic',
    'R3D',
    'Re-pet3D',
    'Recreus',
    'Regen',
    'Sain SMART',
    'SliceWorx',
    'Snapmaker',
    'SnoLabs',
    'Spectrum',
    'SUNLU',
    'TTYT3D',
    'Tianse',
    'UltiMaker',
    'Valment',
    'Verbatim',
    'VO3D',
    'Voxelab',
    'VOXELPLA',
    'YOOPAI',
    'Yousu',
    'Ziro',
    'Zyltech',
]

filament_vendors_formatted = [f'"{vendor_name}",' for vendor_name in filament_vendors]
fil_col_width = len(max(filament_vendors_formatted, key=len))
filaments_formatted = ' {'
filament_indent = len(filaments_formatted)
filaments_formatted += ('\n' + ' ' * filament_indent).join(' '.join(f'{vendor_name:{fil_col_width}}' for vendor_name in filament_vendors_formatted[i:i+5]) for i in range(0, len(filament_vendors), 5)).rstrip()[:-1] + '};'
print(filaments_formatted)
3
scripts/linux.d/README.md
Normal file
@@ -0,0 +1,3 @@
Files in this directory are named for the **exact** output of `awk -F= '/^ID=/ {print $2}' /etc/os-release` for their respective distribution.

When `build_linux.sh` is executed, the respective file for the distribution is sourced so that the distribution-specific instructions/logic are used; a sketch of this dispatch follows below.
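A minimal sketch of the dispatch described above, assuming build_linux.sh resolves the ID field itself (the variable names here are hypothetical; the real build_linux.sh may differ):

#!/bin/bash
# Hypothetical sketch of the per-distribution dispatch.
DISTRO_ID=$(awk -F= '/^ID=/ {print $2}' /etc/os-release | tr -d '"')  # strip optional quotes
if [[ -f "scripts/linux.d/${DISTRO_ID}" ]]; then
  # shellcheck source=/dev/null
  source "scripts/linux.d/${DISTRO_ID}"
else
  echo "Unsupported distribution: ${DISTRO_ID}" >&2
  exit 1
fi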
46
scripts/linux.d/arch
Normal file
@@ -0,0 +1,46 @@
#!/bin/bash
# these are the Arch Linux specific build functions

# Additional Dev packages for OrcaSlicer
export REQUIRED_DEV_PACKAGES=(
  cmake
  curl
  dbus
  eglexternalplatform
  extra-cmake-modules
  file
  gettext
  git
  glew
  gstreamer
  gstreamermm
  gtk3
  libmspack
  libsecret
  libspnav
  mesa
  ninja
  openssl
  texinfo
  wayland-protocols
  webkit2gtk
  wget
)

if [[ -n "$UPDATE_LIB" ]]
then
  echo -n -e "Updating linux ...\n"
  NEEDED_PKGS=()
  for PKG in "${REQUIRED_DEV_PACKAGES[@]}"; do
    pacman -Q "${PKG}" > /dev/null || NEEDED_PKGS+=("${PKG}")
  done

  if [[ "${#NEEDED_PKGS[*]}" -gt 0 ]]; then
    sudo pacman -Syy --noconfirm "${NEEDED_PKGS[@]}"
  fi
  echo -e "done\n"
  exit 0
fi

export FOUND_GTK3_DEV
FOUND_GTK3_DEV=$(pacman -Q gtk3)
45
scripts/linux.d/cachyos
Normal file
@@ -0,0 +1,45 @@
#!/bin/bash
# these are the CachyOS Linux specific build functions

# Additional Dev packages for OrcaSlicer
export REQUIRED_DEV_PACKAGES=(
  cmake
  curl
  dbus
  eglexternalplatform
  extra-cmake-modules
  file
  gettext
  git
  glew
  gstreamer
  gtk3
  libmspack
  libsecret
  libspnav
  mesa
  ninja
  openssl
  texinfo
  wayland-protocols
  webkit2gtk
  wget
)

if [[ -n "$UPDATE_LIB" ]]
then
  echo -n -e "Updating linux ...\n"
  NEEDED_PKGS=()
  for PKG in "${REQUIRED_DEV_PACKAGES[@]}"; do
    pacman -Q "${PKG}" > /dev/null || NEEDED_PKGS+=("${PKG}")
  done

  if [[ "${#NEEDED_PKGS[*]}" -gt 0 ]]; then
    sudo pacman -Syy --noconfirm "${NEEDED_PKGS[@]}"
  fi
  echo -e "done\n"
  exit 0
fi

export FOUND_GTK3_DEV
FOUND_GTK3_DEV=$(pacman -Q gtk3)
34
scripts/linux.d/clear-linux-os
Normal file
@@ -0,0 +1,34 @@
#!/bin/bash
# these are the Clear Linux specific build functions

# Additional bundles for OrcaSlicer
export REQUIRED_BUNDLES=(
  c-basic
  dev-utils
  devpkg-curl
  devpkg-glew
  devpkg-glu
  devpkg-gstreamer
  devpkg-gtk3
  devpkg-libmspack
  devpkg-libsecret
  devpkg-openssl
  devpkg-webkitgtk
  file
  git
  lib-opengl
  perl-basic
  texinfo
  wget
)

if [[ -n "$UPDATE_LIB" ]]
then
  echo "Updating linux ..."
  sudo swupd bundle-add -y "${REQUIRED_BUNDLES[@]}"
  echo -e "done\n"
  exit 0
fi

export FOUND_GTK3_DEV
FOUND_GTK3_DEV=$(find /usr/lib64/libgtk-3.so 2>/dev/null || true)
75
scripts/linux.d/debian
Normal file
@@ -0,0 +1,75 @@
#!/bin/bash

REQUIRED_DEV_PACKAGES=(
  autoconf
  build-essential
  cmake
  eglexternalplatform-dev
  extra-cmake-modules
  file
  g++
  gettext
  git
  libbz2-dev
  libcurl4-openssl-dev
  libdbus-1-dev
  libfuse2
  libgl1-mesa-dev
  libglew-dev
  libgstreamerd-3-dev
  libgtk-3-dev
  libmspack-dev
  libosmesa6-dev
  libsecret-1-dev
  libspnav-dev
  libssl-dev
  libtool
  libudev-dev
  libunwind-dev
  libx264-dev
  libxkbcommon-dev
  libavcodec-dev
  libavutil-dev
  libswscale-dev
  nasm
  nlohmann-json3-dev
  clang
  lld
  ninja-build
  texinfo
  wget
  yasm
)

if [[ -n "$UPDATE_LIB" ]]
then
  # shellcheck source=/dev/null
  source /etc/os-release
  if [ "${ID}" == "ubuntu" ] && [ -n "${VERSION_ID}" ]; then
    if dpkg --compare-versions "${VERSION_ID}" ge 22 && dpkg --compare-versions "${VERSION_ID}" lt 24 ;
    then
      REQUIRED_DEV_PACKAGES+=(curl libfuse-dev m4)
    fi
  fi

  if [[ -n "$BUILD_DEBUG" ]]
  then
    REQUIRED_DEV_PACKAGES+=(libssl-dev libcurl4-openssl-dev)
  fi

  if [ "$(apt show --quiet libwebkit2gtk-4.0-dev 2>/dev/null)" != "" ]
  then
    REQUIRED_DEV_PACKAGES+=(libwebkit2gtk-4.0-dev)
  else
    REQUIRED_DEV_PACKAGES+=(libwebkit2gtk-4.1-dev)
  fi

  sudo apt update
  sudo apt install -y "${REQUIRED_DEV_PACKAGES[@]}"

  echo -e "done\n"
  exit 0
fi

export FOUND_GTK3_DEV
FOUND_GTK3_DEV=$(dpkg -l libgtk* | grep gtk-3-dev || echo '')
50
scripts/linux.d/fedora
Normal file
@@ -0,0 +1,50 @@
#!/bin/bash

REQUIRED_DEV_PACKAGES=(
  autoconf
  automake
  cmake
  dbus-devel
  eglexternalplatform-devel
  extra-cmake-modules
  file
  gcc
  gcc-c++
  gettext
  git
  gstreamer1-devel
  gstreamermm-devel
  gtk3-devel
  libmspack-devel
  libquadmath-devel
  libsecret-devel
  libspnav-devel
  libtool
  m4
  mesa-libGLU-devel
  ninja-build
  openssl-devel
  perl-FindBin
  texinfo
  wayland-protocols-devel
  webkit2gtk4.1-devel
  wget
  libcurl-devel
)

if [[ -n "$UPDATE_LIB" ]]
then
  NEEDED_PKGS=()
  for PKG in "${REQUIRED_DEV_PACKAGES[@]}"; do
    rpm -q "${PKG}" > /dev/null || NEEDED_PKGS+=("${PKG}")
  done

  if [[ "${#NEEDED_PKGS[*]}" -gt 0 ]]; then
    sudo dnf install -y "${NEEDED_PKGS[@]}"
  fi
  echo -e "done\n"
  exit 0
fi

export FOUND_GTK3_DEV
FOUND_GTK3_DEV=$(rpm -qa | grep -P '^gtk3-devel' || true)
75
scripts/linux.d/gentoo
Normal file
@@ -0,0 +1,75 @@
#!/bin/bash

if ! command -v qlist > /dev/null 2>&1; then
  echo "app-portage/portage-utils is required but not installed. Installing..."
  sudo emerge --ask --verbose app-portage/portage-utils
fi

REQUIRED_DEV_PACKAGES=(
  app-crypt/libsecret
  dev-build/autoconf
  dev-build/cmake
  dev-build/libtool
  dev-build/ninja
  dev-cpp/gstreamermm
  dev-libs/libmspack
  dev-libs/libspnav
  dev-libs/openssl
  dev-vcs/git
  gui-libs/eglexternalplatform
  kde-frameworks/extra-cmake-modules
  media-libs/glew
  media-libs/gst-plugins-base:1.0
  media-libs/gstreamer:1.0
  net-misc/curl
  net-misc/wget
  sys-apps/dbus
  sys-apps/file
  sys-apps/texinfo
  sys-devel/gcc
  sys-devel/gettext
  sys-devel/m4
  virtual/libudev
  x11-libs/gtk+:3
)

if [[ -n "$UPDATE_LIB" ]]
then
  echo -e "Updating Gentoo ...\n"

  # Check which version of webkit-gtk is available/preferred
  if qlist -I net-libs/webkit-gtk:4 > /dev/null 2>&1; then
    REQUIRED_DEV_PACKAGES+=(net-libs/webkit-gtk:4)
  elif qlist -I net-libs/webkit-gtk:4.1 > /dev/null 2>&1; then
    REQUIRED_DEV_PACKAGES+=(net-libs/webkit-gtk:4.1)
  else
    # Default to 4.1 if neither is installed
    REQUIRED_DEV_PACKAGES+=(net-libs/webkit-gtk:4.1)
  fi

  if [[ -n "$BUILD_DEBUG" ]]
  then
    REQUIRED_DEV_PACKAGES+=(dev-libs/openssl net-misc/curl)
  fi

  # Filter out packages that are already installed
  packages_to_install=()
  for pkg in "${REQUIRED_DEV_PACKAGES[@]}"; do
    if ! qlist -I "$pkg" > /dev/null 2>&1; then
      packages_to_install+=("$pkg")
    fi
  done

  # Install them if there are any to install
  if [ ${#packages_to_install[@]} -gt 0 ]; then
    sudo emerge --ask --verbose --noreplace "${packages_to_install[@]}"
  else
    echo "All required packages are already installed."
  fi

  echo -e "done\n"
  exit 0
fi

export FOUND_GTK3_DEV
FOUND_GTK3_DEV=$(qlist -I x11-libs/gtk+:3 2>/dev/null || find /usr/lib64/libgtk-3.so 2>/dev/null || true)
49
scripts/linux.d/suse
Normal file
@@ -0,0 +1,49 @@
#!/bin/bash

REQUIRED_DEV_PACKAGES=(
  autoconf
  automake
  cmake
  dbus-1-devel
  eglexternalplatform-devel
  extra-cmake-modules
  file
  gcc
  gcc-c++
  gettext
  git
  gstreamer-devel
  gtk3-devel
  libmspack-devel
  libquadmath-devel
  libsecret-devel
  libspnav-devel
  libtool
  m4
  glu-devel
  ninja-build
  openssl-devel
  perl-FindBin-Real
  texinfo
  wayland-protocols-devel
  webkit2gtk4-devel
  wget
  libcurl-devel
)

if [[ -n "$UPDATE_LIB" ]]
then
  NEEDED_PKGS=()
  for PKG in "${REQUIRED_DEV_PACKAGES[@]}"; do
    rpm -q "${PKG}" > /dev/null || NEEDED_PKGS+=("${PKG}")
  done

  if [[ "${#NEEDED_PKGS[*]}" -gt 0 ]]; then
    sudo zypper install -y "${NEEDED_PKGS[@]}"
  fi
  echo -e "done\n"
  exit 0
fi

export FOUND_GTK3_DEV
FOUND_GTK3_DEV=$(rpm -qa | grep -P '^gtk3-devel' || true)
582
scripts/optimize_cover_images.py
Normal file
@@ -0,0 +1,582 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Optimize cover images:
|
||||
1. Scale the image to maintain proper margins around the content.
|
||||
2. Reduce the image size using pngquant.
|
||||
3. Resize the image to fit within the maximum allowed dimensions.
|
||||
|
||||
To run the script:
|
||||
python3 optimize_cover_images.py --optimize
|
||||
|
||||
This script searches for *_cover.png images in ./resources/profiles/
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
from PIL import Image, ImageChops
|
||||
import argparse
|
||||
|
||||
|
||||
def get_file_size(path):
|
||||
"""Get file size in bytes."""
|
||||
return os.path.getsize(path)
|
||||
|
||||
|
||||
def format_size(size_bytes):
|
||||
"""Format file size in human-readable format."""
|
||||
for unit in ['B', 'KB', 'MB']:
|
||||
if size_bytes < 1024.0:
|
||||
return f"{size_bytes:.1f} {unit}"
|
||||
size_bytes /= 1024.0
|
||||
return f"{size_bytes:.1f} GB"
|
||||
|
||||
|
||||
def check_pngquant_available():
|
||||
"""Check if pngquant is available in the system."""
|
||||
return shutil.which('pngquant') is not None
|
||||
|
||||
|
||||
def optimize_png_with_pngquant(img_path, quality_range="65-80"):
|
||||
"""
|
||||
Optimize PNG using pngquant for better compression.
|
||||
|
||||
Args:
|
||||
img_path: Path to PNG file
|
||||
quality_range: Quality range for pngquant (e.g., "65-80")
|
||||
|
||||
Returns:
|
||||
True if successful, False otherwise
|
||||
"""
|
||||
try:
|
||||
# pngquant --quality 65-80 --force --ext .png image.png
|
||||
result = subprocess.run(
|
||||
['pngquant', '--quality', quality_range,
|
||||
'--force', '--ext', '.png', str(img_path)],
|
||||
capture_output=True,
|
||||
text=True,
|
||||
timeout=30
|
||||
)
|
||||
return result.returncode == 0
|
||||
except Exception as e:
|
||||
print(f" Warning: pngquant failed: {e}")
|
||||
return False
|
||||
|
||||
|
||||
def optimize_png_pillow(img, output_path, has_transparency=True):
|
||||
"""
|
||||
Optimize PNG using Pillow's best compression settings.
|
||||
|
||||
Args:
|
||||
img: PIL Image object
|
||||
output_path: Path to save optimized image
|
||||
has_transparency: Whether image has transparency
|
||||
"""
|
||||
# Use maximum compression
|
||||
# compress_level: 0-9, where 9 is maximum compression (slower but smaller)
|
||||
save_kwargs = {
|
||||
'format': 'PNG',
|
||||
'optimize': True,
|
||||
'compress_level': 9
|
||||
}
|
||||
|
||||
# For images with transparency, ensure we're saving as RGBA
|
||||
if has_transparency and img.mode != 'RGBA':
|
||||
img = img.convert('RGBA')
|
||||
|
||||
img.save(output_path, **save_kwargs)
|
||||
|
||||
|
||||
def get_image_bbox(img):
|
||||
"""
|
||||
Get the bounding box of non-transparent/non-white content in an image.
|
||||
|
||||
Args:
|
||||
img: PIL Image object
|
||||
|
||||
Returns:
|
||||
Tuple (left, top, right, bottom) or None if image is empty
|
||||
"""
|
||||
# Convert to RGBA if not already
|
||||
if img.mode != 'RGBA':
|
||||
img = img.convert('RGBA')
|
||||
|
||||
# Get the alpha channel
|
||||
alpha = img.split()[-1]
|
||||
|
||||
# Find bounding box of non-transparent pixels
|
||||
bbox = alpha.getbbox()
|
||||
|
||||
if bbox is None:
|
||||
# If all transparent, try to find non-white pixels in RGB
|
||||
if img.mode == 'RGBA':
|
||||
rgb = Image.new('RGB', img.size, (255, 255, 255))
|
||||
rgb.paste(img, mask=img.split()[-1])
|
||||
bg = Image.new('RGB', img.size, (255, 255, 255))
|
||||
diff = ImageChops.difference(rgb, bg)
|
||||
bbox = diff.getbbox()
|
||||
|
||||
return bbox
|
||||
|
||||
|
||||
def calculate_margins(bbox, img_size):
|
||||
"""
|
||||
Calculate the current margins as a percentage of image size.
|
||||
|
||||
Args:
|
||||
bbox: Tuple (left, top, right, bottom)
|
||||
img_size: Tuple (width, height)
|
||||
|
||||
Returns:
|
||||
Dict with margin percentages
|
||||
"""
|
||||
if bbox is None:
|
||||
return None
|
||||
|
||||
left, top, right, bottom = bbox
|
||||
width, height = img_size
|
||||
|
||||
content_width = right - left
|
||||
content_height = bottom - top
|
||||
|
||||
margin_left = left / width * 100
|
||||
margin_top = top / height * 100
|
||||
margin_right = (width - right) / width * 100
|
||||
margin_bottom = (height - bottom) / height * 100
|
||||
|
||||
content_width_pct = content_width / width * 100
|
||||
content_height_pct = content_height / height * 100
|
||||
|
||||
return {
|
||||
'left': margin_left,
|
||||
'top': margin_top,
|
||||
'right': margin_right,
|
||||
'bottom': margin_bottom,
|
||||
'content_width': content_width_pct,
|
||||
'content_height': content_height_pct
|
||||
}
|
||||
|
||||
|
||||
def adjust_image_margins(img_path, target_content_ratio=0.84, dry_run=False, use_pngquant=False, quality_range="65-80", max_size=None):
|
||||
"""
|
||||
Adjust image so content takes up target_content_ratio of the image size.
|
||||
|
||||
Args:
|
||||
img_path: Path to the image file
|
||||
target_content_ratio: Target ratio of content to image size (0.84 = 84%)
|
||||
dry_run: If True, don't save changes, just report
|
||||
use_pngquant: Use pngquant for additional compression
|
||||
quality_range: Quality range for pngquant
|
||||
max_size: Maximum dimension (width or height) in pixels, None to disable
|
||||
|
||||
Returns:
|
||||
Dict with adjustment info or None if not adjusted
|
||||
"""
|
||||
try:
|
||||
# Get original file size
|
||||
original_file_size = get_file_size(img_path)
|
||||
|
||||
img = Image.open(img_path)
|
||||
original_size = img.size
|
||||
original_mode = img.mode
|
||||
|
||||
# Convert to RGBA if the image has transparency
|
||||
has_transparency = original_mode in ('RGBA', 'LA') or (
|
||||
original_mode == 'P' and 'transparency' in img.info)
|
||||
        if has_transparency and img.mode != 'RGBA':
            img = img.convert('RGBA')

        # Resize if image is too large
        was_resized = False
        if max_size and (img.size[0] > max_size or img.size[1] > max_size):
            # Calculate new size maintaining aspect ratio
            aspect_ratio = img.size[0] / img.size[1]
            if img.size[0] > img.size[1]:
                new_width = max_size
                new_height = int(max_size / aspect_ratio)
            else:
                new_height = max_size
                new_width = int(max_size * aspect_ratio)

            # Use high-quality resampling (LANCZOS for best quality)
            # Handle both old and new Pillow APIs
            try:
                resample = Image.Resampling.LANCZOS
            except AttributeError:
                resample = Image.LANCZOS

            img = img.resize((new_width, new_height), resample)
            was_resized = True

        # Get bounding box of actual content
        bbox = get_image_bbox(img)

        if bbox is None:
            print(f"  ⚠️  {img_path}: Image appears to be empty, skipping")
            return None

        left, top, right, bottom = bbox
        content_width = right - left
        content_height = bottom - top

        # Calculate current content ratio
        current_width_ratio = content_width / img.size[0]
        current_height_ratio = content_height / img.size[1]

        # Calculate margins
        margins = calculate_margins(bbox, img.size)

        print(f"\n📄 {img_path}")
        if was_resized:
            print(
                f"  Original Size: {original_size[0]}x{original_size[1]} → Resized to {img.size[0]}x{img.size[1]}")
        print(
            f"  Size: {img.size[0]}x{img.size[1]} (Mode: {original_mode}, Transparency: {has_transparency})")
        print(f"  File: {format_size(original_file_size)}")
        print(f"  Content: {content_width}x{content_height} " +
              f"({margins['content_width']:.1f}% x {margins['content_height']:.1f}%)")
        print(f"  Margins: L:{margins['left']:.1f}% T:{margins['top']:.1f}% " +
              f"R:{margins['right']:.1f}% B:{margins['bottom']:.1f}%")

        # Check if adjustment is needed (allow 5% tolerance)
        avg_ratio = (current_width_ratio + current_height_ratio) / 2
        tolerance = 0.05

        if abs(avg_ratio - target_content_ratio) < tolerance:
            print(f"  ✓ Already properly sized (avg ratio: {avg_ratio:.2f})")

            # If the image was resized, we still need to save it
            if was_resized and not dry_run:
                optimize_png_pillow(img, img_path, has_transparency)
                new_file_size = get_file_size(img_path)

                if use_pngquant:
                    print("  🔧 Applying pngquant optimization...")
                    if optimize_png_with_pngquant(img_path, quality_range):
                        pngquant_size = get_file_size(img_path)
                        print(f"  pngquant: {format_size(new_file_size)} → {format_size(pngquant_size)} " +
                              f"({(pngquant_size/new_file_size-1)*100:+.1f}%)")
                        new_file_size = pngquant_size

                size_change_pct = (
                    new_file_size / original_file_size - 1) * 100
                print(f"  ✓ Saved (resized): {format_size(original_file_size)} → {format_size(new_file_size)} " +
                      f"({size_change_pct:+.1f}%)")

                return {
                    'adjusted': True,
                    'original_size': original_file_size,
                    'new_size': new_file_size,
                    'size_saved': original_file_size - new_file_size
                }

            return None

        # Crop to content
        cropped = img.crop(bbox)

        # Calculate the new image size to achieve the target ratio while
        # preserving the aspect ratio.
        # We want: content_size / new_image_size = target_ratio
        # So: new_image_size = content_size / target_ratio
        # But we need to maintain the original aspect ratio.

        original_aspect_ratio = img.size[0] / img.size[1]

        # Calculate required sizes for each dimension
        required_width = content_width / target_content_ratio
        required_height = content_height / target_content_ratio

        # Choose the larger requirement to ensure the content fits within the
        # target ratio, then adjust the other dimension to maintain the aspect ratio.
        if required_width / original_aspect_ratio > required_height:
            # Width is the limiting factor
            new_width = int(required_width)
            new_height = int(new_width / original_aspect_ratio)
        else:
            # Height is the limiting factor
            new_height = int(required_height)
            new_width = int(new_height * original_aspect_ratio)
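        # Worked example (illustrative numbers, not from any real profile):
        # a 300x200 image whose content bbox is 252x150 with target ratio 0.84
        # needs required_width = 252/0.84 = 300 and required_height =
        # 150/0.84 ≈ 178.6; since 300/1.5 = 200 > 178.6, width is the limiting
        # factor, so the new canvas stays 300x200 and the content is re-centered.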

        # Create a new image with a transparent/white background
        if has_transparency:
            new_img = Image.new(
                'RGBA', (new_width, new_height), (255, 255, 255, 0))
        else:
            new_img = Image.new(
                'RGB', (new_width, new_height), (255, 255, 255))

        # Calculate position to center the content
        paste_x = (new_width - content_width) // 2
        paste_y = (new_height - content_height) // 2

        # Paste cropped content onto the new image
        if has_transparency:
            new_img.paste(cropped, (paste_x, paste_y), cropped)
        else:
            new_img.paste(cropped, (paste_x, paste_y))

        actual_content_ratio_w = content_width / new_width
        actual_content_ratio_h = content_height / new_height
        print(f"  → Adjusting to {new_width}x{new_height} " +
              f"(aspect ratio: {original_aspect_ratio:.2f}, " +
              f"content: {actual_content_ratio_w*100:.1f}% x {actual_content_ratio_h*100:.1f}%)")

        if not dry_run:
            # Save the adjusted image with optimization
            optimize_png_pillow(new_img, img_path, has_transparency)

            # Get new file size after Pillow optimization
            new_file_size = get_file_size(img_path)

            # Optionally use pngquant for additional compression
            if use_pngquant:
                print("  🔧 Applying pngquant optimization...")
                if optimize_png_with_pngquant(img_path, quality_range):
                    pngquant_size = get_file_size(img_path)
                    print(f"  pngquant: {format_size(new_file_size)} → {format_size(pngquant_size)} " +
                          f"({(pngquant_size/new_file_size-1)*100:+.1f}%)")
                    new_file_size = pngquant_size

            size_change_pct = (new_file_size / original_file_size - 1) * 100
            print(f"  ✓ Saved: {format_size(original_file_size)} → {format_size(new_file_size)} " +
                  f"({size_change_pct:+.1f}%)")

            return {
                'adjusted': True,
                'original_size': original_file_size,
                'new_size': new_file_size,
                'size_saved': original_file_size - new_file_size
            }
        else:
            print("  ⚠️  Dry run - not saved")
            return {
                'adjusted': False,
                'original_size': original_file_size,
                'new_size': original_file_size,
                'size_saved': 0
            }

    except Exception as e:
        print(f"  ❌ Error processing {img_path}: {e}")
        import traceback
        traceback.print_exc()
        return None


def find_and_process_cover_images(base_path, target_ratio=0.84, dry_run=False, use_pngquant=False, quality_range="65-80", max_size=None):
    """
    Find all *_cover.png images and process them.

    Args:
        base_path: Base directory to search
        target_ratio: Target content-to-image ratio
        dry_run: If True, don't save changes
        use_pngquant: Use pngquant for additional compression
        quality_range: Quality range for pngquant
        max_size: Maximum dimension (width or height) in pixels

    Returns:
        Dict with statistics
    """
    base_path = Path(base_path)

    if not base_path.exists():
        print(f"❌ Path does not exist: {base_path}")
        return {'total': 0, 'adjusted': 0, 'skipped': 0, 'errors': 0,
                'original_total_size': 0, 'new_total_size': 0, 'total_saved': 0}

    # Find all *_cover.png files
    cover_images = list(base_path.rglob('*_cover.png'))

    if not cover_images:
        print(f"⚠️  No *_cover.png files found in {base_path}")
        return {'total': 0, 'adjusted': 0, 'skipped': 0, 'errors': 0,
                'original_total_size': 0, 'new_total_size': 0, 'total_saved': 0}

    print(f"🔍 Found {len(cover_images)} cover image(s) in {base_path}")

    if use_pngquant:
        if check_pngquant_available():
            print("✓ pngquant is available and will be used")
        else:
            print("⚠️  pngquant not found in PATH, will use Pillow optimization only")
            print(
                "  Install: brew install pngquant (macOS) or apt install pngquant (Linux)")
            use_pngquant = False

    stats = {
        'total': len(cover_images),
        'adjusted': 0,
        'skipped': 0,
        'errors': 0,
        'original_total_size': 0,
        'new_total_size': 0,
        'total_saved': 0
    }

    for img_path in cover_images:
        try:
            result = adjust_image_margins(
                img_path, target_ratio, dry_run, use_pngquant, quality_range, max_size)
            if result is None:
                stats['errors'] += 1
            elif result.get('adjusted'):
                stats['adjusted'] += 1
                stats['original_total_size'] += result['original_size']
                stats['new_total_size'] += result['new_size']
                stats['total_saved'] += result['size_saved']
            else:
                stats['skipped'] += 1
                stats['original_total_size'] += result['original_size']
                stats['new_total_size'] += result['original_size']
        except Exception as e:
            print(f"❌ Error processing {img_path}: {e}")
            stats['errors'] += 1

    return stats


def main():
    parser = argparse.ArgumentParser(
        description='Optimize cover images: \n'
                    '1. Scale the image to maintain proper margins around the content. \n'
                    '2. Reduce the image size using pngquant. \n'
                    '3. Resize the image to fit within the maximum allowed dimensions.',
        epilog='Examples:\n'
               '  %(prog)s --dry-run\n'
               '  %(prog)s --optimize\n'
               '  %(prog)s --optimize --quality 70-85\n'
               '  %(prog)s --vendor Custom\n'
               '  %(prog)s --vendor Custom --optimize\n'
               '  %(prog)s --max-size 200\n'
               '  %(prog)s --no-resize\n'
               '  %(prog)s --path ./custom/path --ratio 0.80\n'
               '\n'
               'Dependencies:\n'
               '  Required: pip3 install Pillow\n'
               '  Optional (for --optimize):\n'
               '    macOS: brew install pngquant\n'
               '    Linux: sudo apt install pngquant\n'
               '    Arch: sudo pacman -S pngquant\n'
               '    Windows: choco install pngquant or download from https://pngquant.org/',
        formatter_class=argparse.RawDescriptionHelpFormatter
    )
    parser.add_argument(
        '--path',
        default='./resources/profiles',
        help='Base path to search for cover images (default: ./resources/profiles)'
    )
    parser.add_argument(
        '--vendor',
        type=str,
        help='Process only a specific vendor subfolder (e.g., "Custom")'
    )
    parser.add_argument(
        '--ratio',
        type=float,
        default=1,
        help='Target content-to-image ratio (default: 1 = 100%%)'
    )
    parser.add_argument(
        '--dry-run',
        action='store_true',
        help='Preview changes without saving'
    )
    parser.add_argument(
        '--optimize',
        action='store_true',
        help='Use pngquant for additional compression (must be installed)'
    )
    parser.add_argument(
        '--quality',
        default='65-80',
        help='Quality range for pngquant (default: 65-80). Lower = smaller files'
    )
    parser.add_argument(
        '--max-size',
        type=int,
        default=240,
        help='Maximum image dimension in pixels (default: 240). Images larger than this will be resized'
    )
    parser.add_argument(
        '--no-resize',
        action='store_true',
        help='Disable automatic resizing of large images'
    )

    args = parser.parse_args()

    print("=" * 70)
    print("Cover Image Margin Adjuster & Optimizer")
    print("=" * 70)

    if args.dry_run:
        print("⚠️  DRY RUN MODE - No changes will be saved\n")

    # Determine the search path
    search_path = args.path
    if args.vendor:
        search_path = os.path.join(args.path, args.vendor)
        print(f"🎯 Processing vendor: {args.vendor}")
        print(f"  Path: {search_path}")

        # Check if the vendor path exists
        if not os.path.exists(search_path):
            print(f"❌ Error: Vendor path does not exist: {search_path}")
            print(f"\nAvailable vendors in {args.path}:")
            try:
                vendors = [d for d in os.listdir(args.path)
                           if os.path.isdir(os.path.join(args.path, d)) and not d.startswith('.')]
                for vendor in sorted(vendors):
                    print(f"  - {vendor}")
            except Exception:
                pass
            return 1
        print()

    # Determine max size (None if --no-resize is specified)
    max_size = None if args.no_resize else args.max_size

    if max_size:
        print(f"📏 Images will be resized to max {max_size}px if larger\n")

    stats = find_and_process_cover_images(
        search_path,
        args.ratio,
        args.dry_run,
        args.optimize,
        args.quality,
        max_size
    )

    print("\n" + "=" * 70)
    print("Summary:")
    print(f"  Total images: {stats['total']}")
    print(f"  Adjusted: {stats['adjusted']}")
    print(f"  Already correct: {stats['skipped']}")
    print(f"  Errors: {stats['errors']}")

    if stats['adjusted'] > 0:
        print("\n  File Size:")
        print(f"    Original: {format_size(stats['original_total_size'])}")
        print(f"    New: {format_size(stats['new_total_size'])}")
        if stats['total_saved'] > 0:
            saved_pct = (stats['total_saved'] /
                         stats['original_total_size']) * 100
            print(
                f"    Saved: {format_size(stats['total_saved'])} ({saved_pct:.1f}%)")
        elif stats['total_saved'] < 0:
            increased_pct = (-stats['total_saved'] /
                             stats['original_total_size']) * 100
            print(
                f"    Increased: {format_size(-stats['total_saved'])} (+{increased_pct:.1f}%)")

    print("=" * 70)

    return 0 if stats['errors'] == 0 else 1


if __name__ == '__main__':
    sys.exit(main())
481
scripts/orca_extra_profile_check.py
Normal file
481
scripts/orca_extra_profile_check.py
Normal file
@@ -0,0 +1,481 @@
import os
import json
import argparse
from pathlib import Path

OBSOLETE_KEYS = {
    "acceleration", "scale", "rotate", "duplicate", "duplicate_grid",
    "bed_size", "print_center", "g0", "wipe_tower_per_color_wipe",
    "support_sharp_tails", "support_remove_small_overhangs", "support_with_sheath",
    "tree_support_collision_resolution", "tree_support_with_infill",
    "max_volumetric_speed", "max_print_speed", "support_closing_radius",
    "remove_freq_sweep", "remove_bed_leveling", "remove_extrusion_calibration",
    "support_transition_line_width", "support_transition_speed", "bed_temperature",
    "bed_temperature_initial_layer", "can_switch_nozzle_type", "can_add_auxiliary_fan",
    "extra_flush_volume", "spaghetti_detector", "adaptive_layer_height",
    "z_hop_type", "z_lift_type", "bed_temperature_difference", "long_retraction_when_cut",
    "retraction_distance_when_cut", "extruder_type", "internal_bridge_support_thickness",
    "extruder_clearance_max_radius", "top_area_threshold", "reduce_wall_solid_infill",
    "filament_load_time", "filament_unload_time", "smooth_coefficient",
    "overhang_totally_speed", "silent_mode", "overhang_speed_classic"
}


# Utility functions for printing messages in different colors.
def print_error(msg):
    print(f"\033[91m[ERROR]\033[0m {msg}")  # Red

def print_warning(msg):
    print(f"\033[93m[WARNING]\033[0m {msg}")  # Yellow

def print_info(msg):
    print(f"\033[94m[INFO]\033[0m {msg}")  # Blue

def print_success(msg):
    print(f"\033[92m[SUCCESS]\033[0m {msg}")  # Green


# Helper function for duplicate key detection.
def no_duplicates_object_pairs_hook(pairs):
    seen = {}
    for key, value in pairs:
        if key in seen:
            raise ValueError(f"Duplicate key detected: {key}")
        seen[key] = value
    return seen
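

# Usage sketch (hypothetical file name, shown for illustration only):
#   with open("profile.json", "r", encoding="UTF-8") as fp:
#       data = json.load(fp, object_pairs_hook=no_duplicates_object_pairs_hook)
# json.load() passes each decoded object's (key, value) pairs to the hook, so
# a profile that defines the same key twice raises ValueError instead of
# silently keeping the last value.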

# NOTE: currently Orca expects compatible_printers to be defined in every
# instantiation profile; inheritance is not supported in the Profile page.
def check_filament_compatible_printers(vendor_folder):
    """
    Checks JSON files in the vendor folder for missing or empty 'compatible_printers'
    when 'instantiation' is flagged as true.

    Parameters:
        vendor_folder (str or Path): The directory to search for JSON profile files.

    Returns:
        int: The number of profiles with missing or empty 'compatible_printers'.
    """
    error = 0
    vendor_path = Path(vendor_folder)
    if not vendor_path.exists():
        return 0

    profiles = {}

    # Use rglob to recursively find .json files.
    for file_path in vendor_path.rglob("*.json"):
        if file_path.name == 'filaments_color_codes.json':  # Ignore non-profile file
            continue

        try:
            with open(file_path, 'r', encoding='UTF-8') as fp:
                # Use custom hook to detect duplicates.
                data = json.load(fp, object_pairs_hook=no_duplicates_object_pairs_hook)
        except ValueError as ve:
            print_error(f"Duplicate key error in {file_path}: {ve}")
            error += 1
            continue
        except Exception as e:
            print_error(f"Error processing {file_path}: {e}")
            error += 1
            continue

        profile_name = data['name']
        if profile_name in profiles:
            print_error(f"Duplicated profile {profile_name}: {file_path}")
            error += 1
            continue

        profiles[profile_name] = {
            'file_path': file_path,
            'content': data,
        }

    def get_property(profile, key):
        content = profile['content']
        if key in content:
            return content[key]
        return None

    def get_inherit_property(profile, key):
        content = profile['content']
        if key in content:
            return content[key]

        if 'inherits' in content:
            inherits = content['inherits']
            if inherits not in profiles:
                raise ValueError(f"Parent profile not found: {inherits}, referenced in {profile['file_path']}")

            return get_inherit_property(profiles[inherits], key)

        return None

    for profile in profiles.values():
        instantiation = str(profile['content'].get("instantiation", "")).lower() == "true"
        if instantiation:
            try:
                compatible_printers = get_property(profile, "compatible_printers")
                if not compatible_printers or (isinstance(compatible_printers, list) and not compatible_printers):
                    print_error(f"'compatible_printers' missing in {profile['file_path']}")
                    error += 1
            except ValueError as ve:
                print_error(f"Unable to parse {profile['file_path']}: {ve}")
                error += 1
                continue

    return error


def load_available_filament_profiles(profiles_dir, vendor_name):
    """
    Load all available filament profiles from a vendor's directory.

    Parameters:
        profiles_dir (Path): The directory containing vendor profile directories
        vendor_name (str): The name of the vendor directory

    Returns:
        set: A set of filament profile names
    """
    profiles = set()
    vendor_path = profiles_dir / vendor_name / "filament"

    if not vendor_path.exists():
        return profiles

    for file_path in vendor_path.rglob("*.json"):
        try:
            with open(file_path, 'r', encoding='UTF-8') as fp:
                data = json.load(fp)
                if "name" in data:
                    profiles.add(data["name"])
        except Exception as e:
            print_error(f"Error loading filament profile {file_path}: {e}")

    return profiles


def check_machine_default_materials(profiles_dir, vendor_name):
    """
    Checks if default materials referenced in machine profiles exist in
    the vendor's filament library or in the global OrcaFilamentLibrary.

    Parameters:
        profiles_dir (Path): The base profiles directory
        vendor_name (str): The vendor name to check

    Returns:
        int: Number of missing filament references found
        int: Number of warnings found (0 or 1)
    """
    error_count = 0
    machine_dir = profiles_dir / vendor_name / "machine"

    if not machine_dir.exists():
        print_warning(f"No machine profiles found for vendor: {vendor_name}")
        return 0, 1

    # Load available filament profiles
    vendor_filaments = load_available_filament_profiles(profiles_dir, vendor_name)
    global_filaments = load_available_filament_profiles(profiles_dir, "OrcaFilamentLibrary")
    all_available_filaments = vendor_filaments.union(global_filaments)

    # Check each machine profile
    for file_path in machine_dir.rglob("*.json"):
        try:
            with open(file_path, 'r', encoding='UTF-8') as fp:
                data = json.load(fp)

            default_materials = None
            if "default_materials" in data:
                default_materials = data["default_materials"]
            elif "default_filament_profile" in data:
                default_materials = data["default_filament_profile"]

            if default_materials:
                if isinstance(default_materials, list):
                    for material in default_materials:
                        if material not in all_available_filaments:
                            print_error(f"Missing filament profile: '{material}' referenced in {file_path.relative_to(profiles_dir)}")
                            error_count += 1
                else:
                    # Handle a semicolon-separated list of materials in a string
                    if ";" in default_materials:
                        for material in default_materials.split(";"):
                            material = material.strip()
                            if material and material not in all_available_filaments:
                                print_error(f"Missing filament profile: '{material}' referenced in {file_path.relative_to(profiles_dir)}")
                                error_count += 1
                    else:
                        # Single material in a string
                        if default_materials not in all_available_filaments:
                            print_error(f"Missing filament profile: '{default_materials}' referenced in {file_path.relative_to(profiles_dir)}")
                            error_count += 1

        except Exception as e:
            print_error(f"Error processing machine profile {file_path}: {e}")
            error_count += 1

    return error_count, 0


def check_name_consistency(profiles_dir, vendor_name):
    """
    Make sure filament profile names match in both the vendor JSON and sub-path files.
    Filament profiles work only if the name in <vendor>.json matches the name in the
    sub_path file, or if it's one of the sub_path file's `renamed_from` entries.

    Parameters:
        profiles_dir (Path): Base profiles directory
        vendor_name (str): Vendor name

    Returns:
        int: Number of errors found
        int: Number of warnings found (0 or 1)
    """
    error_count = 0
    vendor_dir = profiles_dir / vendor_name
    vendor_file = profiles_dir / (vendor_name + ".json")

    if not vendor_file.exists():
        print_warning(f"No profiles found for vendor: {vendor_name} at {vendor_file}")
        return 0, 1

    try:
        with open(vendor_file, 'r', encoding='UTF-8') as fp:
            data = json.load(fp)
    except Exception as e:
        print_error(f"Error loading vendor profile {vendor_file}: {e}")
        return 1, 0

    for section in ['filament_list', 'machine_model_list', 'machine_list', 'process_list']:
        if section not in data:
            continue

        for child in data[section]:
            name_in_vendor = child['name']
            sub_path = child['sub_path']
            sub_file = vendor_dir / sub_path

            if not sub_file.exists():
                print_error(f"Missing sub profile: '{sub_path}' declared in {vendor_file.relative_to(profiles_dir)}")
                error_count += 1
                continue

            try:
                with open(sub_file, 'r', encoding='UTF-8') as fp:
                    sub_data = json.load(fp)
            except Exception as e:
                print_error(f"Error loading profile {sub_file}: {e}")
                error_count += 1
                continue

            name_in_sub = sub_data['name']

            if name_in_sub == name_in_vendor:
                continue

            # if 'renamed_from' in sub_data:
            #     renamed_from = [n.strip() for n in sub_data['renamed_from'].split(';')]
            #     if name_in_vendor in renamed_from:
            #         continue

            print_error(f"{section} name mismatch: required '{name_in_vendor}' in {vendor_file.relative_to(profiles_dir)} but found '{name_in_sub}' in {sub_file.relative_to(profiles_dir)}")
            error_count += 1

    return error_count, 0


def check_filament_id(vendor, vendor_folder):
    """
    Make sure filament_id is no longer than 8 characters; otherwise the AMS won't work properly.
    """
    if vendor not in ('BBL', 'OrcaFilamentLibrary'):
        return 0

    error = 0
    vendor_path = Path(vendor_folder)
    if not vendor_path.exists():
        return 0

    # Use rglob to recursively find .json files.
    for file_path in vendor_path.rglob("*.json"):
        try:
            with open(file_path, 'r', encoding='UTF-8') as fp:
                # Use custom hook to detect duplicates.
                data = json.load(fp, object_pairs_hook=no_duplicates_object_pairs_hook)
        except ValueError as ve:
            print_error(f"Duplicate key error in {file_path}: {ve}")
            error += 1
            continue
        except Exception as e:
            print_error(f"Error processing {file_path}: {e}")
            error += 1
            continue

        if 'filament_id' not in data:
            continue

        filament_id = data['filament_id']

        if len(filament_id) > 8:
            error += 1
            print_error(f"Filament id too long \"{filament_id}\": {file_path}")

    return error
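

# For reference (hypothetical IDs, shown for illustration only):
#   "GFL00"      -> 5 characters, accepted
#   "GFL00_PLUS" -> 11 characters, rejected with "Filament id too long"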


def check_obsolete_keys(profiles_dir, vendor_name):
    """
    Check for obsolete keys in all filament profiles for a vendor.

    Parameters:
        profiles_dir (Path): Base profiles directory
        vendor_name (str): Vendor name

    Returns:
        int: Number of obsolete keys found
    """
    error_count = 0
    vendor_path = profiles_dir / vendor_name / "filament"

    if not vendor_path.exists():
        return 0

    for file_path in vendor_path.rglob("*.json"):
        try:
            with open(file_path, "r", encoding="UTF-8") as fp:
                data = json.load(fp)
        except Exception as e:
            print_warning(f"Error reading profile {file_path.relative_to(profiles_dir)}: {e}")
            error_count += 1
            continue

        for key in data.keys():
            if key in OBSOLETE_KEYS:
                print_warning(f"Obsolete key: '{key}' found in {file_path.relative_to(profiles_dir)}")
                error_count += 1

    return error_count


CONFLICT_KEYS = [
    ['extruder_clearance_radius', 'extruder_clearance_max_radius'],
]


def check_conflict_keys(profiles_dir, vendor_name):
    """
    Check for keys that cannot be specified at the same time,
    due to option renaming and backward-compatibility reasons.

    For example, `extruder_clearance_max_radius` and `extruder_clearance_radius` cannot co-exist,
    otherwise the slicer won't know which one to use.

    Parameters:
        profiles_dir (Path): Base profiles directory
        vendor_name (str): Vendor name

    Returns:
        int: Number of errors found
        int: Number of warnings found
    """
    error_count = 0
    warn_count = 0
    vendor_path = profiles_dir / vendor_name

    if not vendor_path.exists():
        print_warning(f"No machine profiles found for vendor: {vendor_name}")
        return 0, 1

    for file_path in vendor_path.rglob("*.json"):
        try:
            with open(file_path, 'r', encoding='UTF-8') as fp:
                # Use custom hook to detect duplicates.
                data = json.load(fp, object_pairs_hook=no_duplicates_object_pairs_hook)
        except ValueError as ve:
            print_error(f"Duplicate key error in {file_path.relative_to(profiles_dir)}: {ve}")
            error_count += 1
            continue
        except Exception as e:
            print_error(f"Error processing {file_path.relative_to(profiles_dir)}: {e}")
            error_count += 1
            continue

        for key_sets in CONFLICT_KEYS:
            if sum(1 if k in data else 0 for k in key_sets) > 1:
                print_error(f"Conflict keys {key_sets} co-exist in {file_path.relative_to(profiles_dir)}")
                error_count += 1

    return error_count, warn_count


def main():
    parser = argparse.ArgumentParser(
        description="Check 3D printer profiles for common issues",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    parser.add_argument("--vendor", type=str, help="Specify a single vendor to check")
    parser.add_argument("--check-filaments", action="store_true", help="Check 'compatible_printers' in filament profiles")
    parser.add_argument("--check-materials", action="store_true", help="Check default materials in machine profiles")
    parser.add_argument("--check-obsolete-keys", action="store_true", help="Warn if obsolete keys are found in filament profiles")
    args = parser.parse_args()

    print_info("Checking profiles ...")

    script_dir = Path(__file__).resolve().parent
    profiles_dir = script_dir.parent / "resources" / "profiles"
    checked_vendor_count = 0
    errors_found = 0
    warnings_found = 0

    def run_checks(vendor_name):
        nonlocal errors_found, warnings_found, checked_vendor_count
        vendor_path = profiles_dir / vendor_name

        # Run the filament check unless only --check-materials was requested.
        if args.check_filaments or not args.check_materials:
            errors_found += check_filament_compatible_printers(vendor_path / "filament")

        if args.check_materials:
            new_errors, new_warnings = check_machine_default_materials(profiles_dir, vendor_name)
            errors_found += new_errors
            warnings_found += new_warnings

        if args.check_obsolete_keys:
            warnings_found += check_obsolete_keys(profiles_dir, vendor_name)

        new_errors, new_warnings = check_name_consistency(profiles_dir, vendor_name)
        errors_found += new_errors
        warnings_found += new_warnings

        new_errors, new_warnings = check_conflict_keys(profiles_dir, vendor_name)
        errors_found += new_errors
        warnings_found += new_warnings

        errors_found += check_filament_id(vendor_name, vendor_path / "filament")
        checked_vendor_count += 1

    if args.vendor:
        run_checks(args.vendor)
    else:
        for vendor_dir in profiles_dir.iterdir():
            if not vendor_dir.is_dir() or vendor_dir.name == "OrcaFilamentLibrary":
                continue
            run_checks(vendor_dir.name)

    # ✨ Compiler-style summary output
    print("\n==================== SUMMARY ====================")
    print_info(f"Checked vendors     : {checked_vendor_count}")
    if errors_found > 0:
        print_error(f"Files with errors   : {errors_found}")
    else:
        print_success("Files with errors   : 0")
    if warnings_found > 0:
        print_warning(f"Files with warnings : {warnings_found}")
    else:
        print_success("Files with warnings : 0")
    print("=================================================")
    if errors_found > 0 or warnings_found > 0:
        print_warning('Issue(s) found, try `orca_filament_lib.py --fix` to fix common issues automatically')

    exit(-1 if errors_found > 0 else 0)


if __name__ == "__main__":
    main()
307
scripts/orca_filament_lib.py
Normal file
307
scripts/orca_filament_lib.py
Normal file
@@ -0,0 +1,307 @@
import os
import json
import argparse
from collections import defaultdict


def create_ordered_profile(profile_dict, priority_fields=['name', 'type']):
    """Create a new dictionary with the priority fields first"""
    ordered_profile = {}

    # Add priority fields first
    for field in priority_fields:
        if field in profile_dict:
            ordered_profile[field] = profile_dict[field]

    # Add remaining fields
    for key, value in profile_dict.items():
        if key not in priority_fields:
            ordered_profile[key] = value

    return ordered_profile


def topological_sort(filaments):
    # Build a graph of dependencies
    graph = defaultdict(list)
    in_degree = defaultdict(int)
    name_to_filament = {f['name']: f for f in filaments}
    all_names = set(name_to_filament.keys())

    # Create the dependency graph
    processed_files = set()
    for filament in filaments:
        if 'inherits' in filament:
            parent = filament['inherits']
            child = filament['name']
            # Only create a dependency if the parent exists
            if parent in all_names:
                graph[parent].append(child)
                in_degree[child] += 1
                if parent not in in_degree:
                    in_degree[parent] = 0
                processed_files.add(child)
                processed_files.add(parent)

    # Initialize the queue with nodes having no dependencies (sorted)
    queue = sorted([name for name, degree in in_degree.items() if degree == 0])
    result = []

    # Process the queue
    while queue:
        current = queue.pop(0)
        result.append(name_to_filament[current])
        processed_files.add(current)

        # Process children (sorted)
        children = sorted(graph[current])
        for child in children:
            in_degree[child] -= 1
            if in_degree[child] == 0:
                queue.append(child)

    # Add remaining files that weren't part of the inheritance tree (sorted)
    remaining = sorted(all_names - processed_files)
    for name in remaining:
        result.append(name_to_filament[name])

    return result
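

# Worked example (hypothetical profile names, shown for illustration only): given
#   [{"name": "PLA Silk", "inherits": "PLA Basic"}, {"name": "PLA Basic"}]
# "PLA Basic" starts with in-degree 0 and is emitted first, then "PLA Silk",
# so every parent profile appears in the vendor list before its children.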


def update_profile_library(vendor="", profile_type="filament"):
    # Change the current working directory to the path (../resources/profiles)
    # relative to the script location.
    os.chdir(os.path.join(os.path.dirname(__file__), '..', 'resources', 'profiles'))

    # Collect current profile entries
    if vendor:
        vendors = [vendor]
    else:
        profiles_dir = os.path.join(os.path.dirname(__file__), '..', 'resources', 'profiles')
        vendors = [f[:-5] for f in os.listdir(profiles_dir) if f.lower().endswith('.json')]
    for vendor in vendors:
        current_profiles = []
        base_dir = vendor
        # Orca expects machine_model to be in the machine folder
        if profile_type == 'machine_model':
            profile_dir = os.path.join(base_dir, 'machine')
        else:
            profile_dir = os.path.join(base_dir, profile_type)

        for root, dirs, files in os.walk(profile_dir):
            for file in files:
                if file.lower().endswith('.json'):
                    full_path = os.path.join(root, file)

                    # Get the path relative to the base directory
                    sub_path = os.path.relpath(full_path, base_dir).replace('\\', '/')

                    try:
                        with open(full_path, 'r', encoding='utf-8') as f:
                            _profile = json.load(f)
                            if _profile.get('type') != profile_type:
                                continue
                            name = _profile.get('name')
                            inherits = _profile.get('inherits')

                            if name:
                                entry = {
                                    "name": name,
                                    "sub_path": sub_path
                                }
                                if inherits:
                                    entry['inherits'] = inherits
                                current_profiles.append(entry)
                            else:
                                print(f"Warning: Missing 'name' in {full_path}")
                    except Exception as e:
                        print(f"Error reading {full_path}: {str(e)}")
                        continue

        # Sort profiles based on inheritance
        sorted_profiles = topological_sort(current_profiles)

        # Remove the inherits field as it's not needed in the final JSON
        for p in sorted_profiles:
            p.pop('inherits', None)

        # Update the library file
        lib_path = f'{vendor}.json'

        profile_section = profile_type + '_list'

        try:
            with open(lib_path, 'r+', encoding='utf-8') as f:
                library = json.load(f)
                library[profile_section] = sorted_profiles
                f.seek(0)
                json.dump(library, f, indent=4, ensure_ascii=False)
                f.truncate()

            print(f"Profile library for {vendor} updated successfully!")
        except Exception as e:
            print(f"Error updating library file: {str(e)}")


def clean_up_profile(vendor="", profile_type="", force=False):
    # Change the current working directory to the path (../resources/profiles)
    # relative to the script location.
    os.chdir(os.path.join(os.path.dirname(__file__), '..', 'resources', 'profiles'))

    # Collect current profile entries
    if vendor:
        vendors = [vendor]
    else:
        profiles_dir = os.path.join(os.path.dirname(__file__), '..', 'resources', 'profiles')
        vendors = [f[:-5] for f in os.listdir(profiles_dir) if f.lower().endswith('.json')]
    for vendor in vendors:
        current_profiles = []
        base_dir = vendor
        # Orca expects machine_model to be in the machine folder
        if profile_type == 'machine_model':
            profile_dir = os.path.join(base_dir, 'machine')
        else:
            profile_dir = os.path.join(base_dir, profile_type)

        for root, dirs, files in os.walk(profile_dir):
            for file in files:
                if file.lower().endswith('.json'):
                    if file == 'filaments_color_codes.json':  # Ignore non-profile file
                        continue

                    full_path = os.path.join(root, file)

                    # Get the path relative to the base directory
                    sub_path = os.path.relpath(full_path, base_dir).replace('\\', '/')

                    try:
                        with open(full_path, 'r+', encoding='utf-8') as f:
                            _profile = json.load(f)
                            need_update = False
                            if not _profile.get('type'):
                                need_update = True
                                name = _profile.get('name')
                                inherits = _profile.get('inherits')
                                if profile_type in ("machine_model", "machine"):
                                    if "nozzle" in name or "Nozzle" in name:
                                        _profile['type'] = "machine"
                                    else:
                                        _profile['type'] = "machine_model"
                                else:
                                    _profile['type'] = profile_type
                                print(f"Added type: {_profile['type']} to {file}")

                            fields_to_remove = ['version', 'is_custom_defined']
                            for field in fields_to_remove:
                                if _profile.get(field):
                                    # Remove the obsolete field
                                    del _profile[field]
                                    print(f"Removed {field} field from {file}")
                                    need_update = True

                            # Handle `extruder_clearance_radius`.
                            if 'extruder_clearance_radius' in _profile and 'extruder_clearance_max_radius' in _profile:
                                # BBS renamed `extruder_clearance_radius` to `extruder_clearance_max_radius`;
                                # however, some of their profiles have both options present with different
                                # values, which could cause very bad consequences such as a toolhead collision.
                                # Here we make sure only one of these options exists; if both are present,
                                # we keep the one with the greater value.
                                need_update = True
                                if float(_profile['extruder_clearance_max_radius']) > float(_profile['extruder_clearance_radius']):
                                    del _profile['extruder_clearance_radius']
                                else:
                                    del _profile['extruder_clearance_max_radius']

                            # Convert filament fields to arrays if not already
                            if profile_type == 'filament':
                                fields_to_arrayify = ['filament_cost', 'filament_density', 'filament_type', "temperature_vitrification", "filament_max_volumetric_speed", "filament_vendor"]
                                for field in fields_to_arrayify:
                                    if field in _profile and not isinstance(_profile[field], list):
                                        original_value = _profile[field]
                                        _profile[field] = [original_value]
                                        print(f"Converted {field} to array in {file}")
                                        need_update = True

                                # Remove the following fields from the filament profile
                                fields_to_remove = ['initial_layer_print_speed', 'outer_wall_speed', 'inner_wall_speed', 'infill_speed', 'top_surface_speed', 'travel_speed']
                                for field in fields_to_remove:
                                    if field in _profile:
                                        del _profile[field]
                                        print(f"Removed {field} field from {file}")
                                        need_update = True

                            if need_update or force:
                                # Write back to the file
                                f.seek(0)
                                ordered_profile = create_ordered_profile(_profile, ['type', 'name', 'renamed_from', 'inherits', 'from', 'setting_id', 'filament_id', 'instantiation'])
                                json.dump(ordered_profile, f, indent=4, ensure_ascii=False)
                                f.truncate()
                                print(f"Updated profile: {full_path}")
                    except Exception as e:
                        print(f"Error reading {full_path}: {str(e)}")
                        continue


# For each JSON file, this will:
# - Replace "BBL X1C" with "System" in the name field
# - Empty the compatible_printers array
# - Ensure setting_id starts with 'O'
def rename_filament_system(vendor="OrcaFilamentLibrary"):
    # Change the current working directory to the relative path
    os.chdir(os.path.join(os.path.dirname(__file__), '..', 'resources', 'profiles'))

    base_dir = vendor
    filament_dir = os.path.join(base_dir, 'filament')

    for root, dirs, files in os.walk(filament_dir):
        for file in files:
            if file.lower().endswith('.json'):
                full_path = os.path.join(root, file)
                try:
                    with open(full_path, 'r', encoding='utf-8') as f:
                        data = json.load(f)
                    modified = False

                    # Update the name if it contains "BBL X1C"
                    if 'name' in data and "BBL X1C" in data['name']:
                        data['name'] = data['name'].replace("BBL X1C", "System")
                        modified = True

                    # Empty compatible_printers if it exists
                    if 'compatible_printers' in data:
                        data['compatible_printers'] = []
                        modified = True

                    # Update setting_id if needed
                    if 'setting_id' in data and not data['setting_id'].startswith('O'):
                        data['setting_id'] = 'O' + data['setting_id']
                        modified = True

                    if modified:
                        with open(full_path, 'w', encoding='utf-8') as f:
                            json.dump(data, f, indent=4, ensure_ascii=False)
                        print(f"Updated {full_path}")

                except Exception as e:
                    print(f"Error processing {full_path}: {str(e)}")


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Update filament library for specified vendor')
    parser.add_argument('-v', '--vendor', type=str, default="",
                        help='Vendor name (default: "" which means all vendors)')
    parser.add_argument('-u', '--update', action='store_true', help='update vendor.json')
    parser.add_argument('-p', '--profile_type', type=str, choices=['machine_model', 'process', 'filament', 'machine'], help='profile type (default: "" which means all types)')
    parser.add_argument('-f', '--fix', action='store_true', help='Fix errors like missing type field, and clean up the profile')
    parser.add_argument('--force', action='store_true', help='Force update the profile files, for the --fix option')
    args = parser.parse_args()

    if args.fix:
        if args.profile_type:
            clean_up_profile(args.vendor, args.profile_type, args.force)
        else:
            clean_up_profile(args.vendor, 'machine_model', args.force)
            clean_up_profile(args.vendor, 'process', args.force)
            clean_up_profile(args.vendor, 'filament', args.force)
            clean_up_profile(args.vendor, 'machine', args.force)

    if args.update:
        update_profile_library(args.vendor, 'machine_model')
        update_profile_library(args.vendor, 'process')
        update_profile_library(args.vendor, 'filament')
        update_profile_library(args.vendor, 'machine')
    # else:
    #     rename_filament_system(args.vendor)
81
scripts/pack_profiles.sh
Executable file
81
scripts/pack_profiles.sh
Executable file
@@ -0,0 +1,81 @@
#!/bin/bash

# Check if required arguments are provided
if [ "$#" -lt 3 ]; then
    echo "Usage: $0 VERSION NUMBER VENDOR1 [VENDOR2 ...]"
    echo "Example: $0 2.3.0 1 OrcaFilamentLibrary BBL"
    exit 1
fi

# Get version and number from arguments
VERSION="$1"
NUMBER="$2"
shift 2  # Remove first two arguments, leaving only vendor names

# Set paths
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
RESOURCES_DIR="$SCRIPT_DIR/../resources/profiles"
ORIGINAL_DIR="$(pwd)"
OUTPUT_FILE="orcaslicer-profiles_ota_${VERSION}.${NUMBER}.zip"
TEMP_DIR="/tmp/orca_profiles_$$"  # Use PID to make the temp dir unique

# Check if the resources directory exists
if [ ! -d "$RESOURCES_DIR" ]; then
    echo "Error: Profiles directory not found at $RESOURCES_DIR"
    exit 1
fi

# Create a temporary directory with a profiles root folder
mkdir -p "$TEMP_DIR/profiles"

# Process each vendor
for VENDOR in "$@"; do
    echo "Processing vendor: $VENDOR"

    # Copy the JSON file if it exists
    if [ -f "$RESOURCES_DIR/$VENDOR.json" ]; then
        cp "$RESOURCES_DIR/$VENDOR.json" "$TEMP_DIR/profiles/"
        echo "Added $VENDOR.json"
    else
        echo "Warning: $VENDOR.json not found"
    fi

    # Copy the vendor directory if it exists
    if [ -d "$RESOURCES_DIR/$VENDOR" ]; then
        cp -r "$RESOURCES_DIR/$VENDOR" "$TEMP_DIR/profiles/"
        echo "Added $VENDOR directory"

        # Remove excluded file types
        find "$TEMP_DIR/profiles/$VENDOR" -type f \( \
            -name "*.jpg" -o \
            -name "*.stl" -o \
            -name "*.svg" -o \
            -name "*.png" -o \
            -name "*.py" \
            \) -delete
    else
        echo "Warning: $VENDOR directory not found"
    fi
done

# Create the zip file
pushd "$TEMP_DIR" || exit 1
zip -r "$OUTPUT_FILE" profiles/

# Move the zip file to the original directory
mv "$OUTPUT_FILE" "$ORIGINAL_DIR/"

# Return to the original directory
popd || exit 1

# Clean up
rm -rf "$TEMP_DIR"

# Print results
if [ -f "$OUTPUT_FILE" ]; then
    echo "Created profiles package: $OUTPUT_FILE"
    echo "Size: $(du -h "$OUTPUT_FILE" | cut -f1)"
else
    echo "Error: Failed to create zip file"
    exit 1
fi
41
scripts/run_gettext.bat
Normal file
41
scripts/run_gettext.bat
Normal file
@@ -0,0 +1,41 @@
@echo off
setlocal EnableExtensions
REM OrcaSlicer gettext
REM Created by SoftFever on 27/5/23.

REM Check for the --full argument
set FULL_MODE=0
for %%a in (%*) do (
    if "%%a"=="--full" set FULL_MODE=1
)

if %FULL_MODE%==1 (
    .\tools\xgettext.exe --keyword=L --keyword=_L --keyword=_u8L --keyword=L_CONTEXT:1,2c --keyword=_L_PLURAL:1,2 --add-comments=TRN --from-code=UTF-8 --no-location --debug --boost -f ./localization/i18n/list.txt -o ./localization/i18n/OrcaSlicer.pot
    if errorlevel 1 exit /b 1
    python scripts/HintsToPot.py ./resources ./localization/i18n
    if errorlevel 1 exit /b 1
)
REM Print the current directory
echo %cd%
set pot_file="./localization/i18n/OrcaSlicer.pot"

REM Run the script for each .po file
for /r "./localization/i18n/" %%f in (*.po) do (
    call :processFile "%%f"
    if errorlevel 1 exit /b 1
)
exit /b 0

:processFile
set "file=%~1"
set "dir=%~dp1"
set "name=%~n1"
set "lang=%name:OrcaSlicer_=%"
if %FULL_MODE%==1 (
    .\tools\msgmerge.exe -N -o "%file%" "%file%" "%pot_file%"
    if errorlevel 1 exit /b 1
)
if not exist "./resources/i18n/%lang%" mkdir "./resources/i18n/%lang%"
.\tools\msgfmt.exe --check-format -o "./resources/i18n/%lang%/OrcaSlicer.mo" "%file%"
if errorlevel 1 exit /b 1
exit /b 0
39
scripts/run_gettext.sh
Executable file
39
scripts/run_gettext.sh
Executable file
@@ -0,0 +1,39 @@
#!/bin/sh

# OrcaSlicer gettext
# Created by SoftFever on 27/5/23.
#

# Check for the --full argument
FULL_MODE=false
for arg in "$@"
do
    if [ "$arg" = "--full" ]; then
        FULL_MODE=true
    fi
done

if $FULL_MODE; then
    xgettext --keyword=L --keyword=_L --keyword=_u8L --keyword=L_CONTEXT:1,2c --keyword=_L_PLURAL:1,2 --add-comments=TRN --from-code=UTF-8 --no-location --debug --boost -f ./localization/i18n/list.txt -o ./localization/i18n/OrcaSlicer.pot
    python3 scripts/HintsToPot.py ./resources ./localization/i18n
fi


echo "$0: working dir = $PWD"
pot_file="./localization/i18n/OrcaSlicer.pot"
for dir in ./localization/i18n/*/
do
    dir=${dir%*/}      # remove the trailing "/"
    lang=${dir##*/}    # extract the language identifier

    if [ -f "$dir/OrcaSlicer_${lang}.po" ]; then
        if $FULL_MODE; then
            msgmerge -N -o "$dir/OrcaSlicer_${lang}.po" "$dir/OrcaSlicer_${lang}.po" "$pot_file"
        fi
        mkdir -p "resources/i18n/${lang}"
        if ! msgfmt --check-format -o "resources/i18n/${lang}/OrcaSlicer.mo" "$dir/OrcaSlicer_${lang}.po"; then
            echo "Error encountered with msgfmt command for language ${lang}."
            exit 1  # Exit the script with an error status
        fi
    fi
done
12
scripts/run_unit_tests.sh
Executable file
12
scripts/run_unit_tests.sh
Executable file
@@ -0,0 +1,12 @@
#!/bin/bash

# This file is made to support the unit tests workflow.
# It should only require the directories build/tests, scripts/, and tests/ to function,
# plus cmake (with ctest) installed.
# (If that changes, update the workflow too, but try to keep things self-contained.)

ROOT_DIR="$(dirname "$0")/.."

cd "${ROOT_DIR}" || exit 1

ctest --test-dir build/tests -L "Http|PlaceholderParser" --output-junit "$(pwd)/ctest_results.xml" --output-on-failure -j
477
scripts/test_moonraker_lane_data.py
Executable file
477
scripts/test_moonraker_lane_data.py
Executable file
@@ -0,0 +1,477 @@
#!/usr/bin/env python3
"""
Test script for the MoonrakerPrinterAgent filament sync feature.
Inserts/deletes/modifies random lane data in the Moonraker database,
then reads it back and displays it with colored output.
"""

import requests
import random
import argparse
import json
import time
import sys

# Configuration
DEFAULT_HOST = "192.168.88.9"
DEFAULT_PORT = 7125
NAMESPACE = "lane_data"
LANE_KEYS = [f"lane{i}" for i in range(1, 9)]  # lane1-lane8
MATERIALS = ["PLA", "ABS", "PETG", "ASA", "ASA Sparkle", "TPU", ""]

# Material default temperatures (None = use null)
MATERIAL_TEMPS = {
    "PLA":         {"nozzle": 210, "bed": 60},
    "ABS":         {"nozzle": 240, "bed": 100},
    "PETG":        {"nozzle": 235, "bed": 80},
    "ASA":         {"nozzle": 245, "bed": 105},
    "ASA Sparkle": {"nozzle": 245, "bed": 105},
    "TPU":         {"nozzle": 220, "bed": 50},
    "":            {"nozzle": None, "bed": None},
}


def test_connection(host, port, api_key=None, verbose=False):
    """Test basic connectivity to Moonraker."""
    url = f"http://{host}:{port}/server/info"
    headers = {"X-Api-Key": api_key} if api_key else {}

    if verbose:
        print(f"  Testing: GET {url}")

    try:
        resp = requests.get(url, headers=headers, timeout=10)
        if verbose:
            print(f"  Response: HTTP {resp.status_code}")
        if resp.status_code == 200:
            data = resp.json()
            if verbose:
                print(f"  Moonraker version: {data.get('result', {}).get('moonraker_version', 'unknown')}")
            return True
        else:
            print(f"  Server returned HTTP {resp.status_code}")
            if verbose:
                print(f"  Response: {resp.text[:500]}")
            return False
    except requests.exceptions.ConnectionError as e:
        print(f"  Connection error: {e}")
        return False
    except requests.exceptions.Timeout:
        print("  Connection timed out")
        return False
    except Exception as e:
        print(f"  Error: {type(e).__name__}: {e}")
        return False


def hex_to_rgb(hex_color):
    """Convert a hex color to an RGB tuple."""
    hex_color = hex_color.lstrip('#')
    if hex_color.startswith('0x') or hex_color.startswith('0X'):
        hex_color = hex_color[2:]
    if len(hex_color) == 6:
        return tuple(int(hex_color[i:i+2], 16) for i in (0, 2, 4))
    return (128, 128, 128)  # Default gray


def color_block(hex_color):
    """Return an ANSI color block for terminal display."""
    r, g, b = hex_to_rgb(hex_color)
    return f"\033[48;2;{r};{g};{b}m  \033[0m"
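

# The escape sequence above uses the standard 24-bit "truecolor" SGR codes:
# ESC[48;2;R;G;Bm sets the background color and ESC[0m resets it, so e.g.
# color_block("#ff0000") prints two red spaces on terminals that support it.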


def random_color():
    """Generate a random hex color, occasionally returning empty or '#None' like real data."""
    r = random.random()
    if r < 0.1:
        return ""       # Empty color (empty lane)
    if r < 0.15:
        return "#None"  # Observed in real data for unknown colors
    return "#{:06x}".format(random.randint(0, 0xFFFFFF))


def get_lane_data(host, port, api_key=None):
    """Fetch all lane data from the Moonraker database."""
    url = f"http://{host}:{port}/server/database/item"
    params = {"namespace": NAMESPACE}
    headers = {"X-Api-Key": api_key} if api_key else {}

    try:
        resp = requests.get(url, params=params, headers=headers, timeout=5)
        if resp.status_code == 200:
            data = resp.json()
            return data.get("result", {}).get("value", {})
        elif resp.status_code == 404:
            return {}  # Namespace doesn't exist yet
        else:
            print(f"Error fetching lane data: HTTP {resp.status_code}")
            return None
    except Exception as e:
        print(f"Error fetching lane data: {e}")
        return None


def set_lane_data(host, port, lane_key, lane_data, api_key=None):
    """Set lane data in the Moonraker database."""
    url = f"http://{host}:{port}/server/database/item"
    headers = {"Content-Type": "application/json"}
    if api_key:
        headers["X-Api-Key"] = api_key

    payload = {
        "namespace": NAMESPACE,
        "key": lane_key,
        "value": lane_data
    }

    try:
        resp = requests.post(url, json=payload, headers=headers, timeout=5)
        return resp.status_code == 200
    except Exception as e:
        print(f"Error setting lane data: {e}")
        return False


def delete_lane_data(host, port, lane_key, api_key=None):
    """Delete lane data from the Moonraker database."""
    url = f"http://{host}:{port}/server/database/item"
    params = {"namespace": NAMESPACE, "key": lane_key}
    headers = {"X-Api-Key": api_key} if api_key else {}

    try:
        resp = requests.delete(url, params=params, headers=headers, timeout=5)
        return resp.status_code == 200
    except Exception as e:
        print(f"Error deleting lane data: {e}")
        return False


def display_lanes(lanes):
    """Display lane data with color blocks."""
    print("\n" + "="*70)
    print("CURRENT LANE DATA")
    print("="*70)

    if not lanes:
        print("  (no lanes configured)")
        return

    # Sort by lane number
    sorted_lanes = sorted(lanes.items(),
                          key=lambda x: int(x[1].get("lane", "0")) if x[1].get("lane", "").isdigit() else 0)

    for lane_key, data in sorted_lanes:
        lane_num = data.get("lane", "?")
        material = data.get("material", "") or "(empty)"
        color = data.get("color", "")
        bed_temp = data.get("bed_temp")
        nozzle_temp = data.get("nozzle_temp")
        spool_id = data.get("spool_id")

        # Show a color block only for valid hex colors
        if color and color.startswith("#") and color != "#None" and len(color) == 7:
            block = color_block(color)
        else:
            block = "  "  # No color block

        bed_str = f"{bed_temp}°C" if bed_temp is not None else "-"
        noz_str = f"{nozzle_temp}°C" if nozzle_temp is not None else "-"
        spool_str = f"  Spool: {spool_id}" if spool_id is not None else ""
        color_str = color if color else "(none)"

        print(f"  {lane_key} (T{lane_num}): {block} {color_str:10s} {material:12s} "
              f"Nozzle: {noz_str:6s} Bed: {bed_str:5s}{spool_str}")

    print("="*70 + "\n")


def make_lane_entry(tool_number, material=None):
    """Generate a lane data entry matching the real Moonraker AFC structure."""
    if material is None:
        material = random.choice(MATERIALS)
    temps = MATERIAL_TEMPS[material]
    color = random_color()

    bed = None
    nozzle = None
    if temps["bed"] is not None:
        bed = temps["bed"] + random.randint(-5, 5)
    if temps["nozzle"] is not None:
        nozzle = temps["nozzle"] + random.randint(-10, 10)

    spool_id = random.choice([None, random.randint(1, 50)])

    return {
        "color": color,
        "material": material,
        "bed_temp": bed,
        "nozzle_temp": nozzle,
        "scan_time": "",
        "td": "",
        "lane": str(tool_number),
        "spool_id": spool_id,
    }
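

# A generated entry might look like this (illustrative values only):
#   {"color": "#1a2b3c", "material": "PLA", "bed_temp": 58, "nozzle_temp": 214,
#    "scan_time": "", "td": "", "lane": "3", "spool_id": 17}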
|
||||
|
||||
def get_used_tool_numbers(host, port, api_key=None, exclude_key=None):
|
||||
"""Get set of tool numbers currently in use."""
|
||||
lanes = get_lane_data(host, port, api_key) or {}
|
||||
used = set()
|
||||
for key, data in lanes.items():
|
||||
if key == exclude_key:
|
||||
continue
|
||||
lane_val = data.get("lane", "")
|
||||
if lane_val.isdigit():
|
||||
used.add(int(lane_val))
|
||||
return used
|
||||
|
||||
def pick_available_tool_number(used_tool_numbers):
|
||||
"""Pick a random tool number (0-7) not already in use. Returns None if all taken."""
|
||||
available = [n for n in range(8) if n not in used_tool_numbers]
|
||||
if not available:
|
||||
return None
|
||||
return random.choice(available)
|
||||
|
||||
def fix_duplicate_lanes(host, port, lanes, api_key=None):
    """Detect and fix duplicate tool numbers in existing lane data.

    Returns the updated lane data after fixes.
    """
    if not lanes:
        return lanes

    # Map tool number -> list of lane keys using it
    tool_to_keys = {}
    for key, data in lanes.items():
        tool = data.get("lane", "")
        if tool == "":
            continue
        tool_to_keys.setdefault(tool, []).append(key)

    # Find duplicates
    duplicates = {tool: keys for tool, keys in tool_to_keys.items() if len(keys) > 1}
    if not duplicates:
        return lanes

    print("DUPLICATE TOOL NUMBERS DETECTED:")
    for tool, keys in duplicates.items():
        print(f"  Tool T{tool} used by: {', '.join(keys)}")

    # Collect all used tool numbers
    used = set()
    for tool, keys in tool_to_keys.items():
        if tool.isdigit():
            used.add(int(tool))

    # Fix: keep the first key for each tool, reassign the rest
    print("\nFixing duplicates...")
    for tool, keys in duplicates.items():
        # Keep the first one, reassign the rest
        for key in keys[1:]:
            available = [n for n in range(8) if n not in used]
            if not available:
                print(f"  {key}: cannot fix, no available tool numbers!")
                continue

            new_tool = available[0]
            used.add(new_tool)

            lanes[key]["lane"] = str(new_tool)
            if set_lane_data(host, port, key, lanes[key], api_key):
                print(f"  {key}: T{tool} -> T{new_tool}")
            else:
                print(f"  {key}: FAILED to update")

    print()
    return lanes

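# Example: if lane1 and lane4 both claim tool "2", lane1 keeps T2 and lane4 is
# reassigned to the lowest free slot (e.g. T0), with the change written back
# via set_lane_data(). "First" means first encountered when iterating the
# fetched data, i.e. dict insertion order.
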
def perform_random_operations(host, port, api_key=None, num_ops=5):
    """Perform random insert/modify/delete operations."""
    operations = ["insert", "modify", "delete"]

    print(f"\nPerforming {num_ops} random operations...")
    print("-"*50)

    for i in range(num_ops):
        op = random.choice(operations)
        lane_key = random.choice(LANE_KEYS)

        if op in ("insert", "modify"):
            # Get currently used tool numbers, excluding this key (ok to reuse its own)
            used = get_used_tool_numbers(host, port, api_key, exclude_key=lane_key)
            tool_num = pick_available_tool_number(used)
            if tool_num is None:
                print(f"  [{op.upper()}] {lane_key}: SKIPPED (all tool numbers in use)")
                continue

            lane_data = make_lane_entry(tool_num)
            action = "INSERT" if op == "insert" else "MODIFY"
            color = lane_data["color"]
            material = lane_data["material"] or "(empty)"
            tool = lane_data["lane"]

            if color and color.startswith("#") and color != "#None" and len(color) == 7:
                block = color_block(color)
            else:
                block = "  "

            if set_lane_data(host, port, lane_key, lane_data, api_key):
                print(f"  [{action}] {lane_key} (T{tool}): {block} {color or '(none)'} "
                      f"{material} spool={lane_data['spool_id']}")
            else:
                print(f"  [{action}] {lane_key}: FAILED")

        elif op == "delete":
            if delete_lane_data(host, port, lane_key, api_key):
                print(f"  [DELETE] {lane_key}")
            else:
                print(f"  [DELETE] {lane_key}: FAILED (may not exist)")

        time.sleep(0.1)  # Small delay between operations

    print("-"*50)

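# Illustrative console output for one pass (lane keys, colors, and materials
# vary from run to run):
#   [INSERT] lane2 (T5): ## #AA00FF PETG spool=31
#   [DELETE] lane7: FAILED (may not exist)
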
def load_lanes_from_file(filepath, host, port, api_key=None):
    """Load lane data from a JSON file and overwrite all lanes on the printer.

    Accepts either the raw Moonraker response format:
        {"result": {"namespace": "lane_data", "value": {"lane1": {...}, ...}}}
    or the plain value object:
        {"lane1": {...}, "lane2": {...}, ...}
    """
    try:
        with open(filepath, "r") as f:
            data = json.load(f)
    except FileNotFoundError:
        print(f"Error: file not found: {filepath}")
        return False
    except json.JSONDecodeError as e:
        print(f"Error: invalid JSON in {filepath}: {e}")
        return False

    # Accept both wrapped and unwrapped formats
    if "result" in data and "value" in data.get("result", {}):
        lanes = data["result"]["value"]
    else:
        lanes = data

    if not isinstance(lanes, dict):
        print(f"Error: expected object with lane keys, got {type(lanes).__name__}")
        return False

    # Validate no duplicate tool numbers
    tool_to_keys = {}
    for key, entry in lanes.items():
        tool = entry.get("lane", "")
        if tool:
            tool_to_keys.setdefault(tool, []).append(key)
    dupes = {t: keys for t, keys in tool_to_keys.items() if len(keys) > 1}
    if dupes:
        print("Error: input JSON has duplicate tool numbers:")
        for tool, keys in dupes.items():
            print(f"  Tool T{tool} used by: {', '.join(keys)}")
        return False

    print(f"Loading {len(lanes)} lane(s) from {filepath}...")

    # Clear all existing lanes first
    print("  Clearing existing lanes...")
    for lane_key in LANE_KEYS:
        delete_lane_data(host, port, lane_key, api_key)

    # Write each lane from the file
    ok = True
    for lane_key, lane_data in lanes.items():
        if set_lane_data(host, port, lane_key, lane_data, api_key):
            tool = lane_data.get("lane", "?")
            material = lane_data.get("material", "") or "(empty)"
            color = lane_data.get("color", "")
            if color and color.startswith("#") and color != "#None" and len(color) == 7:
                block = color_block(color)
            else:
                block = "  "
            print(f"  [LOAD] {lane_key} (T{tool}): {block} {color or '(none)'} {material}")
        else:
            print(f"  [LOAD] {lane_key}: FAILED")
            ok = False

    return ok

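# A minimal input file in the plain (unwrapped) format; "lanes.json" is just an
# example name, and the field values are illustrative:
#   {
#     "lane1": {"color": "#FF0000", "material": "PLA", "bed_temp": 60,
#               "nozzle_temp": 205, "scan_time": "", "td": "",
#               "lane": "0", "spool_id": 5}
#   }
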
def main():
    parser = argparse.ArgumentParser(
        description="Test Moonraker lane data for MoonrakerPrinterAgent filament sync"
    )
    parser.add_argument("--host", default=DEFAULT_HOST,
                        help=f"Moonraker host (default: {DEFAULT_HOST})")
    parser.add_argument("--port", type=int, default=DEFAULT_PORT,
                        help=f"Moonraker port (default: {DEFAULT_PORT})")
    parser.add_argument("--api-key", help="Moonraker API key (if required)")
    parser.add_argument("--ops", type=int, default=5,
                        help="Number of random operations (default: 5)")
    parser.add_argument("--clear", action="store_true",
                        help="Clear all lane data before starting")
    parser.add_argument("--read-only", action="store_true",
                        help="Only read and display current lane data")
    parser.add_argument("--load", metavar="FILE",
                        help="Load lane data from JSON file and overwrite printer lanes")
    parser.add_argument("--verbose", "-v", action="store_true",
                        help="Verbose output for debugging")

    args = parser.parse_args()

    print(f"\nConnecting to Moonraker at {args.host}:{args.port}...")

    # First test basic connectivity
    if not test_connection(args.host, args.port, args.api_key, args.verbose):
        print("\nFailed to connect to Moonraker!")
        print("\nTroubleshooting:")
        print(f"  1. Check if Moonraker is running on {args.host}")
        print(f"  2. Verify port {args.port} is correct (default Moonraker port is 7125)")
        print(f"  3. Try: curl http://{args.host}:{args.port}/server/info")
        print("  4. Check if an API key is required (--api-key)")
        return 1

    print("Connected!")

    # Now fetch lane data
    current = get_lane_data(args.host, args.port, args.api_key)
    if current is None:
        print("Connected to Moonraker but failed to fetch lane data!")
        return 1

    # Check for and fix duplicate tool numbers
    current = fix_duplicate_lanes(args.host, args.port, current, args.api_key)

    # Show current state
    display_lanes(current)

    if args.read_only:
        return 0

    # Load from JSON file if requested
    if args.load:
        if not load_lanes_from_file(args.load, args.host, args.port, args.api_key):
            return 1
        final = get_lane_data(args.host, args.port, args.api_key)
        display_lanes(final)
        if final is not None:
            print("RAW JSON:")
            print(json.dumps({"result": {"namespace": NAMESPACE, "key": None, "value": final}}, indent=2))
            print()
        return 0

    # Clear if requested
    if args.clear:
        print("Clearing all lane data...")
        for lane_key in LANE_KEYS:
            delete_lane_data(args.host, args.port, lane_key, args.api_key)
        print("Cleared!")
        display_lanes({})

    # Perform random operations
    perform_random_operations(args.host, args.port, args.api_key, args.ops)

    # Read back and display final state
    final = get_lane_data(args.host, args.port, args.api_key)
    display_lanes(final)

    # Print raw JSON
    if final is not None:
        print("RAW JSON:")
        print(json.dumps({"result": {"namespace": NAMESPACE, "key": None, "value": final}}, indent=2))
        print()

    return 0


if __name__ == "__main__":
    exit(main())
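
# Typical invocations (the script filename is assumed here):
#   python test_moonraker_lanes.py --read-only
#   python test_moonraker_lanes.py --clear --ops 10
#   python test_moonraker_lanes.py --load lanes.json --host 192.168.1.50 -v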