From bb4a01c4f9346d394997962037e7519289b2680c Mon Sep 17 00:00:00 2001 From: Scott Lahteine Date: Sat, 25 Mar 2023 20:40:50 -0500 Subject: [PATCH] =?UTF-8?q?=F0=9F=94=A8=20Newer=20PlatformIO=20support?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Marlin/config.ini | 211 +++++++++ Marlin/src/HAL/LPC1768/upload_extra_script.py | 38 +- .../scripts/STM32F103RC_MEEB_3DP.py | 1 - .../PlatformIO/scripts/STM32F103RC_fysetc.py | 1 - .../scripts/STM32F1_create_variant.py | 27 +- .../share/PlatformIO/scripts/chitu_crypt.py | 27 +- .../PlatformIO/scripts/common-cxxflags.py | 25 +- .../PlatformIO/scripts/common-dependencies.py | 1 + .../share/PlatformIO/scripts/configuration.py | 240 ++++++++++ .../PlatformIO/scripts/download_mks_assets.py | 42 +- .../scripts/generic_create_variant.py | 24 +- .../jgaurora_a5s_a1_with_bootloader.py | 51 +-- buildroot/share/PlatformIO/scripts/lerdge.py | 4 +- buildroot/share/PlatformIO/scripts/marlin.py | 31 +- .../share/PlatformIO/scripts/mc-apply.py | 1 - .../PlatformIO/scripts/offset_and_rename.py | 17 +- buildroot/share/PlatformIO/scripts/openblt.py | 1 - buildroot/share/PlatformIO/scripts/pioutil.py | 1 + .../PlatformIO/scripts/preflight-checks.py | 53 ++- .../share/PlatformIO/scripts/preprocessor.py | 66 ++- buildroot/share/PlatformIO/scripts/schema.py | 421 ++++++++++++++++++ .../share/PlatformIO/scripts/signature.py | 121 ++++- .../share/PlatformIO/scripts/simulator.py | 1 + buildroot/share/dwin/bin/DWIN_ICO.py | 2 +- buildroot/share/dwin/bin/makeIco.py | 1 - buildroot/share/dwin/bin/splitIco.py | 1 - .../scripts/createTemperatureLookupMarlin.py | 16 +- buildroot/share/scripts/gen-tft-image.py | 4 +- buildroot/share/scripts/upload.py | 12 +- buildroot/share/vscode/auto_build.py | 8 +- .../create_custom_upload_command_CDC.py | 2 +- .../create_custom_upload_command_DFU.py | 2 +- ini/esp32.ini | 1 + platformio.ini | 15 +- 34 files changed, 1225 insertions(+), 244 deletions(-) create mode 100644 Marlin/config.ini create mode 100644 buildroot/share/PlatformIO/scripts/configuration.py create mode 100644 buildroot/share/PlatformIO/scripts/schema.py diff --git a/Marlin/config.ini b/Marlin/config.ini new file mode 100644 index 000000000000..0fb9fb0c9308 --- /dev/null +++ b/Marlin/config.ini @@ -0,0 +1,211 @@ +# +# Marlin Firmware +# config.ini - Options to apply before the build +# +[config:base] +ini_use_config = none + +# Load all config: sections in this file +;ini_use_config = all +# Load config file relative to Marlin/ +;ini_use_config = another.ini +# Download configurations from GitHub +;ini_use_config = example/Creality/Ender-5 Plus @ bugfix-2.1.x +# Download configurations from your server +;ini_use_config = https://me.myserver.com/path/to/configs +# Evaluate config:base and do a config dump +;ini_use_config = base +;config_export = 2 + +[config:minimal] +motherboard = BOARD_RAMPS_14_EFB +serial_port = 0 +baudrate = 250000 + +use_watchdog = on +thermal_protection_hotends = on +thermal_protection_hysteresis = 4 +thermal_protection_period = 40 + +bufsize = 4 +block_buffer_size = 16 +max_cmd_size = 96 + +extruders = 1 +temp_sensor_0 = 1 + +temp_hysteresis = 3 +heater_0_mintemp = 5 +heater_0_maxtemp = 275 +preheat_1_temp_hotend = 180 + +bang_max = 255 +pidtemp = on +pid_k1 = 0.95 +pid_max = BANG_MAX +pid_functional_range = 10 + +default_kp = 22.20 +default_ki = 1.08 +default_kd = 114.00 + +x_driver_type = A4988 +y_driver_type = A4988 +z_driver_type = A4988 +e0_driver_type = A4988 + +x_bed_size = 200 +x_min_pos = 0 
+x_max_pos = X_BED_SIZE + +y_bed_size = 200 +y_min_pos = 0 +y_max_pos = Y_BED_SIZE + +z_min_pos = 0 +z_max_pos = 200 + +x_home_dir = -1 +y_home_dir = -1 +z_home_dir = -1 + +use_xmin_plug = on +use_ymin_plug = on +use_zmin_plug = on + +x_min_endstop_inverting = false +y_min_endstop_inverting = false +z_min_endstop_inverting = false + +default_axis_steps_per_unit = { 80, 80, 400, 500 } +axis_relative_modes = { false, false, false, false } +default_max_feedrate = { 300, 300, 5, 25 } +default_max_acceleration = { 3000, 3000, 100, 10000 } + +homing_feedrate_mm_m = { (50*60), (50*60), (4*60) } +homing_bump_divisor = { 2, 2, 4 } + +x_enable_on = 0 +y_enable_on = 0 +z_enable_on = 0 +e_enable_on = 0 + +invert_x_dir = false +invert_y_dir = true +invert_z_dir = false +invert_e0_dir = false + +invert_e_step_pin = false +invert_x_step_pin = false +invert_y_step_pin = false +invert_z_step_pin = false + +disable_x = false +disable_y = false +disable_z = false +disable_e = false + +proportional_font_ratio = 1.0 +default_nominal_filament_dia = 1.75 + +junction_deviation_mm = 0.013 + +default_acceleration = 3000 +default_travel_acceleration = 3000 +default_retract_acceleration = 3000 + +default_minimumfeedrate = 0.0 +default_mintravelfeedrate = 0.0 + +minimum_planner_speed = 0.05 +min_steps_per_segment = 6 +default_minsegmenttime = 20000 + +[config:basic] +bed_overshoot = 10 +busy_while_heating = on +default_ejerk = 5.0 +default_keepalive_interval = 2 +default_leveling_fade_height = 0.0 +disable_inactive_extruder = on +display_charset_hd44780 = JAPANESE +eeprom_boot_silent = on +eeprom_chitchat = on +endstoppullups = on +extrude_maxlength = 200 +extrude_mintemp = 170 +host_keepalive_feature = on +hotend_overshoot = 15 +jd_handle_small_segments = on +lcd_info_screen_style = 0 +lcd_language = en +max_bed_power = 255 +mesh_inset = 0 +min_software_endstops = on +max_software_endstops = on +min_software_endstop_x = on +min_software_endstop_y = on +min_software_endstop_z = on +max_software_endstop_x = on +max_software_endstop_y = on +max_software_endstop_z = on +preheat_1_fan_speed = 0 +preheat_1_label = "PLA" +preheat_1_temp_bed = 70 +prevent_cold_extrusion = on +prevent_lengthy_extrude = on +printjob_timer_autostart = on +probing_margin = 10 +show_bootscreen = on +soft_pwm_scale = 0 +string_config_h_author = "(none, default config)" +temp_bed_hysteresis = 3 +temp_bed_residency_time = 10 +temp_bed_window = 1 +temp_residency_time = 10 +temp_window = 1 +validate_homing_endstops = on +xy_probe_feedrate = (133*60) +z_clearance_between_probes = 5 +z_clearance_deploy_probe = 10 +z_clearance_multi_probe = 5 + +[config:advanced] +arc_support = on +auto_report_temperatures = on +autotemp = on +autotemp_oldweight = 0.98 +bed_check_interval = 5000 +default_stepper_deactive_time = 120 +default_volumetric_extruder_limit = 0.00 +disable_inactive_e = true +disable_inactive_x = true +disable_inactive_y = true +disable_inactive_z = true +e0_auto_fan_pin = -1 +encoder_100x_steps_per_sec = 80 +encoder_10x_steps_per_sec = 30 +encoder_rate_multiplier = on +extended_capabilities_report = on +extruder_auto_fan_speed = 255 +extruder_auto_fan_temperature = 50 +fanmux0_pin = -1 +fanmux1_pin = -1 +fanmux2_pin = -1 +faster_gcode_parser = on +homing_bump_mm = { 5, 5, 2 } +max_arc_segment_mm = 1.0 +min_arc_segment_mm = 0.1 +min_circle_segments = 72 +n_arc_correction = 25 +serial_overrun_protection = on +slowdown = on +slowdown_divisor = 2 +temp_sensor_bed = 0 +thermal_protection_bed_hysteresis = 2 +thermocouple_max_errors = 15 
+tx_buffer_size = 0 +watch_bed_temp_increase = 2 +watch_bed_temp_period = 60 +watch_temp_increase = 2 +watch_temp_period = 20 diff --git a/Marlin/src/HAL/LPC1768/upload_extra_script.py b/Marlin/src/HAL/LPC1768/upload_extra_script.py index 1b78bb12891e..efd46fdd6309 100755 --- a/Marlin/src/HAL/LPC1768/upload_extra_script.py +++ b/Marlin/src/HAL/LPC1768/upload_extra_script.py @@ -12,7 +12,7 @@ target_filename = "FIRMWARE.CUR" target_drive = "REARM" - import os,getpass,platform + import platform current_OS = platform.system() Import("env") @@ -26,6 +26,7 @@ def print_error(e): def before_upload(source, target, env): try: + from pathlib import Path # # Find a disk for upload # @@ -38,6 +39,7 @@ def before_upload(source, target, env): # Windows - doesn't care about the disk's name, only cares about the drive letter import subprocess,string from ctypes import windll + from pathlib import PureWindowsPath # getting list of drives # https://stackoverflow.com/questions/827371/is-there-a-way-to-list-all-the-available-drive-letters-in-python @@ -49,7 +51,7 @@ def before_upload(source, target, env): bitmask >>= 1 for drive in drives: - final_drive_name = drive + ':\\' + final_drive_name = drive + ':' # print ('disc check: {}'.format(final_drive_name)) try: volume_info = str(subprocess.check_output('cmd /C dir ' + final_drive_name, stderr=subprocess.STDOUT)) @@ -59,29 +61,33 @@ def before_upload(source, target, env): else: if target_drive in volume_info and not target_file_found: # set upload if not found target file yet target_drive_found = True - upload_disk = final_drive_name + upload_disk = PureWindowsPath(final_drive_name) if target_filename in volume_info: if not target_file_found: - upload_disk = final_drive_name + upload_disk = PureWindowsPath(final_drive_name) target_file_found = True elif current_OS == 'Linux': # # platformio.ini will accept this for a Linux upload port designation: 'upload_port = /media/media_name/drive' # - drives = os.listdir(os.path.join(os.sep, 'media', getpass.getuser())) + import getpass + user = getpass.getuser() + mpath = Path('/media', user) + drives = [ x for x in mpath.iterdir() if x.is_dir() ] if target_drive in drives: # If target drive is found, use it. 
target_drive_found = True - upload_disk = os.path.join(os.sep, 'media', getpass.getuser(), target_drive) + os.sep + upload_disk = mpath / target_drive else: for drive in drives: try: - files = os.listdir(os.path.join(os.sep, 'media', getpass.getuser(), drive)) + fpath = mpath / drive + filenames = [ x.name for x in fpath.iterdir() if x.is_file() ] except: continue else: - if target_filename in files: - upload_disk = os.path.join(os.sep, 'media', getpass.getuser(), drive) + os.sep + if target_filename in filenames: + upload_disk = mpath / drive target_file_found = True break # @@ -97,26 +103,28 @@ def before_upload(source, target, env): # # platformio.ini will accept this for a OSX upload port designation: 'upload_port = /media/media_name/drive' # - drives = os.listdir('/Volumes') # human readable names + dpath = Path('/Volumes') # human readable names + drives = [ x for x in dpath.iterdir() if x.is_dir() ] if target_drive in drives and not target_file_found: # set upload if not found target file yet target_drive_found = True - upload_disk = '/Volumes/' + target_drive + '/' + upload_disk = dpath / target_drive for drive in drives: try: - filenames = os.listdir('/Volumes/' + drive + '/') # will get an error if the drive is protected + fpath = dpath / drive # will get an error if the drive is protected + filenames = [ x.name for x in fpath.iterdir() if x.is_file() ] except: continue else: if target_filename in filenames: - if not target_file_found: - upload_disk = '/Volumes/' + drive + '/' + upload_disk = dpath / drive target_file_found = True + break # # Set upload_port to drive if found # if target_file_found or target_drive_found: - env.Replace(UPLOAD_PORT=upload_disk) + env.Replace(UPLOAD_PORT=str(upload_disk)) print('\nUpload disk: ', upload_disk, '\n') else: print_error('Autodetect Error') diff --git a/buildroot/share/PlatformIO/scripts/STM32F103RC_MEEB_3DP.py b/buildroot/share/PlatformIO/scripts/STM32F103RC_MEEB_3DP.py index 7766435fedb4..4f2da9cdc0d0 100644 --- a/buildroot/share/PlatformIO/scripts/STM32F103RC_MEEB_3DP.py +++ b/buildroot/share/PlatformIO/scripts/STM32F103RC_MEEB_3DP.py @@ -4,7 +4,6 @@ import pioutil if pioutil.is_pio_build(): - import os Import("env", "projenv") flash_size = 0 diff --git a/buildroot/share/PlatformIO/scripts/STM32F103RC_fysetc.py b/buildroot/share/PlatformIO/scripts/STM32F103RC_fysetc.py index c9fbd9cdc0d3..ecb0cc145c04 100644 --- a/buildroot/share/PlatformIO/scripts/STM32F103RC_fysetc.py +++ b/buildroot/share/PlatformIO/scripts/STM32F103RC_fysetc.py @@ -3,7 +3,6 @@ # import pioutil if pioutil.is_pio_build(): - import os from os.path import join from os.path import expandvars Import("env") diff --git a/buildroot/share/PlatformIO/scripts/STM32F1_create_variant.py b/buildroot/share/PlatformIO/scripts/STM32F1_create_variant.py index e41c15a66109..4189cb5899bc 100644 --- a/buildroot/share/PlatformIO/scripts/STM32F1_create_variant.py +++ b/buildroot/share/PlatformIO/scripts/STM32F1_create_variant.py @@ -3,30 +3,29 @@ # import pioutil if pioutil.is_pio_build(): - import os,shutil,marlin - from SCons.Script import DefaultEnvironment - from platformio import util + import shutil,marlin + from pathlib import Path - env = DefaultEnvironment() + Import("env") platform = env.PioPlatform() board = env.BoardConfig() - FRAMEWORK_DIR = platform.get_package_dir("framework-arduinoststm32-maple") - assert os.path.isdir(FRAMEWORK_DIR) + FRAMEWORK_DIR = Path(platform.get_package_dir("framework-arduinoststm32-maple")) + assert FRAMEWORK_DIR.is_dir() - source_root = 
os.path.join("buildroot", "share", "PlatformIO", "variants") - assert os.path.isdir(source_root) + source_root = Path("buildroot/share/PlatformIO/variants") + assert source_root.is_dir() variant = board.get("build.variant") - variant_dir = os.path.join(FRAMEWORK_DIR, "STM32F1", "variants", variant) + variant_dir = FRAMEWORK_DIR / "STM32F1/variants" / variant - source_dir = os.path.join(source_root, variant) - assert os.path.isdir(source_dir) + source_dir = source_root / variant + assert source_dir.is_dir() - if os.path.isdir(variant_dir): + if variant_dir.is_dir(): shutil.rmtree(variant_dir) - if not os.path.isdir(variant_dir): - os.mkdir(variant_dir) + if not variant_dir.is_dir(): + variant_dir.mkdir() marlin.copytree(source_dir, variant_dir) diff --git a/buildroot/share/PlatformIO/scripts/chitu_crypt.py b/buildroot/share/PlatformIO/scripts/chitu_crypt.py index cd909f55bd79..4e81061a19ad 100644 --- a/buildroot/share/PlatformIO/scripts/chitu_crypt.py +++ b/buildroot/share/PlatformIO/scripts/chitu_crypt.py @@ -4,9 +4,9 @@ # import pioutil if pioutil.is_pio_build(): - import os,random,struct,uuid,marlin - # Relocate firmware from 0x08000000 to 0x08008800 - marlin.relocate_firmware("0x08008800") + import struct,uuid,marlin + + board = marlin.env.BoardConfig() def calculate_crc(contents, seed): accumulating_xor_value = seed; @@ -105,13 +105,22 @@ def encrypt_file(input, output_file, file_length): # Encrypt ${PROGNAME}.bin and save it as 'update.cbd' def encrypt(source, target, env): - firmware = open(target[0].path, "rb") - update = open(target[0].dir.path + '/update.cbd', "wb") - length = os.path.getsize(target[0].path) + from pathlib import Path + + fwpath = Path(target[0].path) + fwsize = fwpath.stat().st_size - encrypt_file(firmware, update, length) + enname = board.get("build.crypt_chitu") + enpath = Path(target[0].dir.path) - firmware.close() - update.close() + fwfile = fwpath.open("rb") + enfile = (enpath / enname).open("wb") + print(f"Encrypting {fwpath} to {enname}") + encrypt_file(fwfile, enfile, fwsize) + fwfile.close() + enfile.close() + fwpath.unlink() + + marlin.relocate_firmware("0x08008800") marlin.add_post_action(encrypt); diff --git a/buildroot/share/PlatformIO/scripts/common-cxxflags.py b/buildroot/share/PlatformIO/scripts/common-cxxflags.py index 0a22eccf7cbd..22a0665e05d5 100644 --- a/buildroot/share/PlatformIO/scripts/common-cxxflags.py +++ b/buildroot/share/PlatformIO/scripts/common-cxxflags.py @@ -2,17 +2,18 @@ # common-cxxflags.py # Convenience script to apply customizations to CPP flags # + import pioutil if pioutil.is_pio_build(): Import("env") cxxflags = [ - #"-Wno-incompatible-pointer-types", - #"-Wno-unused-const-variable", - #"-Wno-maybe-uninitialized", - #"-Wno-sign-compare" + # "-Wno-incompatible-pointer-types", + # "-Wno-unused-const-variable", + # "-Wno-maybe-uninitialized", + # "-Wno-sign-compare" ] - if "teensy" not in env['PIOENV']: + if "teensy" not in env["PIOENV"]: cxxflags += ["-Wno-register"] env.Append(CXXFLAGS=cxxflags) @@ -20,8 +21,8 @@ # Add CPU frequency as a compile time constant instead of a runtime variable # def add_cpu_freq(): - if 'BOARD_F_CPU' in env: - env['BUILD_FLAGS'].append('-DBOARD_F_CPU=' + env['BOARD_F_CPU']) + if "BOARD_F_CPU" in env: + env["BUILD_FLAGS"].append("-DBOARD_F_CPU=" + env["BOARD_F_CPU"]) # Useful for JTAG debugging # @@ -29,8 +30,14 @@ def add_cpu_freq(): # It useful to keep two live versions: a debug version for debugging and another for # release, for flashing when upload is not done automatically by jlink/stlink. 
# Without this, PIO needs to recompile everything twice for any small change. - if env.GetBuildType() == "debug" and env.get('UPLOAD_PROTOCOL') not in ['jlink', 'stlink', 'custom']: - env['BUILD_DIR'] = '$PROJECT_BUILD_DIR/$PIOENV/debug' + if env.GetBuildType() == "debug" and env.get("UPLOAD_PROTOCOL") not in ["jlink", "stlink", "custom"]: + env["BUILD_DIR"] = "$PROJECT_BUILD_DIR/$PIOENV/debug" + + def on_program_ready(source, target, env): + import shutil + shutil.copy(target[0].get_abspath(), env.subst("$PROJECT_BUILD_DIR/$PIOENV")) + + env.AddPostAction("$PROGPATH", on_program_ready) # On some platform, F_CPU is a runtime variable. Since it's used to convert from ns # to CPU cycles, this adds overhead preventing small delay (in the order of less than diff --git a/buildroot/share/PlatformIO/scripts/common-dependencies.py b/buildroot/share/PlatformIO/scripts/common-dependencies.py index 91331d8f7a60..6b5a9aea5792 100644 --- a/buildroot/share/PlatformIO/scripts/common-dependencies.py +++ b/buildroot/share/PlatformIO/scripts/common-dependencies.py @@ -67,6 +67,7 @@ def add_to_feat_cnf(feature, flines): for dep in re.split(r',\s*', line): lib_name = re.sub(r'@([~^]|[<>]=?)?[\d.]+', '', dep.strip()).split('=').pop(0) lib_re = re.compile('(?!^' + lib_name + '\\b)') + if not 'lib_deps' in feat: feat['lib_deps'] = {} feat['lib_deps'] = list(filter(lib_re.match, feat['lib_deps'])) + [dep] blab("[%s] lib_deps = %s" % (feature, dep), 3) diff --git a/buildroot/share/PlatformIO/scripts/configuration.py b/buildroot/share/PlatformIO/scripts/configuration.py new file mode 100644 index 000000000000..3fc43ed6d89a --- /dev/null +++ b/buildroot/share/PlatformIO/scripts/configuration.py @@ -0,0 +1,240 @@ +# +# configuration.py +# Apply options from config.ini to the existing Configuration headers +# +import re, shutil, configparser +from pathlib import Path + +verbose = 0 +def blab(str,level=1): + if verbose >= level: print(f"[config] {str}") + +def config_path(cpath): + return Path("Marlin", cpath, encoding='utf-8') + +# Apply a single name = on/off ; name = value ; etc. +# TODO: Limit to the given (optional) configuration +def apply_opt(name, val, conf=None): + if name == "lcd": name, val = val, "on" + + # Create a regex to match the option and capture parts of the line + regex = re.compile(rf'^(\s*)(//\s*)?(#define\s+)({name}\b)(\s*)(.*?)(\s*)(//.*)?$', re.IGNORECASE) + + # Find and enable and/or update all matches + for file in ("Configuration.h", "Configuration_adv.h"): + fullpath = config_path(file) + lines = fullpath.read_text(encoding='utf-8').split('\n') + found = False + for i in range(len(lines)): + line = lines[i] + match = regex.match(line) + if match and match[4].upper() == name.upper(): + found = True + # For boolean options un/comment the define + if val in ("on", "", None): + newline = re.sub(r'^(\s*)//+\s*(#define)(\s{1,3})?(\s*)', r'\1\2 \4', line) + elif val == "off": + newline = re.sub(r'^(\s*)(#define)(\s{1,3})?(\s*)', r'\1//\2 \4', line) + else: + # For options with values, enable and set the value + newline = match[1] + match[3] + match[4] + match[5] + val + if match[8]: + sp = match[7] if match[7] else ' ' + newline += sp + match[8] + lines[i] = newline + blab(f"Set {name} to {val}") + + # If the option was found, write the modified lines + if found: + fullpath.write_text('\n'.join(lines), encoding='utf-8') + break + + # If the option didn't appear in either config file, add it + if not found: + # OFF options are added as disabled items so they appear + # in config dumps. 
Useful for custom settings. + prefix = "" + if val == "off": + prefix, val = "//", "" # Item doesn't appear in config dump + #val = "false" # Item appears in config dump + + # Uppercase the option unless already mixed/uppercase + added = name.upper() if name.islower() else name + + # Add the provided value after the name + if val != "on" and val != "" and val is not None: + added += " " + val + + # Prepend the new option after the first set of #define lines + fullpath = config_path("Configuration.h") + with fullpath.open(encoding='utf-8') as f: + lines = f.readlines() + linenum = 0 + gotdef = False + for line in lines: + isdef = line.startswith("#define") + if not gotdef: + gotdef = isdef + elif not isdef: + break + linenum += 1 + lines.insert(linenum, f"{prefix}#define {added:30} // Added by config.ini\n") + fullpath.write_text(''.join(lines), encoding='utf-8') + +# Fetch configuration files from GitHub given the path. +# Return True if any files were fetched. +def fetch_example(url): + if url.endswith("/"): url = url[:-1] + if not url.startswith('http'): + brch = "bugfix-2.1.x" + if '@' in url: url, brch = map(str.strip, url.split('@')) + if url == 'examples/default': url = 'default' + url = f"https://raw.githubusercontent.com/MarlinFirmware/Configurations/{brch}/config/{url}" + url = url.replace("%", "%25").replace(" ", "%20") + + # Find a suitable fetch command + if shutil.which("curl") is not None: + fetch = "curl -L -s -S -f -o" + elif shutil.which("wget") is not None: + fetch = "wget -q -O" + else: + blab("Couldn't find curl or wget", -1) + return False + + import os + + # Reset configurations to default + os.system("git checkout HEAD Marlin/*.h") + + # Try to fetch the remote files + gotfile = False + for fn in ("Configuration.h", "Configuration_adv.h", "_Bootscreen.h", "_Statusscreen.h"): + if os.system(f"{fetch} wgot {url}/{fn} >/dev/null 2>&1") == 0: + shutil.move('wgot', config_path(fn)) + gotfile = True + + if Path('wgot').exists(): shutil.rmtree('wgot') + + return gotfile + +def section_items(cp, sectkey): + return cp.items(sectkey) if sectkey in cp.sections() else [] + +# Apply all items from a config section +def apply_ini_by_name(cp, sect): + iniok = True + if sect in ('config:base', 'config:root'): + iniok = False + items = section_items(cp, 'config:base') + section_items(cp, 'config:root') + else: + items = section_items(cp, sect) + + for item in items: + if iniok or not item[0].startswith('ini_'): + apply_opt(item[0], item[1]) + +# Apply all config sections from a parsed file +def apply_all_sections(cp): + for sect in cp.sections(): + if sect.startswith('config:'): + apply_ini_by_name(cp, sect) + +# Apply certain config sections from a parsed file +def apply_sections(cp, ckey='all'): + blab(f"Apply section key: {ckey}") + if ckey == 'all': + apply_all_sections(cp) + else: + # Apply the base/root config.ini settings after external files are done + if ckey in ('base', 'root'): + apply_ini_by_name(cp, 'config:base') + + # Apply historically 'Configuration.h' settings everywhere + if ckey == 'basic': + apply_ini_by_name(cp, 'config:basic') + + # Apply historically Configuration_adv.h settings everywhere + # (Some of which rely on defines in 'Conditionals_LCD.h') + elif ckey in ('adv', 'advanced'): + apply_ini_by_name(cp, 'config:advanced') + + # Apply a specific config: section directly + elif ckey.startswith('config:'): + apply_ini_by_name(cp, ckey) + +# Apply settings from a top level config.ini +def apply_config_ini(cp): + blab("=" * 20 + " Gather 'config.ini' 
entries...") + + # Pre-scan for ini_use_config to get config_keys + base_items = section_items(cp, 'config:base') + section_items(cp, 'config:root') + config_keys = ['base'] + for ikey, ival in base_items: + if ikey == 'ini_use_config': + config_keys = map(str.strip, ival.split(',')) + + # For each ini_use_config item perform an action + for ckey in config_keys: + addbase = False + + # For a key ending in .ini load and parse another .ini file + if ckey.endswith('.ini'): + sect = 'base' + if '@' in ckey: sect, ckey = map(str.strip, ckey.split('@')) + cp2 = configparser.ConfigParser() + cp2.read(config_path(ckey)) + apply_sections(cp2, sect) + ckey = 'base'; + + # (Allow 'example/' as a shortcut for 'examples/') + elif ckey.startswith('example/'): + ckey = 'examples' + ckey[7:] + + # For 'examples/' fetch an example set from GitHub. + # For https?:// do a direct fetch of the URL. + if ckey.startswith('examples/') or ckey.startswith('http'): + fetch_example(ckey) + ckey = 'base' + + if ckey == 'all': + apply_sections(cp) + + else: + # Apply keyed sections after external files are done + apply_sections(cp, 'config:' + ckey) + +if __name__ == "__main__": + # + # From command line use the given file name + # + import sys + args = sys.argv[1:] + if len(args) > 0: + if args[0].endswith('.ini'): + ini_file = args[0] + else: + print("Usage: %s <.ini file>" % sys.argv[0]) + else: + ini_file = config_path('config.ini') + + if ini_file: + user_ini = configparser.ConfigParser() + user_ini.read(ini_file) + apply_config_ini(user_ini) + +else: + # + # From within PlatformIO use the loaded INI file + # + import pioutil + if pioutil.is_pio_build(): + + Import("env") + + try: + verbose = int(env.GetProjectOption('custom_verbose')) + except: + pass + + from platformio.project.config import ProjectConfig + apply_config_ini(ProjectConfig()) diff --git a/buildroot/share/PlatformIO/scripts/download_mks_assets.py b/buildroot/share/PlatformIO/scripts/download_mks_assets.py index 3984c0baf63f..661fb2e438e4 100644 --- a/buildroot/share/PlatformIO/scripts/download_mks_assets.py +++ b/buildroot/share/PlatformIO/scripts/download_mks_assets.py @@ -5,45 +5,49 @@ import pioutil if pioutil.is_pio_build(): Import("env") - import os,requests,zipfile,tempfile,shutil + import requests,zipfile,tempfile,shutil + from pathlib import Path url = "https://github.com/makerbase-mks/Mks-Robin-Nano-Marlin2.0-Firmware/archive/0263cdaccf.zip" - deps_path = env.Dictionary("PROJECT_LIBDEPS_DIR") - zip_path = os.path.join(deps_path, "mks-assets.zip") - assets_path = os.path.join(env.Dictionary("PROJECT_BUILD_DIR"), env.Dictionary("PIOENV"), "assets") + deps_path = Path(env.Dictionary("PROJECT_LIBDEPS_DIR")) + zip_path = deps_path / "mks-assets.zip" + assets_path = Path(env.Dictionary("PROJECT_BUILD_DIR"), env.Dictionary("PIOENV"), "assets") def download_mks_assets(): print("Downloading MKS Assets") r = requests.get(url, stream=True) # the user may have a very clean workspace, # so create the PROJECT_LIBDEPS_DIR directory if not exits - if os.path.exists(deps_path) == False: - os.mkdir(deps_path) - with open(zip_path, 'wb') as fd: + if not deps_path.exists(): + deps_path.mkdir() + with zip_path.open('wb') as fd: for chunk in r.iter_content(chunk_size=128): fd.write(chunk) def copy_mks_assets(): print("Copying MKS Assets") - output_path = tempfile.mkdtemp() + output_path = Path(tempfile.mkdtemp()) zip_obj = zipfile.ZipFile(zip_path, 'r') zip_obj.extractall(output_path) zip_obj.close() - if os.path.exists(assets_path) == True and 
os.path.isdir(assets_path) == False: - os.unlink(assets_path) - if os.path.exists(assets_path) == False: - os.mkdir(assets_path) + if assets_path.exists() and not assets_path.is_dir(): + assets_path.unlink() + if not assets_path.exists(): + assets_path.mkdir() base_path = '' - for filename in os.listdir(output_path): + for filename in output_path.iterdir(): base_path = filename - for filename in os.listdir(os.path.join(output_path, base_path, 'Firmware', 'mks_font')): - shutil.copy(os.path.join(output_path, base_path, 'Firmware', 'mks_font', filename), assets_path) - for filename in os.listdir(os.path.join(output_path, base_path, 'Firmware', 'mks_pic')): - shutil.copy(os.path.join(output_path, base_path, 'Firmware', 'mks_pic', filename), assets_path) + fw_path = (output_path / base_path / 'Firmware') + font_path = fw_path / 'mks_font' + for filename in font_path.iterdir(): + shutil.copy(font_path / filename, assets_path) + pic_path = fw_path / 'mks_pic' + for filename in pic_path.iterdir(): + shutil.copy(pic_path / filename, assets_path) shutil.rmtree(output_path, ignore_errors=True) - if os.path.exists(zip_path) == False: + if not zip_path.exists(): download_mks_assets() - if os.path.exists(assets_path) == False: + if not assets_path.exists(): copy_mks_assets() diff --git a/buildroot/share/PlatformIO/scripts/generic_create_variant.py b/buildroot/share/PlatformIO/scripts/generic_create_variant.py index 2d231907a4ec..49d4c98d3e15 100644 --- a/buildroot/share/PlatformIO/scripts/generic_create_variant.py +++ b/buildroot/share/PlatformIO/scripts/generic_create_variant.py @@ -7,16 +7,14 @@ # import pioutil if pioutil.is_pio_build(): - import os,shutil,marlin - from SCons.Script import DefaultEnvironment - from platformio import util - - env = DefaultEnvironment() + import shutil,marlin + from pathlib import Path # # Get the platform name from the 'platform_packages' option, # or look it up by the platform.class.name. 
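    # (For instance, a hypothetical entry like
    #  'platform_packages = framework-arduinoststm32@https://github.com/user/repo'
    #  would resolve below, via PackageSpec, to the package name
    #  'framework-arduinoststm32'. Example entry assumed for illustration.)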
# + env = marlin.env platform = env.PioPlatform() from platformio.package.meta import PackageSpec @@ -37,8 +35,8 @@ if platform_name in [ "usb-host-msc", "usb-host-msc-cdc-msc", "usb-host-msc-cdc-msc-2", "usb-host-msc-cdc-msc-3", "tool-stm32duino", "biqu-bx-workaround", "main" ]: platform_name = "framework-arduinoststm32" - FRAMEWORK_DIR = platform.get_package_dir(platform_name) - assert os.path.isdir(FRAMEWORK_DIR) + FRAMEWORK_DIR = Path(platform.get_package_dir(platform_name)) + assert FRAMEWORK_DIR.is_dir() board = env.BoardConfig() @@ -47,14 +45,14 @@ #series = mcu_type[:7].upper() + "xx" # Prepare a new empty folder at the destination - variant_dir = os.path.join(FRAMEWORK_DIR, "variants", variant) - if os.path.isdir(variant_dir): + variant_dir = FRAMEWORK_DIR / "variants" / variant + if variant_dir.is_dir(): shutil.rmtree(variant_dir) - if not os.path.isdir(variant_dir): - os.mkdir(variant_dir) + if not variant_dir.is_dir(): + variant_dir.mkdir() # Source dir is a local variant sub-folder - source_dir = os.path.join("buildroot/share/PlatformIO/variants", variant) - assert os.path.isdir(source_dir) + source_dir = Path("buildroot/share/PlatformIO/variants", variant) + assert source_dir.is_dir() marlin.copytree(source_dir, variant_dir) diff --git a/buildroot/share/PlatformIO/scripts/jgaurora_a5s_a1_with_bootloader.py b/buildroot/share/PlatformIO/scripts/jgaurora_a5s_a1_with_bootloader.py index 04a3f1a49208..9256751096c5 100644 --- a/buildroot/share/PlatformIO/scripts/jgaurora_a5s_a1_with_bootloader.py +++ b/buildroot/share/PlatformIO/scripts/jgaurora_a5s_a1_with_bootloader.py @@ -4,37 +4,32 @@ # import pioutil if pioutil.is_pio_build(): - import os,marlin + # Append ${PROGNAME}.bin firmware after bootloader and save it as 'jgaurora_firmware.bin' def addboot(source, target, env): - firmware = open(target[0].path, "rb") - lengthfirmware = os.path.getsize(target[0].path) - bootloader_bin = "buildroot/share/PlatformIO/scripts/" + "jgaurora_bootloader.bin" - bootloader = open(bootloader_bin, "rb") - lengthbootloader = os.path.getsize(bootloader_bin) + from pathlib import Path + + fw_path = Path(target[0].path) + fwb_path = fw_path.parent / 'firmware_with_bootloader.bin' + with fwb_path.open("wb") as fwb_file: + bl_path = Path("buildroot/share/PlatformIO/scripts/jgaurora_bootloader.bin") + bl_file = bl_path.open("rb") + while True: + b = bl_file.read(1) + if b == b'': break + else: fwb_file.write(b) + + with fw_path.open("rb") as fw_file: + while True: + b = fw_file.read(1) + if b == b'': break + else: fwb_file.write(b) - firmware_with_boothloader_bin = target[0].dir.path + '/firmware_with_bootloader.bin' - if os.path.exists(firmware_with_boothloader_bin): - os.remove(firmware_with_boothloader_bin) - firmwareimage = open(firmware_with_boothloader_bin, "wb") - position = 0 - while position < lengthbootloader: - byte = bootloader.read(1) - firmwareimage.write(byte) - position += 1 - position = 0 - while position < lengthfirmware: - byte = firmware.read(1) - firmwareimage.write(byte) - position += 1 - bootloader.close() - firmware.close() - firmwareimage.close() + fws_path = Path(target[0].dir.path, 'firmware_for_sd_upload.bin') + if fws_path.exists(): + fws_path.unlink() - firmware_without_bootloader_bin = target[0].dir.path + '/firmware_for_sd_upload.bin' - if os.path.exists(firmware_without_bootloader_bin): - os.remove(firmware_without_bootloader_bin) - os.rename(target[0].path, firmware_without_bootloader_bin) - #os.rename(target[0].dir.path+'/firmware_with_bootloader.bin', 
target[0].dir.path+'/firmware.bin') + fw_path.rename(fws_path) + import marlin marlin.add_post_action(addboot); diff --git a/buildroot/share/PlatformIO/scripts/lerdge.py b/buildroot/share/PlatformIO/scripts/lerdge.py index 0d7f6c4816a5..607fe312ac84 100644 --- a/buildroot/share/PlatformIO/scripts/lerdge.py +++ b/buildroot/share/PlatformIO/scripts/lerdge.py @@ -8,10 +8,8 @@ import pioutil if pioutil.is_pio_build(): import os,marlin - Import("env") - from SCons.Script import DefaultEnvironment - board = DefaultEnvironment().BoardConfig() + board = marlin.env.BoardConfig() def encryptByte(byte): byte = 0xFF & ((byte << 6) | (byte >> 2)) diff --git a/buildroot/share/PlatformIO/scripts/marlin.py b/buildroot/share/PlatformIO/scripts/marlin.py index ad73eabdfd09..169dd9d3c3a5 100644 --- a/buildroot/share/PlatformIO/scripts/marlin.py +++ b/buildroot/share/PlatformIO/scripts/marlin.py @@ -2,21 +2,18 @@ # marlin.py # Helper module with some commonly-used functions # -import os,shutil +import shutil +from pathlib import Path from SCons.Script import DefaultEnvironment env = DefaultEnvironment() -from os.path import join - def copytree(src, dst, symlinks=False, ignore=None): - for item in os.listdir(src): - s = join(src, item) - d = join(dst, item) - if os.path.isdir(s): - shutil.copytree(s, d, symlinks, ignore) + for item in src.iterdir(): + if item.is_dir(): + shutil.copytree(item, dst / item.name, symlinks, ignore) else: - shutil.copy2(s, d) + shutil.copy2(item, dst / item.name) def replace_define(field, value): for define in env['CPPDEFINES']: @@ -34,7 +31,7 @@ def relocate_vtab(address): # Replace the existing -Wl,-T with the given ldscript path def custom_ld_script(ldname): - apath = os.path.abspath("buildroot/share/PlatformIO/ldscripts/" + ldname) + apath = str(Path("buildroot/share/PlatformIO/ldscripts", ldname).resolve()) for i, flag in enumerate(env["LINKFLAGS"]): if "-Wl,-T" in flag: env["LINKFLAGS"][i] = "-Wl,-T" + apath @@ -52,15 +49,15 @@ def encrypt_mks(source, target, env, new_name): mf = env["MARLIN_FEATURES"] if "FIRMWARE_BIN" in mf: new_name = mf["FIRMWARE_BIN"] - fwpath = target[0].path - fwfile = open(fwpath, "rb") - enfile = open(target[0].dir.path + "/" + new_name, "wb") - length = os.path.getsize(fwpath) + fwpath = Path(target[0].path) + fwfile = fwpath.open("rb") + enfile = Path(target[0].dir.path, new_name).open("wb") + length = fwpath.stat().st_size position = 0 try: while position < length: byte = fwfile.read(1) - if position >= 320 and position < 31040: + if 320 <= position < 31040: byte = chr(ord(byte) ^ key[position & 31]) if sys.version_info[0] > 2: byte = bytes(byte, 'latin1') @@ -69,7 +66,7 @@ def encrypt_mks(source, target, env, new_name): finally: fwfile.close() enfile.close() - os.remove(fwpath) + fwpath.unlink() def add_post_action(action): - env.AddPostAction(join("$BUILD_DIR", "${PROGNAME}.bin"), action); + env.AddPostAction(str(Path("$BUILD_DIR", "${PROGNAME}.bin")), action); diff --git a/buildroot/share/PlatformIO/scripts/mc-apply.py b/buildroot/share/PlatformIO/scripts/mc-apply.py index ed0ed795c6b1..b42ba12f7adf 100755 --- a/buildroot/share/PlatformIO/scripts/mc-apply.py +++ b/buildroot/share/PlatformIO/scripts/mc-apply.py @@ -5,7 +5,6 @@ import json import sys import shutil -import re opt_output = '--opt' in sys.argv output_suffix = '.sh' if opt_output else '' if '--bare-output' in sys.argv else '.gen' diff --git a/buildroot/share/PlatformIO/scripts/offset_and_rename.py b/buildroot/share/PlatformIO/scripts/offset_and_rename.py index 
7d4b0fb504ae..de14ccbbbf5f 100644 --- a/buildroot/share/PlatformIO/scripts/offset_and_rename.py +++ b/buildroot/share/PlatformIO/scripts/offset_and_rename.py @@ -10,12 +10,10 @@ # import pioutil if pioutil.is_pio_build(): - import os,sys,marlin - Import("env") - - from SCons.Script import DefaultEnvironment - board = DefaultEnvironment().BoardConfig() + import marlin + env = marlin.env + board = env.BoardConfig() board_keys = board.get("build").keys() # @@ -55,8 +53,13 @@ def encrypt(source, target, env): # if 'rename' in board_keys: + # If FIRMWARE_BIN is defined by config, override all + mf = env["MARLIN_FEATURES"] + if "FIRMWARE_BIN" in mf: new_name = mf["FIRMWARE_BIN"] + else: new_name = board.get("build.rename") + def rename_target(source, target, env): - firmware = os.path.join(target[0].dir.path, board.get("build.rename")) - os.replace(target[0].path, firmware) + from pathlib import Path + Path(target[0].path).replace(Path(target[0].dir.path, new_name)) marlin.add_post_action(rename_target) diff --git a/buildroot/share/PlatformIO/scripts/openblt.py b/buildroot/share/PlatformIO/scripts/openblt.py index 104bd142cac9..6db8727ce4eb 100644 --- a/buildroot/share/PlatformIO/scripts/openblt.py +++ b/buildroot/share/PlatformIO/scripts/openblt.py @@ -3,7 +3,6 @@ # import pioutil if pioutil.is_pio_build(): - import os,sys from os.path import join Import("env") diff --git a/buildroot/share/PlatformIO/scripts/pioutil.py b/buildroot/share/PlatformIO/scripts/pioutil.py index 5ae28a62f393..18e6dba92889 100644 --- a/buildroot/share/PlatformIO/scripts/pioutil.py +++ b/buildroot/share/PlatformIO/scripts/pioutil.py @@ -6,6 +6,7 @@ def is_pio_build(): from SCons.Script import DefaultEnvironment env = DefaultEnvironment() + if "IsCleanTarget" in dir(env) and env.IsCleanTarget(): return False return not env.IsIntegrationDump() def get_pio_version(): diff --git a/buildroot/share/PlatformIO/scripts/preflight-checks.py b/buildroot/share/PlatformIO/scripts/preflight-checks.py index 25eee7e29d12..c10f0fc2e788 100644 --- a/buildroot/share/PlatformIO/scripts/preflight-checks.py +++ b/buildroot/share/PlatformIO/scripts/preflight-checks.py @@ -5,11 +5,13 @@ import pioutil if pioutil.is_pio_build(): - import os,re,sys + import re,sys + from pathlib import Path Import("env") def get_envs_for_board(board): - with open(os.path.join("Marlin", "src", "pins", "pins.h"), "r") as file: + ppath = Path("Marlin/src/pins/pins.h") + with ppath.open() as file: if sys.platform == 'win32': envregex = r"(?:env|win):" @@ -52,11 +54,16 @@ def sanity_check_target(): if 'PIOENV' not in env: raise SystemExit("Error: PIOENV is not defined. This script is intended to be used with PlatformIO") + # Require PlatformIO 6.1.1 or later + vers = pioutil.get_pio_version() + if vers < [6, 1, 1]: + raise SystemExit("Error: Marlin requires PlatformIO >= 6.1.1. Use 'pio upgrade' to get a newer version.") + if 'MARLIN_FEATURES' not in env: - raise SystemExit("Error: this script should be used after common Marlin scripts") + raise SystemExit("Error: this script should be used after common Marlin scripts.") - if 'MOTHERBOARD' not in env['MARLIN_FEATURES']: - raise SystemExit("Error: MOTHERBOARD is not defined in Configuration.h") + if len(env['MARLIN_FEATURES']) == 0: + raise SystemExit("Error: Failed to parse Marlin features. 
See previous error messages.")

     build_env = env['PIOENV']
     motherboard = env['MARLIN_FEATURES']['MOTHERBOARD']
@@ -72,9 +79,10 @@ def sanity_check_target():
     #
     # Check for Config files in two common incorrect places
     #
-    for p in [ env['PROJECT_DIR'], os.path.join(env['PROJECT_DIR'], "config") ]:
-        for f in [ "Configuration.h", "Configuration_adv.h" ]:
-            if os.path.isfile(os.path.join(p, f)):
+    epath = Path(env['PROJECT_DIR'])
+    for p in [ epath, epath / "config" ]:
+        for f in ("Configuration.h", "Configuration_adv.h"):
+            if (p / f).is_file():
                 err = "ERROR: Config files found in directory %s. Please move them into the Marlin subfolder." % p
                 raise SystemExit(err)

@@ -82,12 +90,12 @@
     # Find the name.cpp.o or name.o and remove it
     #
     def rm_ofile(subdir, name):
-        build_dir = os.path.join(env['PROJECT_BUILD_DIR'], build_env);
-        for outdir in [ build_dir, os.path.join(build_dir, "debug") ]:
-            for ext in [ ".cpp.o", ".o" ]:
-                fpath = os.path.join(outdir, "src", "src", subdir, name + ext)
-                if os.path.exists(fpath):
-                    os.remove(fpath)
+        build_dir = Path(env['PROJECT_BUILD_DIR'], build_env);
+        for outdir in (build_dir, build_dir / "debug"):
+            for ext in (".cpp.o", ".o"):
+                fpath = outdir / "src/src" / subdir / (name + ext)
+                if fpath.exists():
+                    fpath.unlink()

     #
     # Give warnings on every build
@@ -104,16 +112,25 @@ def rm_ofile(subdir, name):
     # Check for old files indicating an entangled Marlin (mixing old and new code)
     #
     mixedin = []
-    p = os.path.join(env['PROJECT_DIR'], "Marlin", "src", "lcd", "dogm")
+    p = Path(env['PROJECT_DIR'], "Marlin/src/lcd/dogm")
     for f in [ "ultralcd_DOGM.cpp", "ultralcd_DOGM.h" ]:
-        if os.path.isfile(os.path.join(p, f)):
+        if (p / f).is_file():
             mixedin += [ f ]
-    p = os.path.join(env['PROJECT_DIR'], "Marlin", "src", "feature", "bedlevel", "abl")
+    p = Path(env['PROJECT_DIR'], "Marlin/src/feature/bedlevel/abl")
     for f in [ "abl.cpp", "abl.h" ]:
-        if os.path.isfile(os.path.join(p, f)):
+        if (p / f).is_file():
             mixedin += [ f ]
     if mixedin:
         err = "ERROR: Old files fell into your Marlin folder. Remove %s and try again" % ", ".join(mixedin)
         raise SystemExit(err)

+    #
+    # Check that FILAMENT_RUNOUT_SCRIPT has a %c parameter when required
+    #
+    if 'FILAMENT_RUNOUT_SENSOR' in env['MARLIN_FEATURES'] and 'NUM_RUNOUT_SENSORS' in env['MARLIN_FEATURES']:
+        if env['MARLIN_FEATURES']['NUM_RUNOUT_SENSORS'].isdigit() and int(env['MARLIN_FEATURES']['NUM_RUNOUT_SENSORS']) > 1:
+            if 'FILAMENT_RUNOUT_SCRIPT' in env['MARLIN_FEATURES'] and "%c" not in env['MARLIN_FEATURES']['FILAMENT_RUNOUT_SCRIPT']:
+                err = "ERROR: FILAMENT_RUNOUT_SCRIPT needs a %c parameter (e.g., 'M600 T%c') when NUM_RUNOUT_SENSORS is > 1."
+                raise SystemExit(err)
+
     sanity_check_target()
diff --git a/buildroot/share/PlatformIO/scripts/preprocessor.py b/buildroot/share/PlatformIO/scripts/preprocessor.py
index 96f63a49eb5a..b0fec52bfa10 100644
--- a/buildroot/share/PlatformIO/scripts/preprocessor.py
+++ b/buildroot/share/PlatformIO/scripts/preprocessor.py
@@ -1,7 +1,7 @@
 #
 # preprocessor.py
 #
-import subprocess,os,re
+import subprocess

 nocache = 1
 verbose = 0
@@ -54,51 +54,41 @@ def run_preprocessor(env, fn=None):
 #
 def search_compiler(env):

-    ENV_BUILD_PATH = os.path.join(env['PROJECT_BUILD_DIR'], env['PIOENV'])
-    GCC_PATH_CACHE = os.path.join(ENV_BUILD_PATH, ".gcc_path")
+    from pathlib import Path, PurePath
+
+    ENV_BUILD_PATH = Path(env['PROJECT_BUILD_DIR'], env['PIOENV'])
+    GCC_PATH_CACHE = ENV_BUILD_PATH / ".gcc_path"

     try:
-        filepath = env.GetProjectOption('custom_gcc')
+        gccpath = env.GetProjectOption('custom_gcc')
         blab("Getting compiler from env")
-        return filepath
+        return gccpath
     except:
         pass

     # Warning: The cached .gcc_path will obscure a newly-installed toolkit
-    if not nocache and os.path.exists(GCC_PATH_CACHE):
+    if not nocache and GCC_PATH_CACHE.exists():
         blab("Getting g++ path from cache")
-        with open(GCC_PATH_CACHE, 'r') as f:
-            return f.read()
+        return GCC_PATH_CACHE.read_text()

-    # Find the current platform compiler by searching the $PATH
-    # which will be in a platformio toolchain bin folder
-    path_regex = re.escape(env['PROJECT_PACKAGES_DIR'])
-    gcc = "g++"
+    # Use any item in $PATH corresponding to a platformio toolchain bin folder
+    path_separator = ':'
+    gcc_exe = '*g++'
     if env['PLATFORM'] == 'win32':
         path_separator = ';'
-        path_regex += r'.*\\bin'
-        gcc += ".exe"
-    else:
-        path_separator = ':'
-        path_regex += r'/.+/bin'
-
-    # Search for the compiler
-    for pathdir in env['ENV']['PATH'].split(path_separator):
-        if not re.search(path_regex, pathdir, re.IGNORECASE):
-            continue
-        for filepath in os.listdir(pathdir):
-            if not filepath.endswith(gcc):
-                continue
-            # Use entire path to not rely on env PATH
-            filepath = os.path.sep.join([pathdir, filepath])
-            # Cache the g++ path to no search always
-            if not nocache and os.path.exists(ENV_BUILD_PATH):
-                blab("Caching g++ for current env")
-                with open(GCC_PATH_CACHE, 'w+') as f:
-                    f.write(filepath)
-
-            return filepath
-
-    filepath = env.get('CXX')
-    blab("Couldn't find a compiler! Fallback to %s" % filepath)
-    return filepath
+        gcc_exe += ".exe"
+
+    # Search for the compiler in PATH
+    for ppath in map(Path, env['ENV']['PATH'].split(path_separator)):
+        if ppath.match(env['PROJECT_PACKAGES_DIR'] + "/**/bin"):
+            for gpath in ppath.glob(gcc_exe):
+                gccpath = str(gpath.resolve())
+                # Cache the g++ path to avoid searching every time
+                if not nocache and ENV_BUILD_PATH.exists():
+                    blab("Caching g++ for current env")
+                    GCC_PATH_CACHE.write_text(gccpath)
+                return gccpath
+
+    gccpath = env.get('CXX')
+    blab("Couldn't find a compiler! Fallback to %s" % gccpath)
+    return gccpath
diff --git a/buildroot/share/PlatformIO/scripts/schema.py b/buildroot/share/PlatformIO/scripts/schema.py
new file mode 100644
index 000000000000..103aa1f072dc
--- /dev/null
+++ b/buildroot/share/PlatformIO/scripts/schema.py
@@ -0,0 +1,421 @@
+#!/usr/bin/env python3
+#
+# schema.py
+#
+# Used by signature.py via common-dependencies.py to generate a schema file during the PlatformIO build.
+# This script can also be run standalone from within the Marlin repo to generate all schema files.
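+#
+# A typical standalone run, from the repo root (output names inferred from main() below):
+#
+#   python3 buildroot/share/PlatformIO/scripts/schema.py json    # -> schema.json
+#   python3 buildroot/share/PlatformIO/scripts/schema.py yaml    # -> schema.yml
+#   python3 buildroot/share/PlatformIO/scripts/schema.py group   # -> schema_grouped.json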
+# +import re,json +from pathlib import Path + +def extend_dict(d:dict, k:tuple): + if len(k) >= 1 and k[0] not in d: + d[k[0]] = {} + if len(k) >= 2 and k[1] not in d[k[0]]: + d[k[0]][k[1]] = {} + if len(k) >= 3 and k[2] not in d[k[0]][k[1]]: + d[k[0]][k[1]][k[2]] = {} + +grouping_patterns = [ + re.compile(r'^([XYZIJKUVW]|[XYZ]2|Z[34]|E[0-7])$'), + re.compile(r'^AXIS\d$'), + re.compile(r'^(MIN|MAX)$'), + re.compile(r'^[0-8]$'), + re.compile(r'^HOTEND[0-7]$'), + re.compile(r'^(HOTENDS|BED|PROBE|COOLER)$'), + re.compile(r'^[XYZIJKUVW]M(IN|AX)$') +] +# If the indexed part of the option name matches a pattern +# then add it to the dictionary. +def find_grouping(gdict, filekey, sectkey, optkey, pindex): + optparts = optkey.split('_') + if 1 < len(optparts) > pindex: + for patt in grouping_patterns: + if patt.match(optparts[pindex]): + subkey = optparts[pindex] + modkey = '_'.join(optparts) + optparts[pindex] = '*' + wildkey = '_'.join(optparts) + kkey = f'{filekey}|{sectkey}|{wildkey}' + if kkey not in gdict: gdict[kkey] = [] + gdict[kkey].append((subkey, modkey)) + +# Build a list of potential groups. Only those with multiple items will be grouped. +def group_options(schema): + for pindex in range(10, -1, -1): + found_groups = {} + for filekey, f in schema.items(): + for sectkey, s in f.items(): + for optkey in s: + find_grouping(found_groups, filekey, sectkey, optkey, pindex) + + fkeys = [ k for k in found_groups.keys() ] + for kkey in fkeys: + items = found_groups[kkey] + if len(items) > 1: + f, s, w = kkey.split('|') + extend_dict(schema, (f, s, w)) # Add wildcard group to schema + for subkey, optkey in items: # Add all items to wildcard group + schema[f][s][w][subkey] = schema[f][s][optkey] # Move non-wildcard item to wildcard group + del schema[f][s][optkey] + del found_groups[kkey] + +# Extract all board names from boards.h +def load_boards(): + bpath = Path("Marlin/src/core/boards.h") + if bpath.is_file(): + with bpath.open() as bfile: + boards = [] + for line in bfile: + if line.startswith("#define BOARD_"): + bname = line.split()[1] + if bname != "BOARD_UNKNOWN": boards.append(bname) + return "['" + "','".join(boards) + "']" + return '' + +# +# Extract a schema from the current configuration files +# +def extract(): + # Load board names from boards.h + boards = load_boards() + + # Parsing states + class Parse: + NORMAL = 0 # No condition yet + BLOCK_COMMENT = 1 # Looking for the end of the block comment + EOL_COMMENT = 2 # EOL comment started, maybe add the next comment? 
+ GET_SENSORS = 3 # Gathering temperature sensor options + ERROR = 9 # Syntax error + + # List of files to process, with shorthand + filekey = { 'Configuration.h':'basic', 'Configuration_adv.h':'advanced' } + # A JSON object to store the data + sch_out = { 'basic':{}, 'advanced':{} } + # Regex for #define NAME [VALUE] [COMMENT] with sanitized line + defgrep = re.compile(r'^(//)?\s*(#define)\s+([A-Za-z0-9_]+)\s*(.*?)\s*(//.+)?$') + # Defines to ignore + ignore = ('CONFIGURATION_H_VERSION', 'CONFIGURATION_ADV_H_VERSION', 'CONFIG_EXAMPLES_DIR', 'CONFIG_EXPORT') + # Start with unknown state + state = Parse.NORMAL + # Serial ID + sid = 0 + # Loop through files and parse them line by line + for fn, fk in filekey.items(): + with Path("Marlin", fn).open() as fileobj: + section = 'none' # Current Settings section + line_number = 0 # Counter for the line number of the file + conditions = [] # Create a condition stack for the current file + comment_buff = [] # A temporary buffer for comments + options_json = '' # A buffer for the most recent options JSON found + eol_options = False # The options came from end of line, so only apply once + join_line = False # A flag that the line should be joined with the previous one + line = '' # A line buffer to handle \ continuation + last_added_ref = None # Reference to the last added item + # Loop through the lines in the file + for the_line in fileobj.readlines(): + line_number += 1 + + # Clean the line for easier parsing + the_line = the_line.strip() + + if join_line: # A previous line is being made longer + line += (' ' if line else '') + the_line + else: # Otherwise, start the line anew + line, line_start = the_line, line_number + + # If the resulting line ends with a \, don't process now. + # Strip the end off. The next line will be joined with it. + join_line = line.endswith("\\") + if join_line: + line = line[:-1].strip() + continue + else: + line_end = line_number + + defmatch = defgrep.match(line) + + # Special handling for EOL comments after a #define. + # At this point the #define is already digested and inserted, + # so we have to extend it + if state == Parse.EOL_COMMENT: + # If the line is not a comment, we're done with the EOL comment + if not defmatch and the_line.startswith('//'): + comment_buff.append(the_line[2:].strip()) + else: + last_added_ref['comment'] = ' '.join(comment_buff) + comment_buff = [] + state = Parse.NORMAL + + def use_comment(c, opt, sec, bufref): + if c.startswith(':'): # If the comment starts with : then it has magic JSON + d = c[1:].strip() # Strip the leading : + cbr = c.rindex('}') if d.startswith('{') else c.rindex(']') if d.startswith('[') else 0 + if cbr: + opt, cmt = c[1:cbr+1].strip(), c[cbr+1:].strip() + if cmt != '': bufref.append(cmt) + else: + opt = c[1:].strip() + elif c.startswith('@section'): # Start a new section + sec = c[8:].strip() + elif not c.startswith('========'): + bufref.append(c) + return opt, sec + + # In a block comment, capture lines up to the end of the comment. + # Assume nothing follows the comment closure. 
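+            # (Hypothetical example: an input line ' * (mm) Distance to probe */'
+            # yields cline='(mm) Distance to probe' once the trailing '*/' and the
+            # leading '*' are stripped below.)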
+ if state in (Parse.BLOCK_COMMENT, Parse.GET_SENSORS): + endpos = line.find('*/') + if endpos < 0: + cline = line + else: + cline, line = line[:endpos].strip(), line[endpos+2:].strip() + + # Temperature sensors are done + if state == Parse.GET_SENSORS: + options_json = f'[ {options_json[:-2]} ]' + + state = Parse.NORMAL + + # Strip the leading '*' from block comments + if cline.startswith('*'): cline = cline[1:].strip() + + # Collect temperature sensors + if state == Parse.GET_SENSORS: + sens = re.match(r'^(-?\d+)\s*:\s*(.+)$', cline) + if sens: + s2 = sens[2].replace("'","''") + options_json += f"{sens[1]}:'{s2}', " + + elif state == Parse.BLOCK_COMMENT: + + # Look for temperature sensors + if cline == "Temperature sensors available:": + state, cline = Parse.GET_SENSORS, "Temperature Sensors" + + options_json, section = use_comment(cline, options_json, section, comment_buff) + + # For the normal state we're looking for any non-blank line + elif state == Parse.NORMAL: + # Skip a commented define when evaluating comment opening + st = 2 if re.match(r'^//\s*#define', line) else 0 + cpos1 = line.find('/*') # Start a block comment on the line? + cpos2 = line.find('//', st) # Start an end of line comment on the line? + + # Only the first comment starter gets evaluated + cpos = -1 + if cpos1 != -1 and (cpos1 < cpos2 or cpos2 == -1): + cpos = cpos1 + comment_buff = [] + state = Parse.BLOCK_COMMENT + eol_options = False + + elif cpos2 != -1 and (cpos2 < cpos1 or cpos1 == -1): + cpos = cpos2 + + # Comment after a define may be continued on the following lines + if defmatch != None and cpos > 10: + state = Parse.EOL_COMMENT + comment_buff = [] + + # Process the start of a new comment + if cpos != -1: + cline, line = line[cpos+2:].strip(), line[:cpos].strip() + + if state == Parse.BLOCK_COMMENT: + # Strip leading '*' from block comments + if cline.startswith('*'): cline = cline[1:].strip() + else: + # Expire end-of-line options after first use + if cline.startswith(':'): eol_options = True + + # Buffer a non-empty comment start + if cline != '': + options_json, section = use_comment(cline, options_json, section, comment_buff) + + # If the line has nothing before the comment, go to the next line + if line == '': + options_json = '' + continue + + # Parenthesize the given expression if needed + def atomize(s): + if s == '' \ + or re.match(r'^[A-Za-z0-9_]*(\([^)]+\))?$', s) \ + or re.match(r'^[A-Za-z0-9_]+ == \d+?$', s): + return s + return f'({s})' + + # + # The conditions stack is an array containing condition-arrays. + # Each condition-array lists the conditions for the current block. + # IF/N/DEF adds a new condition-array to the stack. + # ELSE/ELIF/ENDIF pop the condition-array. + # ELSE/ELIF negate the last item in the popped condition-array. + # ELIF adds a new condition to the end of the array. + # ELSE/ELIF re-push the condition-array. + # + cparts = line.split() + iselif, iselse = cparts[0] == '#elif', cparts[0] == '#else' + if iselif or iselse or cparts[0] == '#endif': + if len(conditions) == 0: + raise Exception(f'no #if block at line {line_number}') + + # Pop the last condition-array from the stack + prev = conditions.pop() + + if iselif or iselse: + prev[-1] = '!' 
+ prev[-1] # Invert the last condition + if iselif: prev.append(atomize(line[5:].strip())) + conditions.append(prev) + + elif cparts[0] == '#if': + conditions.append([ atomize(line[3:].strip()) ]) + elif cparts[0] == '#ifdef': + conditions.append([ f'defined({line[6:].strip()})' ]) + elif cparts[0] == '#ifndef': + conditions.append([ f'!defined({line[7:].strip()})' ]) + + # Handle a complete #define line + elif defmatch != None: + + # Get the match groups into vars + enabled, define_name, val = defmatch[1] == None, defmatch[3], defmatch[4] + + # Increment the serial ID + sid += 1 + + # Create a new dictionary for the current #define + define_info = { + 'section': section, + 'name': define_name, + 'enabled': enabled, + 'line': line_start, + 'sid': sid + } + + # Type is based on the value + if val == '': + value_type = 'switch' + elif re.match(r'^(true|false)$', val): + value_type = 'bool' + val = val == 'true' + elif re.match(r'^[-+]?\s*\d+$', val): + value_type = 'int' + val = int(val) + elif re.match(r'[-+]?\s*(\d+\.|\d*\.\d+)([eE][-+]?\d+)?[fF]?', val): + value_type = 'float' + val = float(val.replace('f','')) + else: + value_type = 'string' if val[0] == '"' \ + else 'char' if val[0] == "'" \ + else 'state' if re.match(r'^(LOW|HIGH)$', val) \ + else 'enum' if re.match(r'^[A-Za-z0-9_]{3,}$', val) \ + else 'int[]' if re.match(r'^{(\s*[-+]?\s*\d+\s*(,\s*)?)+}$', val) \ + else 'float[]' if re.match(r'^{(\s*[-+]?\s*(\d+\.|\d*\.\d+)([eE][-+]?\d+)?[fF]?\s*(,\s*)?)+}$', val) \ + else 'array' if val[0] == '{' \ + else '' + + if val != '': define_info['value'] = val + if value_type != '': define_info['type'] = value_type + + # Join up accumulated conditions with && + if conditions: define_info['requires'] = ' && '.join(sum(conditions, [])) + + # If the comment_buff is not empty, add the comment to the info + if comment_buff: + full_comment = '\n'.join(comment_buff) + + # An EOL comment will be added later + # The handling could go here instead of above + if state == Parse.EOL_COMMENT: + define_info['comment'] = '' + else: + define_info['comment'] = full_comment + comment_buff = [] + + # If the comment specifies units, add that to the info + units = re.match(r'^\(([^)]+)\)', full_comment) + if units: + units = units[1] + if units == 's' or units == 'sec': units = 'seconds' + define_info['units'] = units + + # Set the options for the current #define + if define_name == "MOTHERBOARD" and boards != '': + define_info['options'] = boards + elif options_json != '': + define_info['options'] = options_json + if eol_options: options_json = '' + + # Create section dict if it doesn't exist yet + if section not in sch_out[fk]: sch_out[fk][section] = {} + + # If define has already been seen... 
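+                    # (e.g., a hypothetical FOO defined in both an #if branch and
+                    # its #else is stored as a list of define_info dicts, one per
+                    # occurrence, instead of a single dict.)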
+ if define_name in sch_out[fk][section]: + info = sch_out[fk][section][define_name] + if isinstance(info, dict): info = [ info ] # Convert a single dict into a list + info.append(define_info) # Add to the list + else: + # Add the define dict with name as key + sch_out[fk][section][define_name] = define_info + + if state == Parse.EOL_COMMENT: + last_added_ref = define_info + + return sch_out + +def dump_json(schema:dict, jpath:Path): + with jpath.open('w') as jfile: + json.dump(schema, jfile, ensure_ascii=False, indent=2) + +def dump_yaml(schema:dict, ypath:Path): + import yaml + with ypath.open('w') as yfile: + yaml.dump(schema, yfile, default_flow_style=False, width=120, indent=2) + +def main(): + try: + schema = extract() + except Exception as exc: + print("Error: " + str(exc)) + schema = None + + if schema: + + # Get the first command line argument + import sys + if len(sys.argv) > 1: + arg = sys.argv[1] + else: + arg = 'some' + + # JSON schema + if arg in ['some', 'json', 'jsons']: + print("Generating JSON ...") + dump_json(schema, Path('schema.json')) + + # JSON schema (wildcard names) + if arg in ['group', 'jsons']: + group_options(schema) + dump_json(schema, Path('schema_grouped.json')) + + # YAML + if arg in ['some', 'yml', 'yaml']: + try: + import yaml + except ImportError: + print("Installing YAML module ...") + import subprocess + try: + subprocess.run(['python3', '-m', 'pip', 'install', 'pyyaml']) + import yaml + except: + print("Failed to install YAML module") + return + + print("Generating YML ...") + dump_yaml(schema, Path('schema.yml')) + +if __name__ == '__main__': + main() diff --git a/buildroot/share/PlatformIO/scripts/signature.py b/buildroot/share/PlatformIO/scripts/signature.py index 41f17fd3b6c7..4fc0084e575b 100644 --- a/buildroot/share/PlatformIO/scripts/signature.py +++ b/buildroot/share/PlatformIO/scripts/signature.py @@ -1,7 +1,11 @@ # # signature.py # -import os,subprocess,re,json,hashlib +import schema + +import subprocess,re,json,hashlib +from datetime import datetime +from pathlib import Path # # Return all macro names in a header as an array, so we can take @@ -35,8 +39,8 @@ def get_file_sha256sum(filepath): # Compress a JSON file into a zip file # import zipfile -def compress_file(filepath, outputbase): - with zipfile.ZipFile(outputbase + '.zip', 'w', compression=zipfile.ZIP_BZIP2, compresslevel=9) as zipf: +def compress_file(filepath, outpath): + with zipfile.ZipFile(outpath, 'w', compression=zipfile.ZIP_BZIP2, compresslevel=9) as zipf: zipf.write(filepath, compress_type=zipfile.ZIP_BZIP2, compresslevel=9) # @@ -51,19 +55,19 @@ def compute_build_signature(env): # Definitions from these files will be kept files_to_keep = [ 'Marlin/Configuration.h', 'Marlin/Configuration_adv.h' ] - build_dir = os.path.join(env['PROJECT_BUILD_DIR'], env['PIOENV']) + build_path = Path(env['PROJECT_BUILD_DIR'], env['PIOENV']) # Check if we can skip processing hashes = '' for header in files_to_keep: hashes += get_file_sha256sum(header)[0:10] - marlin_json = os.path.join(build_dir, 'marlin_config.json') - marlin_zip = os.path.join(build_dir, 'mc') + marlin_json = build_path / 'marlin_config.json' + marlin_zip = build_path / 'mc.zip' # Read existing config file try: - with open(marlin_json, 'r') as infile: + with marlin_json.open() as infile: conf = json.load(infile) if conf['__INITIAL_HASH'] == hashes: # Same configuration, skip recomputing the building signature @@ -109,7 +113,10 @@ def compute_build_signature(env): defines[key] = value if len(value) else "" - if not 
'CONFIGURATION_EMBEDDING' in defines:
+    #
+    # Continue to gather data for CONFIGURATION_EMBEDDING or CONFIG_EXPORT
+    #
+    if not ('CONFIGURATION_EMBEDDING' in defines or 'CONFIG_EXPORT' in defines):
         return
 
     # Second step is to filter useless macro
@@ -145,6 +152,85 @@ def compute_build_signature(env):
             if key in conf_defines[header]:
                 data[header][key] = resolved_defines[key]
 
+    # Get a define's value as an int, or 0 if it is missing or non-numeric
+    def tryint(key):
+        try:
+            return int(defines[key])
+        except:
+            return 0
+
+    config_dump = tryint('CONFIG_EXPORT')
+
+    #
+    # Produce an INI file if CONFIG_EXPORT == 2
+    #
+    if config_dump == 2:
+        print("Generating config.ini ...")
+        config_ini = build_path / 'config.ini'
+        with config_ini.open('w') as outfile:
+            ignore = ('CONFIGURATION_H_VERSION', 'CONFIGURATION_ADV_H_VERSION', 'CONFIG_EXPORT')
+            filegrp = { 'Configuration.h':'config:basic', 'Configuration_adv.h':'config:advanced' }
+            vers = defines["CONFIGURATION_H_VERSION"]
+            dt_string = datetime.now().strftime("%Y-%m-%d at %H:%M:%S")
+            ini_fmt = '{0:40}{1}\n'
+            outfile.write(
+                '#\n'
+                + '# Marlin Firmware\n'
+                + '# config.ini - Options to apply before the build\n'
+                + '#\n'
+                + f'# Generated by Marlin build on {dt_string}\n'
+                + '#\n'
+                + '\n'
+                + '[config:base]\n'
+                + ini_fmt.format('ini_use_config', ' = all')
+                + ini_fmt.format('ini_config_vers', f' = {vers}')
+            )
+            # Loop through the data array of arrays
+            for header in data:
+                if header.startswith('__'):
+                    continue
+                outfile.write('\n[' + filegrp[header] + ']\n')
+                for key in sorted(data[header]):
+                    if key not in ignore:
+                        val = 'on' if data[header][key] == '' else data[header][key]
+                        outfile.write(ini_fmt.format(key.lower(), ' = ' + val))
+
+    #
+    # Produce a schema file if CONFIG_EXPORT >= 3 (3:JSON, 13:grouped JSON, 4:YAML)
+    #
+    if config_dump >= 3:
+        try:
+            conf_schema = schema.extract()
+        except Exception as exc:
+            print("Error: " + str(exc))
+            conf_schema = None
+
+        if conf_schema:
+            #
+            # Produce schema.json (and schema_grouped.json for 13) if CONFIG_EXPORT is 3 or 13
+            #
+            if config_dump in (3, 13):
+                print("Generating schema.json ...")
+                schema.dump_json(conf_schema, build_path / 'schema.json')
+                if config_dump == 13:
+                    schema.group_options(conf_schema)
+                    schema.dump_json(conf_schema, build_path / 'schema_grouped.json')
+
+            #
+            # Produce a schema.yml file if CONFIG_EXPORT == 4
+            #
+            elif config_dump == 4:
+                print("Generating schema.yml ...")
+                try:
+                    import yaml
+                except ImportError:
+                    env.Execute(env.VerboseAction(
+                        '$PYTHONEXE -m pip install "pyyaml"',
+                        "Installing YAML for schema.yml export",
+                    ))
+                    import yaml
+                schema.dump_yaml(conf_schema, build_path / 'schema.yml')
+
     # Append the source code version and date
     data['VERSION'] = {}
     data['VERSION']['DETAILED_BUILD_VERSION'] = resolved_defines['DETAILED_BUILD_VERSION']
@@ -156,10 +242,17 @@ def compute_build_signature(env):
         pass
 
     #
-    # Produce a JSON file for CONFIGURATION_EMBEDDING or CONFIG_DUMP > 0
+    # Produce a JSON file for CONFIGURATION_EMBEDDING or CONFIG_EXPORT == 1
     #
-    with open(marlin_json, 'w') as outfile:
-        json.dump(data, outfile, separators=(',', ':'))
+    if config_dump == 1 or 'CONFIGURATION_EMBEDDING' in defines:
+        with marlin_json.open('w') as outfile:
+            json.dump(data, outfile, separators=(',', ':'))
+
+    #
+    # The rest only applies to CONFIGURATION_EMBEDDING
+    #
+    if 'CONFIGURATION_EMBEDDING' not in defines:
+        return
 
     # Compress the JSON file as much as we can
     compress_file(marlin_json, marlin_zip)
@@ -173,11 +266,11 @@ def compute_build_signature(env):
             + b'const unsigned char mc_zip[] PROGMEM = {\n '
         )
         count = 0
-        for b in open(os.path.join(build_dir, 'mc.zip'), 
'rb').read(): + for b in (build_path / 'mc.zip').open('rb').read(): result_file.write(b' 0x%02X,' % b) count += 1 - if (count % 16 == 0): + if count % 16 == 0: result_file.write(b'\n ') - if (count % 16): + if count % 16: result_file.write(b'\n') result_file.write(b'};\n') diff --git a/buildroot/share/PlatformIO/scripts/simulator.py b/buildroot/share/PlatformIO/scripts/simulator.py index b3a8d67a666a..608258c4d17a 100644 --- a/buildroot/share/PlatformIO/scripts/simulator.py +++ b/buildroot/share/PlatformIO/scripts/simulator.py @@ -2,6 +2,7 @@ # simulator.py # PlatformIO pre: script for simulator builds # + import pioutil if pioutil.is_pio_build(): # Get the environment thus far for the build diff --git a/buildroot/share/dwin/bin/DWIN_ICO.py b/buildroot/share/dwin/bin/DWIN_ICO.py index 8ac680c61e7c..3ddc734022ee 100644 --- a/buildroot/share/dwin/bin/DWIN_ICO.py +++ b/buildroot/share/dwin/bin/DWIN_ICO.py @@ -144,7 +144,7 @@ def createFile(self, iconDir, filename): # process each file: try: index = int(dirEntry.name[0:3]) - if (index < 0) or (index > 255): + if not (0 <= index <= 255): print('...Ignoring invalid index on', dirEntry.path) continue #dirEntry.path is iconDir/name diff --git a/buildroot/share/dwin/bin/makeIco.py b/buildroot/share/dwin/bin/makeIco.py index 274082acee87..65e7eb53a58b 100755 --- a/buildroot/share/dwin/bin/makeIco.py +++ b/buildroot/share/dwin/bin/makeIco.py @@ -18,7 +18,6 @@ # along with this program. If not, see . #---------------------------------------------------------------- -import os import os.path import argparse import DWIN_ICO diff --git a/buildroot/share/dwin/bin/splitIco.py b/buildroot/share/dwin/bin/splitIco.py index ce6ba89749c9..a96d1823d22e 100755 --- a/buildroot/share/dwin/bin/splitIco.py +++ b/buildroot/share/dwin/bin/splitIco.py @@ -18,7 +18,6 @@ # along with this program. If not, see . #---------------------------------------------------------------- -import os import os.path import argparse import DWIN_ICO diff --git a/buildroot/share/scripts/createTemperatureLookupMarlin.py b/buildroot/share/scripts/createTemperatureLookupMarlin.py index 02981f1015aa..5902e91a87d6 100755 --- a/buildroot/share/scripts/createTemperatureLookupMarlin.py +++ b/buildroot/share/scripts/createTemperatureLookupMarlin.py @@ -50,7 +50,7 @@ def __init__(self, rp, t1, r1, t2, r2, t3, r3): if c < 0: print("//////////////////////////////////////////////////////////////////////////////////////") - print("// WARNING: negative coefficient 'c'! Something may be wrong with the measurements! //") + print("// WARNING: Negative coefficient 'c'! Something may be wrong with the measurements! 
//") print("//////////////////////////////////////////////////////////////////////////////////////") c = -c self.c1 = a # Steinhart-Hart coefficients @@ -93,8 +93,8 @@ def main(argv): r2 = 1641.9 # resistance at middle temperature (1.6 KOhm) t3 = 250 # high temperature in Kelvin (250 degC) r3 = 226.15 # resistance at high temperature (226.15 Ohm) - rp = 4700; # pull-up resistor (4.7 kOhm) - num_temps = 36; # number of entries for look-up table + rp = 4700 # pull-up resistor (4.7 kOhm) + num_temps = 36 # number of entries for look-up table try: opts, args = getopt.getopt(argv, "h", ["help", "rp=", "t1=", "t2=", "t3=", "num-temps="]) @@ -125,13 +125,13 @@ def main(argv): num_temps = int(arg) t = Thermistor(rp, t1, r1, t2, r2, t3, r3) - increment = int((ARES-1)/(num_temps-1)); - step = (TMIN-TMAX) / (num_temps-1) - low_bound = t.temp(ARES-1); - up_bound = t.temp(1); + increment = int((ARES - 1) / (num_temps - 1)) + step = int((TMIN - TMAX) / (num_temps - 1)) + low_bound = t.temp(ARES - 1) + up_bound = t.temp(1) min_temp = int(TMIN if TMIN > low_bound else low_bound) max_temp = int(TMAX if TMAX < up_bound else up_bound) - temps = list(range(max_temp, TMIN+step, step)); + temps = list(range(max_temp, TMIN + step, step)) print("// Thermistor lookup table for Marlin") print("// ./createTemperatureLookupMarlin.py --rp=%s --t1=%s:%s --t2=%s:%s --t3=%s:%s --num-temps=%s" % (rp, t1, r1, t2, r2, t3, r3, num_temps)) diff --git a/buildroot/share/scripts/gen-tft-image.py b/buildroot/share/scripts/gen-tft-image.py index ddbab236ee91..f3786aef706c 100644 --- a/buildroot/share/scripts/gen-tft-image.py +++ b/buildroot/share/scripts/gen-tft-image.py @@ -22,8 +22,8 @@ # Generate Marlin TFT Images from bitmaps/PNG/JPG -import sys,re,struct -from PIL import Image,ImageDraw +import sys,struct +from PIL import Image def image2bin(image, output_file): if output_file.endswith(('.c', '.cpp')): diff --git a/buildroot/share/scripts/upload.py b/buildroot/share/scripts/upload.py index 52fa1abc549b..af15a825906e 100644 --- a/buildroot/share/scripts/upload.py +++ b/buildroot/share/scripts/upload.py @@ -189,9 +189,7 @@ def _RollbackUpload(FirmwareFile): 'BOARD_CREALITY_V427', 'BOARD_CREALITY_V431', 'BOARD_CREALITY_V452', 'BOARD_CREALITY_V453', 'BOARD_CREALITY_V24S1'] # "upload_random_name": generate a random 8.3 firmware filename to upload - upload_random_filename = marlin_motherboard in ['BOARD_CREALITY_V4', 'BOARD_CREALITY_V4210', 'BOARD_CREALITY_V422', 'BOARD_CREALITY_V423', - 'BOARD_CREALITY_V427', 'BOARD_CREALITY_V431', 'BOARD_CREALITY_V452', 'BOARD_CREALITY_V453', - 'BOARD_CREALITY_V24S1'] and not marlin_long_filename_host_support + upload_random_filename = upload_delete_old_bins and not marlin_long_filename_host_support try: @@ -304,7 +302,7 @@ def _RollbackUpload(FirmwareFile): except KeyboardInterrupt: print('Aborted by user') if filetransfer: filetransfer.abort() - if protocol: + if protocol: protocol.disconnect() protocol.shutdown() _RollbackUpload(upload_firmware_target_name) @@ -314,7 +312,7 @@ def _RollbackUpload(FirmwareFile): except serial.SerialException as se: # This exception is raised only for send_ascii data (not for binary transfer) print(f'Serial excepion: {se}, transfer aborted') - if protocol: + if protocol: protocol.disconnect() protocol.shutdown() _RollbackUpload(upload_firmware_target_name) @@ -323,7 +321,7 @@ def _RollbackUpload(FirmwareFile): except MarlinBinaryProtocol.FatalError: print('Too many retries, transfer aborted') - if protocol: + if protocol: protocol.disconnect() 
protocol.shutdown() _RollbackUpload(upload_firmware_target_name) @@ -332,7 +330,7 @@ def _RollbackUpload(FirmwareFile): except Exception as ex: print(f"\nException: {ex}, transfer aborted") - if protocol: + if protocol: protocol.disconnect() protocol.shutdown() _RollbackUpload(upload_firmware_target_name) diff --git a/buildroot/share/vscode/auto_build.py b/buildroot/share/vscode/auto_build.py index 5bd769478e0e..31ef2715515b 100644 --- a/buildroot/share/vscode/auto_build.py +++ b/buildroot/share/vscode/auto_build.py @@ -252,7 +252,7 @@ def resolve_path(path): while 0 <= path.find('../'): end = path.find('../') - 1 start = path.find('/') - while 0 <= path.find('/', start) and end > path.find('/', start): + while 0 <= path.find('/', start) < end: start = path.find('/', start) + 1 path = path[0:start] + path[end + 4:] @@ -674,7 +674,7 @@ def write_to_screen_with_replace(text, highlights): # search for highlights & s if 0 == highlight[1]: found_1 = text.find(' ') found_tab = text.find('\t') - if found_1 < 0 or found_1 > found_tab: + if not (0 <= found_1 <= found_tab): found_1 = found_tab write_to_screen_queue(text[:found_1 + 1]) for highlight_2 in highlights: @@ -684,7 +684,7 @@ def write_to_screen_with_replace(text, highlights): # search for highlights & s if found >= 0: found_space = text.find(' ', found_1 + 1) found_tab = text.find('\t', found_1 + 1) - if found_space < 0 or found_space > found_tab: + if not (0 <= found_space <= found_tab): found_space = found_tab found_right = text.find(']', found + 1) write_to_screen_queue(text[found_1 + 1:found_space + 1], highlight[2]) @@ -701,7 +701,7 @@ def write_to_screen_with_replace(text, highlights): # search for highlights & s break if did_something == False: r_loc = text.find('\r') + 1 - if r_loc > 0 and r_loc < len(text): # need to split this line + if 0 < r_loc < len(text): # need to split this line text = text.split('\r') for line in text: if line != '': diff --git a/buildroot/share/vscode/create_custom_upload_command_CDC.py b/buildroot/share/vscode/create_custom_upload_command_CDC.py index 4662dd26cb49..4926faf06a68 100644 --- a/buildroot/share/vscode/create_custom_upload_command_CDC.py +++ b/buildroot/share/vscode/create_custom_upload_command_CDC.py @@ -13,7 +13,7 @@ from __future__ import print_function from __future__ import division -import subprocess,os,sys,platform +import subprocess,os,platform from SCons.Script import DefaultEnvironment current_OS = platform.system() diff --git a/buildroot/share/vscode/create_custom_upload_command_DFU.py b/buildroot/share/vscode/create_custom_upload_command_DFU.py index 562e284e63c2..27c5a34802f8 100644 --- a/buildroot/share/vscode/create_custom_upload_command_DFU.py +++ b/buildroot/share/vscode/create_custom_upload_command_DFU.py @@ -9,7 +9,7 @@ # Will continue on if a COM port isn't found so that the compilation can be done. 
# -import os,sys +import os from SCons.Script import DefaultEnvironment import platform current_OS = platform.system() diff --git a/ini/esp32.ini b/ini/esp32.ini index 4f5cd27c2942..f12ef99759cd 100644 --- a/ini/esp32.ini +++ b/ini/esp32.ini @@ -20,6 +20,7 @@ build_src_filter = ${common.default_src_filter} + lib_ignore = NativeEthernet upload_speed = 500000 monitor_speed = 250000 +monitor_filters = colorize, time, send_on_enter, log2file, esp32_exception_decoder #upload_port = marlinesp.local #board_build.flash_mode = qio diff --git a/platformio.ini b/platformio.ini index 3820e7019389..bcace14d28d5 100644 --- a/platformio.ini +++ b/platformio.ini @@ -16,6 +16,7 @@ boards_dir = buildroot/share/PlatformIO/boards default_envs = mega2560 include_dir = Marlin extra_configs = + Marlin/config.ini ini/avr.ini ini/due.ini ini/esp32.ini @@ -44,6 +45,7 @@ extra_configs = build_flags = -g3 -D__MARLIN_FIRMWARE__ -DNDEBUG -fmax-errors=5 extra_scripts = + pre:buildroot/share/PlatformIO/scripts/configuration.py pre:buildroot/share/PlatformIO/scripts/common-dependencies.py pre:buildroot/share/PlatformIO/scripts/common-cxxflags.py pre:buildroot/share/PlatformIO/scripts/preflight-checks.py @@ -267,17 +269,10 @@ framework = arduino extra_scripts = ${common.extra_scripts} build_flags = ${common.build_flags} lib_deps = ${common.lib_deps} -platform_packages = platformio/tool-dfuutil@^1.11.0 monitor_speed = 250000 -monitor_flags = - --quiet - --echo - --eol - LF - --filter - colorize - --filter - time +monitor_eol = LF +monitor_echo = yes +monitor_filters = colorize, time, send_on_enter # # Just print the dependency tree
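
A few notes on the machinery added above, with small Python sketches.

The #if bookkeeping in schema.py is easiest to follow standalone. This mirrors the pop/negate/append/re-push steps described in the script's own comments; the directive handlers and condition strings here are illustrative, not the script's actual parsing:

    # Minimal sketch of schema.py's conditions stack: one condition-array per
    # nesting level; a define's 'requires' is the AND of the flattened stack.
    conditions = []

    def on_if(expr):    conditions.append([expr])
    def on_endif():     conditions.pop()
    def on_else():
        prev = conditions.pop()
        prev[-1] = '!' + prev[-1]       # negate the branch that just closed
        conditions.append(prev)
    def on_elif(expr):
        on_else()
        conditions[-1].append(expr)     # then add the new branch condition

    on_if('ENABLED(PIDTEMP)')           # #if ENABLED(PIDTEMP)
    on_elif('ENABLED(MPCTEMP)')         # #elif ENABLED(MPCTEMP)
    print(' && '.join(sum(conditions, [])))
    # -> !ENABLED(PIDTEMP) && ENABLED(MPCTEMP)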
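The atomize() helper keeps those joined 'requires' strings readable by parenthesizing only when an expression is not already atomic. Per its three patterns (the inputs below are illustrative):

    atomize('')                   # -> ''
    atomize('ENABLED(PIDTEMP)')   # -> 'ENABLED(PIDTEMP)'     (bare name or call)
    atomize('SERIAL_PORT == 0')   # -> 'SERIAL_PORT == 0'     (name == integer)
    atomize('TEMP_SENSOR_0 > 0')  # -> '(TEMP_SENSOR_0 > 0)'  (anything else)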
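schema.py is also usable outside the PlatformIO build: main() accepts one of json, jsons, group, yml/yaml, or the default some. The same helpers can be driven directly; a minimal sketch, assuming the script's folder is on sys.path and the working directory is a Marlin tree so the Configuration files can be found:

    from pathlib import Path
    import schema

    s = schema.extract()                              # parse the Configuration files
    schema.dump_json(s, Path('schema.json'))          # like `schema.py json`
    schema.group_options(s)                           # merge names into wildcard groups
    schema.dump_json(s, Path('schema_grouped.json'))  # like `schema.py group`
    schema.dump_yaml(s, Path('schema.yml'))           # like `schema.py yml`; needs pyyaml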
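In signature.py, the CONFIG_EXPORT value selects which artifact lands in the PlatformIO build folder for the current environment; a CONFIGURATION_EMBEDDING build writes marlin_config.json regardless of the value. Restating the dispatch implemented above:

    # CONFIG_EXPORT -> build-folder artifact, per compute_build_signature()
    CONFIG_EXPORT_OUTPUTS = {
        1:  'marlin_config.json',                 # flat JSON of the build's options
        2:  'config.ini',                         # loadable later via ini_use_config
        3:  'schema.json',                        # full schema
        4:  'schema.yml',                         # full schema as YAML
        13: 'schema.json + schema_grouped.json',  # schema plus the grouped variant
    }

The INI writer pads each key into a 40-column field (ini_fmt = '{0:40}{1}\n'), so ini_fmt.format('serial_port', ' = 0') produces an aligned serial_port entry.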
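For CONFIGURATION_EMBEDDING, the script then bzip2-compresses marlin_config.json into mc.zip and emits it as a PROGMEM byte array, sixteen bytes per row. A reduced sketch of that path; the real target header is created earlier in signature.py and does not appear in these hunks, so header_path here is illustrative:

    import zipfile
    from pathlib import Path

    def embed_config(json_path: Path, zip_path: Path, header_path: Path):
        # Compress with BZIP2 at maximum level, as compress_file() does above
        with zipfile.ZipFile(zip_path, 'w', compression=zipfile.ZIP_BZIP2, compresslevel=9) as zipf:
            zipf.write(json_path, compress_type=zipfile.ZIP_BZIP2, compresslevel=9)
        # Dump the zip as a C byte array, 16 bytes per row
        with header_path.open('wb') as out:
            out.write(b'const unsigned char mc_zip[] PROGMEM = {\n ')
            count = 0
            for b in zip_path.open('rb').read():
                out.write(b' 0x%02X,' % b)
                count += 1
                if count % 16 == 0: out.write(b'\n ')
            if count % 16: out.write(b'\n')
            out.write(b'};\n')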
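Finally, the createTemperatureLookupMarlin.py edits are part correctness, part style: dropping the stray semicolons is cosmetic, but the int() around step matters because Python 3's true division always yields a float and range() rejects float arguments. Illustratively:

    list(range(250, 25, -7.1))       # TypeError: 'float' object cannot be interpreted as an integer
    list(range(250, 25, int(-7.1)))  # OK: [250, 243, 236, ...]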