Compare commits

..

8 Commits

Author SHA1 Message Date
Scott Lahteine
743d3100fa 🔨 [2.0.9.7] Use older chitu_crypt.py 2023-07-04 00:34:26 -05:00
Giuliano Zaro
5ac0c72bac 🐛 Fix G2/G3 limiting_speed (#25988) 2023-06-17 15:56:25 -05:00
Scott Lahteine
a8cf849bbb 🔨 Use 'build_src_filter' 2023-05-16 19:50:22 -05:00
Scott Lahteine
e7186fc33e 🔨 Fix thread-unsafe deque iteration 2023-05-16 15:50:48 -05:00
Scott Lahteine
41593c4d1a 🔨 Newer PlatformIO support 2023-04-30 18:19:57 -05:00
Scott Lahteine
284cc8f4c0 🧑‍💻 Update Python indentation 2023-03-25 20:47:31 -05:00
ellensp
d02d7d26c8 🩹 Add missing FORCE_SOFT_SPI (#24940) 2023-03-25 20:47:31 -05:00
Scott Lahteine
aaa4e8ecdb 🐛 Fix recalculate_max_e_jerk 2022-11-09 20:57:22 -06:00
52 changed files with 2594 additions and 1614 deletions

View File

@@ -14,6 +14,10 @@ end_of_line = lf
indent_style = space indent_style = space
indent_size = 2 indent_size = 2
[{*.py,*.conf,*.sublime-project}] [{*.py}]
indent_style = space
indent_size = 4
[{*.conf,*.sublime-project}]
indent_style = tab indent_style = tab
indent_size = 4 indent_size = 4

View File

@@ -28,7 +28,7 @@
/** /**
* Marlin release version identifier * Marlin release version identifier
*/ */
//#define SHORT_BUILD_VERSION "2.0.9.5" //#define SHORT_BUILD_VERSION "2.0.9.7"
/** /**
* Verbose version identifier which should contain a reference to the location * Verbose version identifier which should contain a reference to the location
@@ -41,7 +41,7 @@
* here we define this default string as the date where the latest release * here we define this default string as the date where the latest release
* version was tagged. * version was tagged.
*/ */
//#define STRING_DISTRIBUTION_DATE "2022-07-29" //#define STRING_DISTRIBUTION_DATE "2023-07-04"
/** /**
* Defines a generic printer name to be output to the LCD after booting Marlin. * Defines a generic printer name to be output to the LCD after booting Marlin.

211
Marlin/config.ini Normal file
View File

@@ -0,0 +1,211 @@
#
# Marlin Firmware
# config.ini - Options to apply before the build
#
[config:base]
ini_use_config = none
# Load all config: sections in this file
;ini_use_config = all
# Load config file relative to Marlin/
;ini_use_config = another.ini
# Download configurations from GitHub
;ini_use_config = example/Creality/Ender-5 Plus @ bugfix-2.1.x
# Download configurations from your server
;ini_use_config = https://me.myserver.com/path/to/configs
# Evaluate config:base and do a config dump
;ini_use_config = base
;config_export = 2
[config:minimal]
motherboard = BOARD_RAMPS_14_EFB
serial_port = 0
baudrate = 250000
use_watchdog = on
thermal_protection_hotends = on
thermal_protection_hysteresis = 4
thermal_protection_period = 40
bufsize = 4
block_buffer_size = 16
max_cmd_size = 96
extruders = 1
temp_sensor_0 = 1
temp_hysteresis = 3
heater_0_mintemp = 5
heater_0_maxtemp = 275
preheat_1_temp_hotend = 180
bang_max = 255
pidtemp = on
pid_k1 = 0.95
pid_max = BANG_MAX
pid_functional_range = 10
default_kp = 22.20
default_ki = 1.08
default_kd = 114.00
x_driver_type = A4988
y_driver_type = A4988
z_driver_type = A4988
e0_driver_type = A4988
x_bed_size = 200
x_min_pos = 0
x_max_pos = X_BED_SIZE
y_bed_size = 200
y_min_pos = 0
y_max_pos = Y_BED_SIZE
z_min_pos = 0
z_max_pos = 200
x_home_dir = -1
y_home_dir = -1
z_home_dir = -1
use_xmin_plug = on
use_ymin_plug = on
use_zmin_plug = on
x_min_endstop_inverting = false
y_min_endstop_inverting = false
z_min_endstop_inverting = false
default_axis_steps_per_unit = { 80, 80, 400, 500 }
axis_relative_modes = { false, false, false, false }
default_max_feedrate = { 300, 300, 5, 25 }
default_max_acceleration = { 3000, 3000, 100, 10000 }
homing_feedrate_mm_m = { (50*60), (50*60), (4*60) }
homing_bump_divisor = { 2, 2, 4 }
x_enable_on = 0
y_enable_on = 0
z_enable_on = 0
e_enable_on = 0
invert_x_dir = false
invert_y_dir = true
invert_z_dir = false
invert_e0_dir = false
invert_e_step_pin = false
invert_x_step_pin = false
invert_y_step_pin = false
invert_z_step_pin = false
disable_x = false
disable_y = false
disable_z = false
disable_e = false
proportional_font_ratio = 1.0
default_nominal_filament_dia = 1.75
junction_deviation_mm = 0.013
default_acceleration = 3000
default_travel_acceleration = 3000
default_retract_acceleration = 3000
default_minimumfeedrate = 0.0
default_mintravelfeedrate = 0.0
minimum_planner_speed = 0.05
min_steps_per_segment = 6
default_minsegmenttime = 20000
[config:basic]
bed_overshoot = 10
busy_while_heating = on
default_ejerk = 5.0
default_keepalive_interval = 2
default_leveling_fade_height = 0.0
disable_inactive_extruder = on
display_charset_hd44780 = JAPANESE
eeprom_boot_silent = on
eeprom_chitchat = on
endstoppullups = on
extrude_maxlength = 200
extrude_mintemp = 170
host_keepalive_feature = on
hotend_overshoot = 15
jd_handle_small_segments = on
lcd_info_screen_style = 0
lcd_language = en
max_bed_power = 255
mesh_inset = 0
min_software_endstops = on
max_software_endstops = on
min_software_endstop_x = on
min_software_endstop_y = on
min_software_endstop_z = on
max_software_endstop_x = on
max_software_endstop_y = on
max_software_endstop_z = on
preheat_1_fan_speed = 0
preheat_1_label = "PLA"
preheat_1_temp_bed = 70
prevent_cold_extrusion = on
prevent_lengthy_extrude = on
printjob_timer_autostart = on
probing_margin = 10
show_bootscreen = on
soft_pwm_scale = 0
string_config_h_author = "(none, default config)"
temp_bed_hysteresis = 3
temp_bed_residency_time = 10
temp_bed_window = 1
temp_residency_time = 10
temp_window = 1
validate_homing_endstops = on
xy_probe_feedrate = (133*60)
z_clearance_between_probes = 5
z_clearance_deploy_probe = 10
z_clearance_multi_probe = 5
[config:advanced]
arc_support = on
auto_report_temperatures = on
autotemp = on
autotemp_oldweight = 0.98
bed_check_interval = 5000
default_stepper_deactive_time = 120
default_volumetric_extruder_limit = 0.00
disable_inactive_e = true
disable_inactive_x = true
disable_inactive_y = true
disable_inactive_z = true
e0_auto_fan_pin = -1
encoder_100x_steps_per_sec = 80
encoder_10x_steps_per_sec = 30
encoder_rate_multiplier = on
extended_capabilities_report = on
extruder_auto_fan_speed = 255
extruder_auto_fan_temperature = 50
fanmux0_pin = -1
fanmux1_pin = -1
fanmux2_pin = -1
faster_gcode_parser = on
homing_bump_mm = { 5, 5, 2 }
max_arc_segment_mm = 1.0
min_arc_segment_mm = 0.1
min_circle_segments = 72
n_arc_correction = 25
serial_overrun_protection = on
slowdown = on
slowdown_divisor = 2
temp_sensor_bed = 0
thermal_protection_bed_hysteresis = 2
thermocouple_max_errors = 15
tx_buffer_size = 0
watch_bed_temp_increase = 2
watch_bed_temp_period = 60
watch_temp_increase = 2
watch_temp_period = 20

View File

@@ -6,14 +6,14 @@
# #
import pioutil import pioutil
if pioutil.is_pio_build(): if pioutil.is_pio_build():
import platform import platform
current_OS = platform.system() current_OS = platform.system()
if current_OS == 'Windows': if current_OS == 'Windows':
Import("env") Import("env")
# Use bossac.exe on Windows # Use bossac.exe on Windows
env.Replace( env.Replace(
UPLOADCMD="bossac --info --unlock --write --verify --reset --erase -U false --boot $SOURCE" UPLOADCMD="bossac --info --unlock --write --verify --reset --erase -U false --boot $SOURCE"
) )

View File

@@ -9,119 +9,127 @@ from __future__ import print_function
import pioutil import pioutil
if pioutil.is_pio_build(): if pioutil.is_pio_build():
target_filename = "FIRMWARE.CUR" target_filename = "FIRMWARE.CUR"
target_drive = "REARM" target_drive = "REARM"
import os,getpass,platform import platform
current_OS = platform.system() current_OS = platform.system()
Import("env") Import("env")
def print_error(e): def print_error(e):
print('\nUnable to find destination disk (%s)\n' \ print('\nUnable to find destination disk (%s)\n' \
'Please select it in platformio.ini using the upload_port keyword ' \ 'Please select it in platformio.ini using the upload_port keyword ' \
'(https://docs.platformio.org/en/latest/projectconf/section_env_upload.html) ' \ '(https://docs.platformio.org/en/latest/projectconf/section_env_upload.html) ' \
'or copy the firmware (.pio/build/%s/firmware.bin) manually to the appropriate disk\n' \ 'or copy the firmware (.pio/build/%s/firmware.bin) manually to the appropriate disk\n' \
%(e, env.get('PIOENV'))) %(e, env.get('PIOENV')))
def before_upload(source, target, env): def before_upload(source, target, env):
try: try:
# from pathlib import Path
# Find a disk for upload #
# # Find a disk for upload
upload_disk = 'Disk not found' #
target_file_found = False upload_disk = 'Disk not found'
target_drive_found = False target_file_found = False
if current_OS == 'Windows': target_drive_found = False
# if current_OS == 'Windows':
# platformio.ini will accept this for a Windows upload port designation: 'upload_port = L:' #
# Windows - doesn't care about the disk's name, only cares about the drive letter # platformio.ini will accept this for a Windows upload port designation: 'upload_port = L:'
import subprocess,string # Windows - doesn't care about the disk's name, only cares about the drive letter
from ctypes import windll import subprocess,string
from ctypes import windll
from pathlib import PureWindowsPath
# getting list of drives # getting list of drives
# https://stackoverflow.com/questions/827371/is-there-a-way-to-list-all-the-available-drive-letters-in-python # https://stackoverflow.com/questions/827371/is-there-a-way-to-list-all-the-available-drive-letters-in-python
drives = [] drives = []
bitmask = windll.kernel32.GetLogicalDrives() bitmask = windll.kernel32.GetLogicalDrives()
for letter in string.ascii_uppercase: for letter in string.ascii_uppercase:
if bitmask & 1: if bitmask & 1:
drives.append(letter) drives.append(letter)
bitmask >>= 1 bitmask >>= 1
for drive in drives: for drive in drives:
final_drive_name = drive + ':\\' final_drive_name = drive + ':'
# print ('disc check: {}'.format(final_drive_name)) # print ('disc check: {}'.format(final_drive_name))
try: try:
volume_info = str(subprocess.check_output('cmd /C dir ' + final_drive_name, stderr=subprocess.STDOUT)) volume_info = str(subprocess.check_output('cmd /C dir ' + final_drive_name, stderr=subprocess.STDOUT))
except Exception as e: except Exception as e:
print ('error:{}'.format(e)) print ('error:{}'.format(e))
continue continue
else: else:
if target_drive in volume_info and not target_file_found: # set upload if not found target file yet if target_drive in volume_info and not target_file_found: # set upload if not found target file yet
target_drive_found = True target_drive_found = True
upload_disk = final_drive_name upload_disk = PureWindowsPath(final_drive_name)
if target_filename in volume_info: if target_filename in volume_info:
if not target_file_found: if not target_file_found:
upload_disk = final_drive_name upload_disk = PureWindowsPath(final_drive_name)
target_file_found = True target_file_found = True
elif current_OS == 'Linux': elif current_OS == 'Linux':
# #
# platformio.ini will accept this for a Linux upload port designation: 'upload_port = /media/media_name/drive' # platformio.ini will accept this for a Linux upload port designation: 'upload_port = /media/media_name/drive'
# #
drives = os.listdir(os.path.join(os.sep, 'media', getpass.getuser())) import getpass
if target_drive in drives: # If target drive is found, use it. user = getpass.getuser()
target_drive_found = True mpath = Path('/media', user)
upload_disk = os.path.join(os.sep, 'media', getpass.getuser(), target_drive) + os.sep drives = [ x for x in mpath.iterdir() if x.is_dir() ]
else: if target_drive in drives: # If target drive is found, use it.
for drive in drives: target_drive_found = True
try: upload_disk = mpath / target_drive
files = os.listdir(os.path.join(os.sep, 'media', getpass.getuser(), drive)) else:
except: for drive in drives:
continue try:
else: fpath = mpath / drive
if target_filename in files: filenames = [ x.name for x in fpath.iterdir() if x.is_file() ]
upload_disk = os.path.join(os.sep, 'media', getpass.getuser(), drive) + os.sep except:
target_file_found = True continue
break else:
# if target_filename in filenames:
# set upload_port to drive if found upload_disk = mpath / drive
# target_file_found = True
break
#
# set upload_port to drive if found
#
if target_file_found or target_drive_found: if target_file_found or target_drive_found:
env.Replace( env.Replace(
UPLOAD_FLAGS="-P$UPLOAD_PORT" UPLOAD_FLAGS="-P$UPLOAD_PORT"
) )
elif current_OS == 'Darwin': # MAC elif current_OS == 'Darwin': # MAC
# #
# platformio.ini will accept this for a OSX upload port designation: 'upload_port = /media/media_name/drive' # platformio.ini will accept this for a OSX upload port designation: 'upload_port = /media/media_name/drive'
# #
drives = os.listdir('/Volumes') # human readable names dpath = Path('/Volumes') # human readable names
if target_drive in drives and not target_file_found: # set upload if not found target file yet drives = [ x for x in dpath.iterdir() if x.is_dir() ]
target_drive_found = True if target_drive in drives and not target_file_found: # set upload if not found target file yet
upload_disk = '/Volumes/' + target_drive + '/' target_drive_found = True
for drive in drives: upload_disk = dpath / target_drive
try: for drive in drives:
filenames = os.listdir('/Volumes/' + drive + '/') # will get an error if the drive is protected try:
except: fpath = dpath / drive # will get an error if the drive is protected
continue filenames = [ x.name for x in fpath.iterdir() if x.is_file() ]
else: except:
if target_filename in filenames: continue
if not target_file_found: else:
upload_disk = '/Volumes/' + drive + '/' if target_filename in filenames:
target_file_found = True upload_disk = dpath / drive
target_file_found = True
break
# #
# Set upload_port to drive if found # Set upload_port to drive if found
# #
if target_file_found or target_drive_found: if target_file_found or target_drive_found:
env.Replace(UPLOAD_PORT=upload_disk) env.Replace(UPLOAD_PORT=str(upload_disk))
print('\nUpload disk: ', upload_disk, '\n') print('\nUpload disk: ', upload_disk, '\n')
else: else:
print_error('Autodetect Error') print_error('Autodetect Error')
except Exception as e: except Exception as e:
print_error(str(e)) print_error(str(e))
env.AddPreAction("upload", before_upload) env.AddPreAction("upload", before_upload)

View File

@@ -269,7 +269,7 @@ void plan_arc(
// d) allows the print head to stop in the remining length of the curve within all configured maximum accelerations. // d) allows the print head to stop in the remining length of the curve within all configured maximum accelerations.
// The last has to be calculated every time through the loop. // The last has to be calculated every time through the loop.
const float limiting_accel = _MIN(planner.settings.max_acceleration_mm_per_s2[axis_p], planner.settings.max_acceleration_mm_per_s2[axis_q]), const float limiting_accel = _MIN(planner.settings.max_acceleration_mm_per_s2[axis_p], planner.settings.max_acceleration_mm_per_s2[axis_q]),
limiting_speed = _MIN(planner.settings.max_feedrate_mm_s[axis_p], planner.settings.max_acceleration_mm_per_s2[axis_q]), limiting_speed = _MIN(planner.settings.max_feedrate_mm_s[axis_p], planner.settings.max_feedrate_mm_s[axis_q]),
limiting_speed_sqr = _MIN(sq(limiting_speed), limiting_accel * radius, sq(scaled_fr_mm_s)); limiting_speed_sqr = _MIN(sq(limiting_speed), limiting_accel * radius, sq(scaled_fr_mm_s));
float arc_mm_remaining = flat_mm; float arc_mm_remaining = flat_mm;

View File

@@ -25,7 +25,7 @@
* Release version. Leave the Marlin version or apply a custom scheme. * Release version. Leave the Marlin version or apply a custom scheme.
*/ */
#ifndef SHORT_BUILD_VERSION #ifndef SHORT_BUILD_VERSION
#define SHORT_BUILD_VERSION "2.0.9.5" #define SHORT_BUILD_VERSION "2.0.9.7"
#endif #endif
/** /**
@@ -42,7 +42,7 @@
* version was tagged. * version was tagged.
*/ */
#ifndef STRING_DISTRIBUTION_DATE #ifndef STRING_DISTRIBUTION_DATE
#define STRING_DISTRIBUTION_DATE "2022-07-29" #define STRING_DISTRIBUTION_DATE "2023-07-04"
#endif #endif
/** /**

View File

@@ -987,7 +987,7 @@ class Planner {
FORCE_INLINE static void recalculate_max_e_jerk() { FORCE_INLINE static void recalculate_max_e_jerk() {
const float prop = junction_deviation_mm * SQRT(0.5) / (1.0f - SQRT(0.5)); const float prop = junction_deviation_mm * SQRT(0.5) / (1.0f - SQRT(0.5));
EXTRUDER_LOOP() EXTRUDER_LOOP()
max_e_jerk[E_INDEX_N(e)] = SQRT(prop * settings.max_acceleration_mm_per_s2[E_INDEX_N(e)]); max_e_jerk[E_INDEX_N(e)] = SQRT(prop * settings.max_acceleration_mm_per_s2[E_AXIS_N(e)]);
} }
#endif #endif

View File

@@ -423,6 +423,7 @@
#define DOGLCD_A0 EXP1_07_PIN #define DOGLCD_A0 EXP1_07_PIN
#define DOGLCD_SCK EXP2_02_PIN #define DOGLCD_SCK EXP2_02_PIN
#define DOGLCD_MOSI EXP2_06_PIN #define DOGLCD_MOSI EXP2_06_PIN
#define FORCE_SOFT_SPI
#elif ENABLED(ENDER2_STOCKDISPLAY) #elif ENABLED(ENDER2_STOCKDISPLAY)

View File

@@ -5,7 +5,7 @@
# Examples: # Examples:
# use_example_configs # use_example_configs
# use_example_configs Creality/CR-10/CrealityV1 # use_example_configs Creality/CR-10/CrealityV1
# use_example_configs release-2.0.9.5:Creality/CR-10/CrealityV1 # use_example_configs release-2.0.9.7:Creality/CR-10/CrealityV1
# #
# If a configpath has spaces (or quotes) escape them or enquote the path # If a configpath has spaces (or quotes) escape them or enquote the path
# #

View File

@@ -4,17 +4,17 @@
# #
import pioutil import pioutil
if pioutil.is_pio_build(): if pioutil.is_pio_build():
from os.path import join, isfile from os.path import join, isfile
import shutil import shutil
Import("env") Import("env")
mf = env["MARLIN_FEATURES"] mf = env["MARLIN_FEATURES"]
rxBuf = mf["RX_BUFFER_SIZE"] if "RX_BUFFER_SIZE" in mf else "0" rxBuf = mf["RX_BUFFER_SIZE"] if "RX_BUFFER_SIZE" in mf else "0"
txBuf = mf["TX_BUFFER_SIZE"] if "TX_BUFFER_SIZE" in mf else "0" txBuf = mf["TX_BUFFER_SIZE"] if "TX_BUFFER_SIZE" in mf else "0"
serialBuf = str(max(int(rxBuf), int(txBuf), 350)) serialBuf = str(max(int(rxBuf), int(txBuf), 350))
build_flags = env.get('BUILD_FLAGS') build_flags = env.get('BUILD_FLAGS')
build_flags.append("-DSERIAL_BUFFER_SIZE=" + serialBuf) build_flags.append("-DSERIAL_BUFFER_SIZE=" + serialBuf)
env.Replace(BUILD_FLAGS=build_flags) env.Replace(BUILD_FLAGS=build_flags)

View File

@@ -4,17 +4,16 @@
import pioutil import pioutil
if pioutil.is_pio_build(): if pioutil.is_pio_build():
import os Import("env", "projenv")
Import("env", "projenv")
flash_size = 0 flash_size = 0
vect_tab_addr = 0 vect_tab_addr = 0
for define in env['CPPDEFINES']: for define in env['CPPDEFINES']:
if define[0] == "VECT_TAB_ADDR": if define[0] == "VECT_TAB_ADDR":
vect_tab_addr = define[1] vect_tab_addr = define[1]
if define[0] == "STM32_FLASH_SIZE": if define[0] == "STM32_FLASH_SIZE":
flash_size = define[1] flash_size = define[1]
print('Use the {0:s} address as the marlin app entry point.'.format(vect_tab_addr)) print('Use the {0:s} address as the marlin app entry point.'.format(vect_tab_addr))
print('Use the {0:d}KB flash version of stm32f103rct6 chip.'.format(flash_size)) print('Use the {0:d}KB flash version of stm32f103rct6 chip.'.format(flash_size))

View File

@@ -3,26 +3,25 @@
# #
import pioutil import pioutil
if pioutil.is_pio_build(): if pioutil.is_pio_build():
import os from os.path import join
from os.path import join from os.path import expandvars
from os.path import expandvars Import("env")
Import("env")
# Custom HEX from ELF # Custom HEX from ELF
env.AddPostAction( env.AddPostAction(
join("$BUILD_DIR", "${PROGNAME}.elf"), join("$BUILD_DIR", "${PROGNAME}.elf"),
env.VerboseAction(" ".join([ env.VerboseAction(" ".join([
"$OBJCOPY", "-O ihex", "$TARGET", "$OBJCOPY", "-O ihex", "$TARGET",
"\"" + join("$BUILD_DIR", "${PROGNAME}.hex") + "\"", # Note: $BUILD_DIR is a full path "\"" + join("$BUILD_DIR", "${PROGNAME}.hex") + "\"", # Note: $BUILD_DIR is a full path
]), "Building $TARGET")) ]), "Building $TARGET"))
# In-line command with arguments # In-line command with arguments
UPLOAD_TOOL="stm32flash" UPLOAD_TOOL="stm32flash"
platform = env.PioPlatform() platform = env.PioPlatform()
if platform.get_package_dir("tool-stm32duino") != None: if platform.get_package_dir("tool-stm32duino") != None:
UPLOAD_TOOL=expandvars("\"" + join(platform.get_package_dir("tool-stm32duino"),"stm32flash","stm32flash") + "\"") UPLOAD_TOOL=expandvars("\"" + join(platform.get_package_dir("tool-stm32duino"),"stm32flash","stm32flash") + "\"")
env.Replace( env.Replace(
UPLOADER=UPLOAD_TOOL, UPLOADER=UPLOAD_TOOL,
UPLOADCMD=expandvars(UPLOAD_TOOL + " -v -i rts,-dtr,dtr -R -b 115200 -g 0x8000000 -w \"" + join("$BUILD_DIR","${PROGNAME}.hex")+"\"" + " $UPLOAD_PORT") UPLOADCMD=expandvars(UPLOAD_TOOL + " -v -i rts,-dtr,dtr -R -b 115200 -g 0x8000000 -w \"" + join("$BUILD_DIR","${PROGNAME}.hex")+"\"" + " $UPLOAD_PORT")
) )

View File

@@ -3,30 +3,29 @@
# #
import pioutil import pioutil
if pioutil.is_pio_build(): if pioutil.is_pio_build():
import os,shutil,marlin import shutil,marlin
from SCons.Script import DefaultEnvironment from pathlib import Path
from platformio import util
env = DefaultEnvironment() Import("env")
platform = env.PioPlatform() platform = env.PioPlatform()
board = env.BoardConfig() board = env.BoardConfig()
FRAMEWORK_DIR = platform.get_package_dir("framework-arduinoststm32-maple") FRAMEWORK_DIR = Path(platform.get_package_dir("framework-arduinoststm32-maple"))
assert os.path.isdir(FRAMEWORK_DIR) assert FRAMEWORK_DIR.is_dir()
source_root = os.path.join("buildroot", "share", "PlatformIO", "variants") source_root = Path("buildroot/share/PlatformIO/variants")
assert os.path.isdir(source_root) assert source_root.is_dir()
variant = board.get("build.variant") variant = board.get("build.variant")
variant_dir = os.path.join(FRAMEWORK_DIR, "STM32F1", "variants", variant) variant_dir = FRAMEWORK_DIR / "STM32F1/variants" / variant
source_dir = os.path.join(source_root, variant) source_dir = source_root / variant
assert os.path.isdir(source_dir) assert source_dir.is_dir()
if os.path.isdir(variant_dir): if variant_dir.is_dir():
shutil.rmtree(variant_dir) shutil.rmtree(variant_dir)
if not os.path.isdir(variant_dir): if not variant_dir.is_dir():
os.mkdir(variant_dir) variant_dir.mkdir()
marlin.copytree(source_dir, variant_dir) marlin.copytree(source_dir, variant_dir)

View File

@@ -4,114 +4,114 @@
# #
import pioutil import pioutil
if pioutil.is_pio_build(): if pioutil.is_pio_build():
import os,random,struct,uuid,marlin import os,random,struct,uuid,marlin
# Relocate firmware from 0x08000000 to 0x08008800 # Relocate firmware from 0x08000000 to 0x08008800
marlin.relocate_firmware("0x08008800") marlin.relocate_firmware("0x08008800")
def calculate_crc(contents, seed): def calculate_crc(contents, seed):
accumulating_xor_value = seed; accumulating_xor_value = seed;
for i in range(0, len(contents), 4): for i in range(0, len(contents), 4):
value = struct.unpack('<I', contents[ i : i + 4])[0] value = struct.unpack('<I', contents[ i : i + 4])[0]
accumulating_xor_value = accumulating_xor_value ^ value accumulating_xor_value = accumulating_xor_value ^ value
return accumulating_xor_value return accumulating_xor_value
def xor_block(r0, r1, block_number, block_size, file_key): def xor_block(r0, r1, block_number, block_size, file_key):
# This is the loop counter # This is the loop counter
loop_counter = 0x0 loop_counter = 0x0
# This is the key length # This is the key length
key_length = 0x18 key_length = 0x18
# This is an initial seed # This is an initial seed
xor_seed = 0x4BAD xor_seed = 0x4BAD
# This is the block counter # This is the block counter
block_number = xor_seed * block_number block_number = xor_seed * block_number
#load the xor key from the file #load the xor key from the file
r7 = file_key r7 = file_key
for loop_counter in range(0, block_size): for loop_counter in range(0, block_size):
# meant to make sure different bits of the key are used. # meant to make sure different bits of the key are used.
xor_seed = int(loop_counter / key_length) xor_seed = int(loop_counter / key_length)
# IP is a scratch register / R12 # IP is a scratch register / R12
ip = loop_counter - (key_length * xor_seed) ip = loop_counter - (key_length * xor_seed)
# xor_seed = (loop_counter * loop_counter) + block_number # xor_seed = (loop_counter * loop_counter) + block_number
xor_seed = (loop_counter * loop_counter) + block_number xor_seed = (loop_counter * loop_counter) + block_number
# shift the xor_seed left by the bits in IP. # shift the xor_seed left by the bits in IP.
xor_seed = xor_seed >> ip xor_seed = xor_seed >> ip
# load a byte into IP # load a byte into IP
ip = r0[loop_counter] ip = r0[loop_counter]
# XOR the seed with r7 # XOR the seed with r7
xor_seed = xor_seed ^ r7 xor_seed = xor_seed ^ r7
# and then with IP # and then with IP
xor_seed = xor_seed ^ ip xor_seed = xor_seed ^ ip
#Now store the byte back #Now store the byte back
r1[loop_counter] = xor_seed & 0xFF r1[loop_counter] = xor_seed & 0xFF
#increment the loop_counter #increment the loop_counter
loop_counter = loop_counter + 1 loop_counter = loop_counter + 1
def encrypt_file(input, output_file, file_length): def encrypt_file(input, output_file, file_length):
input_file = bytearray(input.read()) input_file = bytearray(input.read())
block_size = 0x800 block_size = 0x800
key_length = 0x18 key_length = 0x18
uid_value = uuid.uuid4() uid_value = uuid.uuid4()
file_key = int(uid_value.hex[0:8], 16) file_key = int(uid_value.hex[0:8], 16)
xor_crc = 0xEF3D4323; xor_crc = 0xEF3D4323;
# the input file is exepcted to be in chunks of 0x800 # the input file is exepcted to be in chunks of 0x800
# so round the size # so round the size
while len(input_file) % block_size != 0: while len(input_file) % block_size != 0:
input_file.extend(b'0x0') input_file.extend(b'0x0')
# write the file header # write the file header
output_file.write(struct.pack(">I", 0x443D2D3F)) output_file.write(struct.pack(">I", 0x443D2D3F))
# encrypt the contents using a known file header key # encrypt the contents using a known file header key
# write the file_key # write the file_key
output_file.write(struct.pack("<I", file_key)) output_file.write(struct.pack("<I", file_key))
#TODO - how to enforce that the firmware aligns to block boundaries? #TODO - how to enforce that the firmware aligns to block boundaries?
block_count = int(len(input_file) / block_size) block_count = int(len(input_file) / block_size)
print ("Block Count is ", block_count) print ("Block Count is ", block_count)
for block_number in range(0, block_count): for block_number in range(0, block_count):
block_offset = (block_number * block_size) block_offset = (block_number * block_size)
block_end = block_offset + block_size block_end = block_offset + block_size
block_array = bytearray(input_file[block_offset: block_end]) block_array = bytearray(input_file[block_offset: block_end])
xor_block(block_array, block_array, block_number, block_size, file_key) xor_block(block_array, block_array, block_number, block_size, file_key)
for n in range (0, block_size): for n in range (0, block_size):
input_file[block_offset + n] = block_array[n] input_file[block_offset + n] = block_array[n]
# update the expected CRC value. # update the expected CRC value.
xor_crc = calculate_crc(block_array, xor_crc) xor_crc = calculate_crc(block_array, xor_crc)
# write CRC # write CRC
output_file.write(struct.pack("<I", xor_crc)) output_file.write(struct.pack("<I", xor_crc))
# finally, append the encrypted results. # finally, append the encrypted results.
output_file.write(input_file) output_file.write(input_file)
return return
# Encrypt ${PROGNAME}.bin and save it as 'update.cbd' # Encrypt ${PROGNAME}.bin and save it as 'update.cbd'
def encrypt(source, target, env): def encrypt(source, target, env):
firmware = open(target[0].path, "rb") firmware = open(target[0].path, "rb")
update = open(target[0].dir.path + '/update.cbd', "wb") update = open(target[0].dir.path + '/update.cbd', "wb")
length = os.path.getsize(target[0].path) length = os.path.getsize(target[0].path)
encrypt_file(firmware, update, length) encrypt_file(firmware, update, length)
firmware.close() firmware.close()
update.close() update.close()
marlin.add_post_action(encrypt); marlin.add_post_action(encrypt);

View File

@@ -2,38 +2,45 @@
# common-cxxflags.py # common-cxxflags.py
# Convenience script to apply customizations to CPP flags # Convenience script to apply customizations to CPP flags
# #
import pioutil import pioutil
if pioutil.is_pio_build(): if pioutil.is_pio_build():
Import("env") Import("env")
cxxflags = [ cxxflags = [
#"-Wno-incompatible-pointer-types", # "-Wno-incompatible-pointer-types",
#"-Wno-unused-const-variable", # "-Wno-unused-const-variable",
#"-Wno-maybe-uninitialized", # "-Wno-maybe-uninitialized",
#"-Wno-sign-compare" # "-Wno-sign-compare"
] ]
if "teensy" not in env['PIOENV']: if "teensy" not in env["PIOENV"]:
cxxflags += ["-Wno-register"] cxxflags += ["-Wno-register"]
env.Append(CXXFLAGS=cxxflags) env.Append(CXXFLAGS=cxxflags)
# #
# Add CPU frequency as a compile time constant instead of a runtime variable # Add CPU frequency as a compile time constant instead of a runtime variable
# #
def add_cpu_freq(): def add_cpu_freq():
if 'BOARD_F_CPU' in env: if "BOARD_F_CPU" in env:
env['BUILD_FLAGS'].append('-DBOARD_F_CPU=' + env['BOARD_F_CPU']) env["BUILD_FLAGS"].append("-DBOARD_F_CPU=" + env["BOARD_F_CPU"])
# Useful for JTAG debugging # Useful for JTAG debugging
# #
# It will separate release and debug build folders. # It will separate release and debug build folders.
# It useful to keep two live versions: a debug version for debugging and another for # It useful to keep two live versions: a debug version for debugging and another for
# release, for flashing when upload is not done automatically by jlink/stlink. # release, for flashing when upload is not done automatically by jlink/stlink.
# Without this, PIO needs to recompile everything twice for any small change. # Without this, PIO needs to recompile everything twice for any small change.
if env.GetBuildType() == "debug" and env.get('UPLOAD_PROTOCOL') not in ['jlink', 'stlink', 'custom']: if env.GetBuildType() == "debug" and env.get("UPLOAD_PROTOCOL") not in ["jlink", "stlink", "custom"]:
env['BUILD_DIR'] = '$PROJECT_BUILD_DIR/$PIOENV/debug' env["BUILD_DIR"] = "$PROJECT_BUILD_DIR/$PIOENV/debug"
# On some platform, F_CPU is a runtime variable. Since it's used to convert from ns def on_program_ready(source, target, env):
# to CPU cycles, this adds overhead preventing small delay (in the order of less than import shutil
# 30 cycles) to be generated correctly. By using a compile time constant instead shutil.copy(target[0].get_abspath(), env.subst("$PROJECT_BUILD_DIR/$PIOENV"))
# the compiler will perform the computation and this overhead will be avoided
add_cpu_freq() env.AddPostAction("$PROGPATH", on_program_ready)
# On some platform, F_CPU is a runtime variable. Since it's used to convert from ns
# to CPU cycles, this adds overhead preventing small delay (in the order of less than
# 30 cycles) to be generated correctly. By using a compile time constant instead
# the compiler will perform the computation and this overhead will be avoided
add_cpu_freq()

View File

@@ -4,13 +4,13 @@
# #
import pioutil import pioutil
if pioutil.is_pio_build(): if pioutil.is_pio_build():
Import("env", "projenv") Import("env", "projenv")
def apply_board_build_flags(): def apply_board_build_flags():
if not 'BOARD_CUSTOM_BUILD_FLAGS' in env['MARLIN_FEATURES']: if not 'BOARD_CUSTOM_BUILD_FLAGS' in env['MARLIN_FEATURES']:
return return
projenv.Append(CCFLAGS=env['MARLIN_FEATURES']['BOARD_CUSTOM_BUILD_FLAGS'].split()) projenv.Append(CCFLAGS=env['MARLIN_FEATURES']['BOARD_CUSTOM_BUILD_FLAGS'].split())
# We need to add the board build flags in a post script # We need to add the board build flags in a post script
# so the platform build script doesn't overwrite the custom CCFLAGS # so the platform build script doesn't overwrite the custom CCFLAGS
apply_board_build_flags() apply_board_build_flags()

View File

@@ -5,247 +5,248 @@
import pioutil import pioutil
if pioutil.is_pio_build(): if pioutil.is_pio_build():
import subprocess,os,re import subprocess,os,re
Import("env") Import("env")
from platformio.package.meta import PackageSpec from platformio.package.meta import PackageSpec
from platformio.project.config import ProjectConfig from platformio.project.config import ProjectConfig
verbose = 0 verbose = 0
FEATURE_CONFIG = {} FEATURE_CONFIG = {}
def validate_pio(): def validate_pio():
PIO_VERSION_MIN = (6, 0, 1) PIO_VERSION_MIN = (6, 0, 1)
try: try:
from platformio import VERSION as PIO_VERSION from platformio import VERSION as PIO_VERSION
weights = (1000, 100, 1) weights = (1000, 100, 1)
version_min = sum([x[0] * float(re.sub(r'[^0-9]', '.', str(x[1]))) for x in zip(weights, PIO_VERSION_MIN)]) version_min = sum([x[0] * float(re.sub(r'[^0-9]', '.', str(x[1]))) for x in zip(weights, PIO_VERSION_MIN)])
version_cur = sum([x[0] * float(re.sub(r'[^0-9]', '.', str(x[1]))) for x in zip(weights, PIO_VERSION)]) version_cur = sum([x[0] * float(re.sub(r'[^0-9]', '.', str(x[1]))) for x in zip(weights, PIO_VERSION)])
if version_cur < version_min: if version_cur < version_min:
print() print()
print("**************************************************") print("**************************************************")
print("****** An update to PlatformIO is ******") print("****** An update to PlatformIO is ******")
print("****** required to build Marlin Firmware. ******") print("****** required to build Marlin Firmware. ******")
print("****** ******") print("****** ******")
print("****** Minimum version: ", PIO_VERSION_MIN, " ******") print("****** Minimum version: ", PIO_VERSION_MIN, " ******")
print("****** Current Version: ", PIO_VERSION, " ******") print("****** Current Version: ", PIO_VERSION, " ******")
print("****** ******") print("****** ******")
print("****** Update PlatformIO and try again. ******") print("****** Update PlatformIO and try again. ******")
print("**************************************************") print("**************************************************")
print() print()
exit(1) exit(1)
except SystemExit: except SystemExit:
exit(1) exit(1)
except: except:
print("Can't detect PlatformIO Version") print("Can't detect PlatformIO Version")
def blab(str,level=1): def blab(str,level=1):
if verbose >= level: if verbose >= level:
print("[deps] %s" % str) print("[deps] %s" % str)
def add_to_feat_cnf(feature, flines): def add_to_feat_cnf(feature, flines):
try: try:
feat = FEATURE_CONFIG[feature] feat = FEATURE_CONFIG[feature]
except: except:
FEATURE_CONFIG[feature] = {} FEATURE_CONFIG[feature] = {}
# Get a reference to the FEATURE_CONFIG under construction # Get a reference to the FEATURE_CONFIG under construction
feat = FEATURE_CONFIG[feature] feat = FEATURE_CONFIG[feature]
# Split up passed lines on commas or newlines and iterate # Split up passed lines on commas or newlines and iterate
# Add common options to the features config under construction # Add common options to the features config under construction
# For lib_deps replace a previous instance of the same library # For lib_deps replace a previous instance of the same library
atoms = re.sub(r',\s*', '\n', flines).strip().split('\n') atoms = re.sub(r',\s*', '\n', flines).strip().split('\n')
for line in atoms: for line in atoms:
parts = line.split('=') parts = line.split('=')
name = parts.pop(0) name = parts.pop(0)
if name in ['build_flags', 'extra_scripts', 'src_filter', 'lib_ignore']: if name in ['build_flags', 'extra_scripts', 'build_src_filter', 'lib_ignore']:
feat[name] = '='.join(parts) feat[name] = '='.join(parts)
blab("[%s] %s=%s" % (feature, name, feat[name]), 3) blab("[%s] %s=%s" % (feature, name, feat[name]), 3)
else: else:
for dep in re.split(r',\s*', line): for dep in re.split(r',\s*', line):
lib_name = re.sub(r'@([~^]|[<>]=?)?[\d.]+', '', dep.strip()).split('=').pop(0) lib_name = re.sub(r'@([~^]|[<>]=?)?[\d.]+', '', dep.strip()).split('=').pop(0)
lib_re = re.compile('(?!^' + lib_name + '\\b)') lib_re = re.compile('(?!^' + lib_name + '\\b)')
feat['lib_deps'] = list(filter(lib_re.match, feat['lib_deps'])) + [dep] if not 'lib_deps' in feat: feat['lib_deps'] = {}
blab("[%s] lib_deps = %s" % (feature, dep), 3) feat['lib_deps'] = list(filter(lib_re.match, feat['lib_deps'])) + [dep]
blab("[%s] lib_deps = %s" % (feature, dep), 3)
def load_features(): def load_features():
blab("========== Gather [features] entries...") blab("========== Gather [features] entries...")
for key in ProjectConfig().items('features'): for key in ProjectConfig().items('features'):
feature = key[0].upper() feature = key[0].upper()
if not feature in FEATURE_CONFIG: if not feature in FEATURE_CONFIG:
FEATURE_CONFIG[feature] = { 'lib_deps': [] } FEATURE_CONFIG[feature] = { 'lib_deps': [] }
add_to_feat_cnf(feature, key[1]) add_to_feat_cnf(feature, key[1])
# Add options matching custom_marlin.MY_OPTION to the pile # Add options matching custom_marlin.MY_OPTION to the pile
blab("========== Gather custom_marlin entries...") blab("========== Gather custom_marlin entries...")
for n in env.GetProjectOptions(): for n in env.GetProjectOptions():
key = n[0] key = n[0]
mat = re.match(r'custom_marlin\.(.+)', key) mat = re.match(r'custom_marlin\.(.+)', key)
if mat: if mat:
try: try:
val = env.GetProjectOption(key) val = env.GetProjectOption(key)
except: except:
val = None val = None
if val: if val:
opt = mat[1].upper() opt = mat[1].upper()
blab("%s.custom_marlin.%s = '%s'" % ( env['PIOENV'], opt, val )) blab("%s.custom_marlin.%s = '%s'" % ( env['PIOENV'], opt, val ))
add_to_feat_cnf(opt, val) add_to_feat_cnf(opt, val)
def get_all_known_libs(): def get_all_known_libs():
known_libs = [] known_libs = []
for feature in FEATURE_CONFIG: for feature in FEATURE_CONFIG:
feat = FEATURE_CONFIG[feature] feat = FEATURE_CONFIG[feature]
if not 'lib_deps' in feat: if not 'lib_deps' in feat:
continue continue
for dep in feat['lib_deps']: for dep in feat['lib_deps']:
known_libs.append(PackageSpec(dep).name) known_libs.append(PackageSpec(dep).name)
return known_libs return known_libs
def get_all_env_libs(): def get_all_env_libs():
env_libs = [] env_libs = []
lib_deps = env.GetProjectOption('lib_deps') lib_deps = env.GetProjectOption('lib_deps')
for dep in lib_deps: for dep in lib_deps:
env_libs.append(PackageSpec(dep).name) env_libs.append(PackageSpec(dep).name)
return env_libs return env_libs
def set_env_field(field, value): def set_env_field(field, value):
proj = env.GetProjectConfig() proj = env.GetProjectConfig()
proj.set("env:" + env['PIOENV'], field, value) proj.set("env:" + env['PIOENV'], field, value)
# All unused libs should be ignored so that if a library # All unused libs should be ignored so that if a library
# exists in .pio/lib_deps it will not break compilation. # exists in .pio/lib_deps it will not break compilation.
def force_ignore_unused_libs(): def force_ignore_unused_libs():
env_libs = get_all_env_libs() env_libs = get_all_env_libs()
known_libs = get_all_known_libs() known_libs = get_all_known_libs()
diff = (list(set(known_libs) - set(env_libs))) diff = (list(set(known_libs) - set(env_libs)))
lib_ignore = env.GetProjectOption('lib_ignore') + diff lib_ignore = env.GetProjectOption('lib_ignore') + diff
blab("Ignore libraries: %s" % lib_ignore) blab("Ignore libraries: %s" % lib_ignore)
set_env_field('lib_ignore', lib_ignore) set_env_field('lib_ignore', lib_ignore)
def apply_features_config(): def apply_features_config():
load_features() load_features()
blab("========== Apply enabled features...") blab("========== Apply enabled features...")
for feature in FEATURE_CONFIG: for feature in FEATURE_CONFIG:
if not env.MarlinHas(feature): if not env.MarlinHas(feature):
continue continue
feat = FEATURE_CONFIG[feature] feat = FEATURE_CONFIG[feature]
if 'lib_deps' in feat and len(feat['lib_deps']): if 'lib_deps' in feat and len(feat['lib_deps']):
blab("========== Adding lib_deps for %s... " % feature, 2) blab("========== Adding lib_deps for %s... " % feature, 2)
# feat to add # feat to add
deps_to_add = {} deps_to_add = {}
for dep in feat['lib_deps']: for dep in feat['lib_deps']:
deps_to_add[PackageSpec(dep).name] = dep deps_to_add[PackageSpec(dep).name] = dep
blab("==================== %s... " % dep, 2) blab("==================== %s... " % dep, 2)
# Does the env already have the dependency? # Does the env already have the dependency?
deps = env.GetProjectOption('lib_deps') deps = env.GetProjectOption('lib_deps')
for dep in deps: for dep in deps:
name = PackageSpec(dep).name name = PackageSpec(dep).name
if name in deps_to_add: if name in deps_to_add:
del deps_to_add[name] del deps_to_add[name]
# Are there any libraries that should be ignored? # Are there any libraries that should be ignored?
lib_ignore = env.GetProjectOption('lib_ignore') lib_ignore = env.GetProjectOption('lib_ignore')
for dep in deps: for dep in deps:
name = PackageSpec(dep).name name = PackageSpec(dep).name
if name in deps_to_add: if name in deps_to_add:
del deps_to_add[name] del deps_to_add[name]
# Is there anything left? # Is there anything left?
if len(deps_to_add) > 0: if len(deps_to_add) > 0:
# Only add the missing dependencies # Only add the missing dependencies
set_env_field('lib_deps', deps + list(deps_to_add.values())) set_env_field('lib_deps', deps + list(deps_to_add.values()))
if 'build_flags' in feat: if 'build_flags' in feat:
f = feat['build_flags'] f = feat['build_flags']
blab("========== Adding build_flags for %s: %s" % (feature, f), 2) blab("========== Adding build_flags for %s: %s" % (feature, f), 2)
new_flags = env.GetProjectOption('build_flags') + [ f ] new_flags = env.GetProjectOption('build_flags') + [ f ]
env.Replace(BUILD_FLAGS=new_flags) env.Replace(BUILD_FLAGS=new_flags)
if 'extra_scripts' in feat: if 'extra_scripts' in feat:
blab("Running extra_scripts for %s... " % feature, 2) blab("Running extra_scripts for %s... " % feature, 2)
env.SConscript(feat['extra_scripts'], exports="env") env.SConscript(feat['extra_scripts'], exports="env")
if 'src_filter' in feat: if 'build_src_filter' in feat:
blab("========== Adding build_src_filter for %s... " % feature, 2) blab("========== Adding build_src_filter for %s... " % feature, 2)
src_filter = ' '.join(env.GetProjectOption('src_filter')) build_src_filter = ' '.join(env.GetProjectOption('build_src_filter'))
# first we need to remove the references to the same folder # first we need to remove the references to the same folder
my_srcs = re.findall(r'[+-](<.*?>)', feat['src_filter']) my_srcs = re.findall(r'[+-](<.*?>)', feat['build_src_filter'])
cur_srcs = re.findall(r'[+-](<.*?>)', src_filter) cur_srcs = re.findall(r'[+-](<.*?>)', build_src_filter)
for d in my_srcs: for d in my_srcs:
if d in cur_srcs: if d in cur_srcs:
src_filter = re.sub(r'[+-]' + d, '', src_filter) build_src_filter = re.sub(r'[+-]' + d, '', build_src_filter)
src_filter = feat['src_filter'] + ' ' + src_filter build_src_filter = feat['build_src_filter'] + ' ' + build_src_filter
set_env_field('build_src_filter', [src_filter]) set_env_field('build_src_filter', [build_src_filter])
env.Replace(SRC_FILTER=src_filter) env.Replace(SRC_FILTER=build_src_filter)
if 'lib_ignore' in feat: if 'lib_ignore' in feat:
blab("========== Adding lib_ignore for %s... " % feature, 2) blab("========== Adding lib_ignore for %s... " % feature, 2)
lib_ignore = env.GetProjectOption('lib_ignore') + [feat['lib_ignore']] lib_ignore = env.GetProjectOption('lib_ignore') + [feat['lib_ignore']]
set_env_field('lib_ignore', lib_ignore) set_env_field('lib_ignore', lib_ignore)
# #
# Use the compiler to get a list of all enabled features # Use the compiler to get a list of all enabled features
# #
def load_marlin_features(): def load_marlin_features():
if 'MARLIN_FEATURES' in env: if 'MARLIN_FEATURES' in env:
return return
# Process defines # Process defines
from preprocessor import run_preprocessor from preprocessor import run_preprocessor
define_list = run_preprocessor(env) define_list = run_preprocessor(env)
marlin_features = {} marlin_features = {}
for define in define_list: for define in define_list:
feature = define[8:].strip().decode().split(' ') feature = define[8:].strip().decode().split(' ')
feature, definition = feature[0], ' '.join(feature[1:]) feature, definition = feature[0], ' '.join(feature[1:])
marlin_features[feature] = definition marlin_features[feature] = definition
env['MARLIN_FEATURES'] = marlin_features env['MARLIN_FEATURES'] = marlin_features
# #
# Return True if a matching feature is enabled # Return True if a matching feature is enabled
# #
def MarlinHas(env, feature): def MarlinHas(env, feature):
load_marlin_features() load_marlin_features()
r = re.compile('^' + feature + '$') r = re.compile('^' + feature + '$')
found = list(filter(r.match, env['MARLIN_FEATURES'])) found = list(filter(r.match, env['MARLIN_FEATURES']))
# Defines could still be 'false' or '0', so check # Defines could still be 'false' or '0', so check
some_on = False some_on = False
if len(found): if len(found):
for f in found: for f in found:
val = env['MARLIN_FEATURES'][f] val = env['MARLIN_FEATURES'][f]
if val in [ '', '1', 'true' ]: if val in [ '', '1', 'true' ]:
some_on = True some_on = True
elif val in env['MARLIN_FEATURES']: elif val in env['MARLIN_FEATURES']:
some_on = env.MarlinHas(val) some_on = env.MarlinHas(val)
return some_on return some_on
validate_pio() validate_pio()
try: try:
verbose = int(env.GetProjectOption('custom_verbose')) verbose = int(env.GetProjectOption('custom_verbose'))
except: except:
pass pass
# #
# Add a method for other PIO scripts to query enabled features # Add a method for other PIO scripts to query enabled features
# #
env.AddMethod(MarlinHas) env.AddMethod(MarlinHas)
# #
# Add dependencies for enabled Marlin features # Add dependencies for enabled Marlin features
# #
apply_features_config() apply_features_config()
force_ignore_unused_libs() force_ignore_unused_libs()
#print(env.Dump()) #print(env.Dump())
from signature import compute_build_signature from signature import compute_build_signature
compute_build_signature(env) compute_build_signature(env)

View File

@@ -0,0 +1,240 @@
#
# configuration.py
# Apply options from config.ini to the existing Configuration headers
#
import re, shutil, configparser
from pathlib import Path
verbose = 0
def blab(str, level=1):
    """Emit a '[config]' log line when the module verbosity meets `level`."""
    if verbose >= level:
        print(f"[config] {str}")
def config_path(cpath):
    """Return the Path to `cpath` inside the Marlin/ source directory."""
    # Path() accepts only path segments. The previous `encoding='utf-8'`
    # keyword was silently ignored before Python 3.12 and is rejected by
    # newer versions, so it is removed here.
    return Path("Marlin", cpath)
# Apply a single name = on/off ; name = value ; etc.
# TODO: Limit to the given (optional) configuration
def apply_opt(name, val, conf=None):
    """
    Apply a single option to the Marlin configuration headers.

    name -- The option name (e.g., 'SERIAL_PORT'). The special name 'lcd'
            is shorthand: its value names the option to switch on.
    val  -- "on" / "" / None to enable, "off" to disable, or a literal
            value to assign after the #define.
    conf -- Reserved for limiting the change to a given configuration
            (currently unused; see TODO above).

    Searches Configuration.h then Configuration_adv.h; only the first file
    containing the option is modified. If the option is found in neither
    file it is appended to Configuration.h after the first run of #define
    lines.
    """
    if name == "lcd": name, val = val, "on"

    # Create a regex to match the option and capture parts of the line:
    # 1:indent 2:'//' 3:'#define ' 4:name 5:space 6:value 7:space 8:comment
    regex = re.compile(rf'^(\s*)(//\s*)?(#define\s+)({name}\b)(\s*)(.*?)(\s*)(//.*)?$', re.IGNORECASE)

    # Find and enable and/or update all matches
    for file in ("Configuration.h", "Configuration_adv.h"):
        fullpath = config_path(file)
        lines = fullpath.read_text(encoding='utf-8').split('\n')
        found = False
        for i in range(len(lines)):
            line = lines[i]
            match = regex.match(line)
            if match and match[4].upper() == name.upper():
                found = True
                # For boolean options un/comment the define
                if val in ("on", "", None):
                    # Strip any leading '//' to enable the define
                    newline = re.sub(r'^(\s*)//+\s*(#define)(\s{1,3})?(\s*)', r'\1\2 \4', line)
                elif val == "off":
                    # Prepend '//' to disable the define
                    newline = re.sub(r'^(\s*)(#define)(\s{1,3})?(\s*)', r'\1//\2 \4', line)
                else:
                    # For options with values, enable and set the value
                    newline = match[1] + match[3] + match[4] + match[5] + val
                    # Preserve a trailing comment, keeping its original spacing
                    if match[8]:
                        sp = match[7] if match[7] else ' '
                        newline += sp + match[8]
                lines[i] = newline
                blab(f"Set {name} to {val}")

        # If the option was found, write the modified lines
        # (and skip the remaining file(s) — first match wins)
        if found:
            fullpath.write_text('\n'.join(lines), encoding='utf-8')
            break

    # If the option didn't appear in either config file, add it
    if not found:
        # OFF options are added as disabled items so they appear
        # in config dumps. Useful for custom settings.
        prefix = ""
        if val == "off":
            prefix, val = "//", "" # Item doesn't appear in config dump
            #val = "false" # Item appears in config dump

        # Uppercase the option unless already mixed/uppercase
        added = name.upper() if name.islower() else name

        # Add the provided value after the name
        if val != "on" and val != "" and val is not None:
            added += " " + val

        # Prepend the new option after the first set of #define lines
        fullpath = config_path("Configuration.h")
        with fullpath.open(encoding='utf-8') as f:
            lines = f.readlines()
            linenum = 0
            gotdef = False
            for line in lines:
                isdef = line.startswith("#define")
                if not gotdef:
                    gotdef = isdef
                elif not isdef:
                    # Past the end of the first #define block
                    break
                linenum += 1
            lines.insert(linenum, f"{prefix}#define {added:30} // Added by config.ini\n")
            fullpath.write_text(''.join(lines), encoding='utf-8')
# Fetch configuration files from GitHub given the path.
# Return True if any files were fetched.
def fetch_example(url):
    """
    Fetch configuration files from GitHub for the given example path or URL.

    url -- An 'examples/...' path (optionally suffixed with '@branch') or a
           full http(s) URL to fetch the Configuration files from.

    Resets the local Marlin/*.h files via git, then downloads each known
    configuration file with curl or wget. Return True if any were fetched.
    """
    if url.endswith("/"): url = url[:-1]
    if not url.startswith('http'):
        brch = "bugfix-2.1.x"
        if '@' in url: url, brch = map(str.strip, url.split('@'))
        if url == 'examples/default': url = 'default'
        url = f"https://raw.githubusercontent.com/MarlinFirmware/Configurations/{brch}/config/{url}"
    # Percent-encode characters that would break the raw URL
    url = url.replace("%", "%25").replace(" ", "%20")

    # Find a suitable fetch command
    if shutil.which("curl") is not None:
        fetch = "curl -L -s -S -f -o"
    elif shutil.which("wget") is not None:
        fetch = "wget -q -O"
    else:
        blab("Couldn't find curl or wget", -1)
        return False

    import os

    # Reset configurations to default
    os.system("git checkout HEAD Marlin/*.h")

    # Try to fetch the remote files, each into temp file 'wgot'
    gotfile = False
    for fn in ("Configuration.h", "Configuration_adv.h", "_Bootscreen.h", "_Statusscreen.h"):
        if os.system(f"{fetch} wgot {url}/{fn} >/dev/null 2>&1") == 0:
            shutil.move('wgot', config_path(fn))
            gotfile = True

    # Remove any leftover 'wgot'. It is a regular file, so use unlink();
    # shutil.rmtree() would raise NotADirectoryError here.
    wgot = Path('wgot')
    if wgot.exists(): wgot.unlink()

    return gotfile
def section_items(cp, sectkey):
    """Return the (name, value) pairs of `sectkey`, or [] if the section is absent."""
    if cp.has_section(sectkey):
        return cp.items(sectkey)
    return []
# Apply all items from a config section
def apply_ini_by_name(cp, sect):
    """
    Apply all items from a config section. The synonyms 'config:base' and
    'config:root' combine both sections, skipping 'ini_'-prefixed directives.
    """
    is_base = sect in ('config:base', 'config:root')
    if is_base:
        items = section_items(cp, 'config:base') + section_items(cp, 'config:root')
    else:
        items = section_items(cp, sect)
    for name, value in items:
        # 'ini_*' entries are meta-directives, not config options
        if is_base and name.startswith('ini_'):
            continue
        apply_opt(name, value)
# Apply all config sections from a parsed file
def apply_all_sections(cp):
    """Apply every 'config:*' section from a parsed file."""
    for sect in filter(lambda s: s.startswith('config:'), cp.sections()):
        apply_ini_by_name(cp, sect)
# Apply certain config sections from a parsed file
def apply_sections(cp, ckey='all'):
    """Apply config sections from a parsed file, selected by `ckey`."""
    blab(f"Apply section key: {ckey}")
    if ckey == 'all':
        apply_all_sections(cp)
        return
    # Apply the base/root config.ini settings after external files are done
    if ckey in ('base', 'root'):
        apply_ini_by_name(cp, 'config:base')
    # Apply historically 'Configuration.h' settings everywhere
    if ckey == 'basic':
        apply_ini_by_name(cp, 'config:basic')
    # Apply historically Configuration_adv.h settings everywhere
    # (Some of which rely on defines in 'Conditionals_LCD.h')
    elif ckey in ('adv', 'advanced'):
        apply_ini_by_name(cp, 'config:advanced')
    # Apply a specific config:<name> section directly
    elif ckey.startswith('config:'):
        apply_ini_by_name(cp, ckey)
# Apply settings from a top level config.ini
def apply_config_ini(cp):
    """
    Apply settings from a top level config.ini (already parsed into `cp`).

    Pre-scans the base/root section for an 'ini_use_config' directive, then
    processes each listed key in order: nested .ini files are parsed and
    applied, 'examples/...' or http(s) keys fetch files from GitHub, and the
    remaining keys select which config sections of `cp` to apply.
    """
    blab("=" * 20 + " Gather 'config.ini' entries...")

    # Pre-scan for ini_use_config to get config_keys
    base_items = section_items(cp, 'config:base') + section_items(cp, 'config:root')
    config_keys = ['base']
    for ikey, ival in base_items:
        if ikey == 'ini_use_config':
            # A comma-separated list of keys replaces the default ['base']
            config_keys = map(str.strip, ival.split(','))

    # For each ini_use_config item perform an action
    for ckey in config_keys:
        # NOTE(review): addbase is never used below — appears to be leftover
        addbase = False

        # For a key ending in .ini load and parse another .ini file
        if ckey.endswith('.ini'):
            # 'section@file.ini' selects a section key within that file
            sect = 'base'
            if '@' in ckey: sect, ckey = map(str.strip, ckey.split('@'))
            cp2 = configparser.ConfigParser()
            cp2.read(config_path(ckey))
            apply_sections(cp2, sect)
            ckey = 'base';

        # (Allow 'example/' as a shortcut for 'examples/')
        elif ckey.startswith('example/'):
            ckey = 'examples' + ckey[7:]

        # For 'examples/<path>' fetch an example set from GitHub.
        # For https?:// do a direct fetch of the URL.
        if ckey.startswith('examples/') or ckey.startswith('http'):
            fetch_example(ckey)
            ckey = 'base'

        if ckey == 'all':
            apply_sections(cp)

        else:
            # Apply keyed sections after external files are done
            apply_sections(cp, 'config:' + ckey)
if __name__ == "__main__":
    #
    # From command line use the given file name
    #
    import sys
    args = sys.argv[1:]
    # Initialize so a bad argument leaves ini_file falsy instead of
    # undefined (the old code raised NameError at 'if ini_file:' after
    # printing the usage message).
    ini_file = None
    if len(args) > 0:
        if args[0].endswith('.ini'):
            ini_file = args[0]
        else:
            print("Usage: %s <.ini file>" % sys.argv[0])
    else:
        ini_file = config_path('config.ini')
    if ini_file:
        user_ini = configparser.ConfigParser()
        user_ini.read(ini_file)
        apply_config_ini(user_ini)
else:
    #
    # From within PlatformIO use the loaded INI file
    #
    import pioutil
    if pioutil.is_pio_build():
        Import("env")
        # Best-effort read of the custom_verbose project option
        try:
            verbose = int(env.GetProjectOption('custom_verbose'))
        except:
            pass
        from platformio.project.config import ProjectConfig
        apply_config_ini(ProjectConfig())

View File

@@ -6,13 +6,13 @@
# #
import pioutil import pioutil
if pioutil.is_pio_build(): if pioutil.is_pio_build():
import marlin import marlin
board = marlin.env.BoardConfig() board = marlin.env.BoardConfig()
address = board.get("build.address", "") address = board.get("build.address", "")
if address: if address:
marlin.relocate_firmware(address) marlin.relocate_firmware(address)
ldscript = board.get("build.ldscript", "") ldscript = board.get("build.ldscript", "")
if ldscript: if ldscript:
marlin.custom_ld_script(ldscript) marlin.custom_ld_script(ldscript)

View File

@@ -4,46 +4,50 @@
# #
import pioutil import pioutil
if pioutil.is_pio_build(): if pioutil.is_pio_build():
Import("env") Import("env")
import os,requests,zipfile,tempfile,shutil import requests,zipfile,tempfile,shutil
from pathlib import Path
url = "https://github.com/makerbase-mks/Mks-Robin-Nano-Marlin2.0-Firmware/archive/0263cdaccf.zip" url = "https://github.com/makerbase-mks/Mks-Robin-Nano-Marlin2.0-Firmware/archive/0263cdaccf.zip"
deps_path = env.Dictionary("PROJECT_LIBDEPS_DIR") deps_path = Path(env.Dictionary("PROJECT_LIBDEPS_DIR"))
zip_path = os.path.join(deps_path, "mks-assets.zip") zip_path = deps_path / "mks-assets.zip"
assets_path = os.path.join(env.Dictionary("PROJECT_BUILD_DIR"), env.Dictionary("PIOENV"), "assets") assets_path = Path(env.Dictionary("PROJECT_BUILD_DIR"), env.Dictionary("PIOENV"), "assets")
def download_mks_assets(): def download_mks_assets():
print("Downloading MKS Assets") print("Downloading MKS Assets")
r = requests.get(url, stream=True) r = requests.get(url, stream=True)
# the user may have a very clean workspace, # the user may have a very clean workspace,
# so create the PROJECT_LIBDEPS_DIR directory if not exits # so create the PROJECT_LIBDEPS_DIR directory if not exits
if os.path.exists(deps_path) == False: if not deps_path.exists():
os.mkdir(deps_path) deps_path.mkdir()
with open(zip_path, 'wb') as fd: with zip_path.open('wb') as fd:
for chunk in r.iter_content(chunk_size=128): for chunk in r.iter_content(chunk_size=128):
fd.write(chunk) fd.write(chunk)
def copy_mks_assets(): def copy_mks_assets():
print("Copying MKS Assets") print("Copying MKS Assets")
output_path = tempfile.mkdtemp() output_path = Path(tempfile.mkdtemp())
zip_obj = zipfile.ZipFile(zip_path, 'r') zip_obj = zipfile.ZipFile(zip_path, 'r')
zip_obj.extractall(output_path) zip_obj.extractall(output_path)
zip_obj.close() zip_obj.close()
if os.path.exists(assets_path) == True and os.path.isdir(assets_path) == False: if assets_path.exists() and not assets_path.is_dir():
os.unlink(assets_path) assets_path.unlink()
if os.path.exists(assets_path) == False: if not assets_path.exists():
os.mkdir(assets_path) assets_path.mkdir()
base_path = '' base_path = ''
for filename in os.listdir(output_path): for filename in output_path.iterdir():
base_path = filename base_path = filename
for filename in os.listdir(os.path.join(output_path, base_path, 'Firmware', 'mks_font')): fw_path = (output_path / base_path / 'Firmware')
shutil.copy(os.path.join(output_path, base_path, 'Firmware', 'mks_font', filename), assets_path) font_path = fw_path / 'mks_font'
for filename in os.listdir(os.path.join(output_path, base_path, 'Firmware', 'mks_pic')): for filename in font_path.iterdir():
shutil.copy(os.path.join(output_path, base_path, 'Firmware', 'mks_pic', filename), assets_path) shutil.copy(font_path / filename, assets_path)
shutil.rmtree(output_path, ignore_errors=True) pic_path = fw_path / 'mks_pic'
for filename in pic_path.iterdir():
shutil.copy(pic_path / filename, assets_path)
shutil.rmtree(output_path, ignore_errors=True)
if os.path.exists(zip_path) == False: if not zip_path.exists():
download_mks_assets() download_mks_assets()
if os.path.exists(assets_path) == False: if not assets_path.exists():
copy_mks_assets() copy_mks_assets()

View File

@@ -4,32 +4,32 @@
import pioutil import pioutil
if pioutil.is_pio_build(): if pioutil.is_pio_build():
import shutil import shutil
from os.path import join, isfile from os.path import join, isfile
from pprint import pprint from pprint import pprint
Import("env") Import("env")
if env.MarlinHas("POSTMORTEM_DEBUGGING"): if env.MarlinHas("POSTMORTEM_DEBUGGING"):
FRAMEWORK_DIR = env.PioPlatform().get_package_dir("framework-arduinoststm32-maple") FRAMEWORK_DIR = env.PioPlatform().get_package_dir("framework-arduinoststm32-maple")
patchflag_path = join(FRAMEWORK_DIR, ".exc-patching-done") patchflag_path = join(FRAMEWORK_DIR, ".exc-patching-done")
# patch file only if we didn't do it before # patch file only if we didn't do it before
if not isfile(patchflag_path): if not isfile(patchflag_path):
print("Patching libmaple exception handlers") print("Patching libmaple exception handlers")
original_file = join(FRAMEWORK_DIR, "STM32F1", "cores", "maple", "libmaple", "exc.S") original_file = join(FRAMEWORK_DIR, "STM32F1", "cores", "maple", "libmaple", "exc.S")
backup_file = join(FRAMEWORK_DIR, "STM32F1", "cores", "maple", "libmaple", "exc.S.bak") backup_file = join(FRAMEWORK_DIR, "STM32F1", "cores", "maple", "libmaple", "exc.S.bak")
src_file = join("buildroot", "share", "PlatformIO", "scripts", "exc.S") src_file = join("buildroot", "share", "PlatformIO", "scripts", "exc.S")
assert isfile(original_file) and isfile(src_file) assert isfile(original_file) and isfile(src_file)
shutil.copyfile(original_file, backup_file) shutil.copyfile(original_file, backup_file)
shutil.copyfile(src_file, original_file); shutil.copyfile(src_file, original_file);
def _touch(path): def _touch(path):
with open(path, "w") as fp: with open(path, "w") as fp:
fp.write("") fp.write("")
env.Execute(lambda *args, **kwargs: _touch(patchflag_path)) env.Execute(lambda *args, **kwargs: _touch(patchflag_path))
print("Done patching exception handler") print("Done patching exception handler")
print("Libmaple modified and ready for post mortem debugging") print("Libmaple modified and ready for post mortem debugging")

View File

@@ -7,54 +7,52 @@
# #
import pioutil import pioutil
if pioutil.is_pio_build(): if pioutil.is_pio_build():
import os,shutil,marlin import shutil,marlin
from SCons.Script import DefaultEnvironment from pathlib import Path
from platformio import util
env = DefaultEnvironment() #
# Get the platform name from the 'platform_packages' option,
# or look it up by the platform.class.name.
#
env = marlin.env
platform = env.PioPlatform()
# from platformio.package.meta import PackageSpec
# Get the platform name from the 'platform_packages' option, platform_packages = env.GetProjectOption('platform_packages')
# or look it up by the platform.class.name.
#
platform = env.PioPlatform()
from platformio.package.meta import PackageSpec # Remove all tool items from platform_packages
platform_packages = env.GetProjectOption('platform_packages') platform_packages = [x for x in platform_packages if not x.startswith("platformio/tool-")]
# Remove all tool items from platform_packages if len(platform_packages) == 0:
platform_packages = [x for x in platform_packages if not x.startswith("platformio/tool-")] framewords = {
"Ststm32Platform": "framework-arduinoststm32",
"AtmelavrPlatform": "framework-arduino-avr"
}
platform_name = framewords[platform.__class__.__name__]
else:
platform_name = PackageSpec(platform_packages[0]).name
if len(platform_packages) == 0: if platform_name in [ "usb-host-msc", "usb-host-msc-cdc-msc", "usb-host-msc-cdc-msc-2", "usb-host-msc-cdc-msc-3", "tool-stm32duino", "biqu-bx-workaround", "main" ]:
framewords = { platform_name = "framework-arduinoststm32"
"Ststm32Platform": "framework-arduinoststm32",
"AtmelavrPlatform": "framework-arduino-avr"
}
platform_name = framewords[platform.__class__.__name__]
else:
platform_name = PackageSpec(platform_packages[0]).name
if platform_name in [ "usb-host-msc", "usb-host-msc-cdc-msc", "usb-host-msc-cdc-msc-2", "usb-host-msc-cdc-msc-3", "tool-stm32duino", "biqu-bx-workaround", "main" ]: FRAMEWORK_DIR = Path(platform.get_package_dir(platform_name))
platform_name = "framework-arduinoststm32" assert FRAMEWORK_DIR.is_dir()
FRAMEWORK_DIR = platform.get_package_dir(platform_name) board = env.BoardConfig()
assert os.path.isdir(FRAMEWORK_DIR)
board = env.BoardConfig() #mcu_type = board.get("build.mcu")[:-2]
variant = board.get("build.variant")
#series = mcu_type[:7].upper() + "xx"
#mcu_type = board.get("build.mcu")[:-2] # Prepare a new empty folder at the destination
variant = board.get("build.variant") variant_dir = FRAMEWORK_DIR / "variants" / variant
#series = mcu_type[:7].upper() + "xx" if variant_dir.is_dir():
shutil.rmtree(variant_dir)
if not variant_dir.is_dir():
variant_dir.mkdir()
# Prepare a new empty folder at the destination # Source dir is a local variant sub-folder
variant_dir = os.path.join(FRAMEWORK_DIR, "variants", variant) source_dir = Path("buildroot/share/PlatformIO/variants", variant)
if os.path.isdir(variant_dir): assert source_dir.is_dir()
shutil.rmtree(variant_dir)
if not os.path.isdir(variant_dir):
os.mkdir(variant_dir)
# Source dir is a local variant sub-folder marlin.copytree(source_dir, variant_dir)
source_dir = os.path.join("buildroot/share/PlatformIO/variants", variant)
assert os.path.isdir(source_dir)
marlin.copytree(source_dir, variant_dir)

View File

@@ -4,37 +4,32 @@
# #
import pioutil import pioutil
if pioutil.is_pio_build(): if pioutil.is_pio_build():
import os,marlin
# Append ${PROGNAME}.bin firmware after bootloader and save it as 'jgaurora_firmware.bin'
def addboot(source, target, env):
firmware = open(target[0].path, "rb")
lengthfirmware = os.path.getsize(target[0].path)
bootloader_bin = "buildroot/share/PlatformIO/scripts/" + "jgaurora_bootloader.bin"
bootloader = open(bootloader_bin, "rb")
lengthbootloader = os.path.getsize(bootloader_bin)
firmware_with_boothloader_bin = target[0].dir.path + '/firmware_with_bootloader.bin' # Append ${PROGNAME}.bin firmware after bootloader and save it as 'jgaurora_firmware.bin'
if os.path.exists(firmware_with_boothloader_bin): def addboot(source, target, env):
os.remove(firmware_with_boothloader_bin) from pathlib import Path
firmwareimage = open(firmware_with_boothloader_bin, "wb")
position = 0
while position < lengthbootloader:
byte = bootloader.read(1)
firmwareimage.write(byte)
position += 1
position = 0
while position < lengthfirmware:
byte = firmware.read(1)
firmwareimage.write(byte)
position += 1
bootloader.close()
firmware.close()
firmwareimage.close()
firmware_without_bootloader_bin = target[0].dir.path + '/firmware_for_sd_upload.bin' fw_path = Path(target[0].path)
if os.path.exists(firmware_without_bootloader_bin): fwb_path = fw_path.parent / 'firmware_with_bootloader.bin'
os.remove(firmware_without_bootloader_bin) with fwb_path.open("wb") as fwb_file:
os.rename(target[0].path, firmware_without_bootloader_bin) bl_path = Path("buildroot/share/PlatformIO/scripts/jgaurora_bootloader.bin")
#os.rename(target[0].dir.path+'/firmware_with_bootloader.bin', target[0].dir.path+'/firmware.bin') bl_file = bl_path.open("rb")
while True:
b = bl_file.read(1)
if b == b'': break
else: fwb_file.write(b)
marlin.add_post_action(addboot); with fw_path.open("rb") as fw_file:
while True:
b = fw_file.read(1)
if b == b'': break
else: fwb_file.write(b)
fws_path = Path(target[0].dir.path, 'firmware_for_sd_upload.bin')
if fws_path.exists():
fws_path.unlink()
fw_path.rename(fws_path)
import marlin
marlin.add_post_action(addboot);

View File

@@ -7,43 +7,41 @@
# #
import pioutil import pioutil
if pioutil.is_pio_build(): if pioutil.is_pio_build():
import os,marlin import os,marlin
Import("env")
from SCons.Script import DefaultEnvironment board = marlin.env.BoardConfig()
board = DefaultEnvironment().BoardConfig()
def encryptByte(byte): def encryptByte(byte):
byte = 0xFF & ((byte << 6) | (byte >> 2)) byte = 0xFF & ((byte << 6) | (byte >> 2))
i = 0x58 + byte i = 0x58 + byte
j = 0x05 + byte + (i >> 8) j = 0x05 + byte + (i >> 8)
byte = (0xF8 & i) | (0x07 & j) byte = (0xF8 & i) | (0x07 & j)
return byte return byte
def encrypt_file(input, output_file, file_length): def encrypt_file(input, output_file, file_length):
input_file = bytearray(input.read()) input_file = bytearray(input.read())
for i in range(len(input_file)): for i in range(len(input_file)):
input_file[i] = encryptByte(input_file[i]) input_file[i] = encryptByte(input_file[i])
output_file.write(input_file) output_file.write(input_file)
# Encrypt ${PROGNAME}.bin and save it with the name given in build.crypt_lerdge # Encrypt ${PROGNAME}.bin and save it with the name given in build.crypt_lerdge
def encrypt(source, target, env): def encrypt(source, target, env):
fwpath = target[0].path fwpath = target[0].path
enname = board.get("build.crypt_lerdge") enname = board.get("build.crypt_lerdge")
print("Encrypting %s to %s" % (fwpath, enname)) print("Encrypting %s to %s" % (fwpath, enname))
fwfile = open(fwpath, "rb") fwfile = open(fwpath, "rb")
enfile = open(target[0].dir.path + "/" + enname, "wb") enfile = open(target[0].dir.path + "/" + enname, "wb")
length = os.path.getsize(fwpath) length = os.path.getsize(fwpath)
encrypt_file(fwfile, enfile, length) encrypt_file(fwfile, enfile, length)
fwfile.close() fwfile.close()
enfile.close() enfile.close()
os.remove(fwpath) os.remove(fwpath)
if 'crypt_lerdge' in board.get("build").keys(): if 'crypt_lerdge' in board.get("build").keys():
if board.get("build.crypt_lerdge") != "": if board.get("build.crypt_lerdge") != "":
marlin.add_post_action(encrypt) marlin.add_post_action(encrypt)
else: else:
print("LERDGE builds require output file via board_build.crypt_lerdge = 'filename' parameter") print("LERDGE builds require output file via board_build.crypt_lerdge = 'filename' parameter")
exit(1) exit(1)

View File

@@ -2,74 +2,72 @@
# marlin.py # marlin.py
# Helper module with some commonly-used functions # Helper module with some commonly-used functions
# #
import os,shutil import shutil
from pathlib import Path
from SCons.Script import DefaultEnvironment from SCons.Script import DefaultEnvironment
env = DefaultEnvironment() env = DefaultEnvironment()
from os.path import join
def copytree(src, dst, symlinks=False, ignore=None): def copytree(src, dst, symlinks=False, ignore=None):
for item in os.listdir(src): for item in src.iterdir():
s = join(src, item) if item.is_dir():
d = join(dst, item) shutil.copytree(item, dst / item.name, symlinks, ignore)
if os.path.isdir(s): else:
shutil.copytree(s, d, symlinks, ignore) shutil.copy2(item, dst / item.name)
else:
shutil.copy2(s, d)
def replace_define(field, value): def replace_define(field, value):
for define in env['CPPDEFINES']: envdefs = env['CPPDEFINES'].copy()
if define[0] == field: for define in envdefs:
env['CPPDEFINES'].remove(define) if define[0] == field:
env['CPPDEFINES'].append((field, value)) env['CPPDEFINES'].remove(define)
env['CPPDEFINES'].append((field, value))
# Relocate the firmware to a new address, such as "0x08005000" # Relocate the firmware to a new address, such as "0x08005000"
def relocate_firmware(address): def relocate_firmware(address):
replace_define("VECT_TAB_ADDR", address) replace_define("VECT_TAB_ADDR", address)
# Relocate the vector table with a new offset # Relocate the vector table with a new offset
def relocate_vtab(address): def relocate_vtab(address):
replace_define("VECT_TAB_OFFSET", address) replace_define("VECT_TAB_OFFSET", address)
# Replace the existing -Wl,-T with the given ldscript path # Replace the existing -Wl,-T with the given ldscript path
def custom_ld_script(ldname): def custom_ld_script(ldname):
apath = os.path.abspath("buildroot/share/PlatformIO/ldscripts/" + ldname) apath = str(Path("buildroot/share/PlatformIO/ldscripts", ldname).resolve())
for i, flag in enumerate(env["LINKFLAGS"]): for i, flag in enumerate(env["LINKFLAGS"]):
if "-Wl,-T" in flag: if "-Wl,-T" in flag:
env["LINKFLAGS"][i] = "-Wl,-T" + apath env["LINKFLAGS"][i] = "-Wl,-T" + apath
elif flag == "-T": elif flag == "-T":
env["LINKFLAGS"][i + 1] = apath env["LINKFLAGS"][i + 1] = apath
# Encrypt ${PROGNAME}.bin and save it with a new name. This applies (mostly) to MKS boards # Encrypt ${PROGNAME}.bin and save it with a new name. This applies (mostly) to MKS boards
# This PostAction is set up by offset_and_rename.py for envs with 'build.encrypt_mks'. # This PostAction is set up by offset_and_rename.py for envs with 'build.encrypt_mks'.
def encrypt_mks(source, target, env, new_name): def encrypt_mks(source, target, env, new_name):
import sys import sys
key = [0xA3, 0xBD, 0xAD, 0x0D, 0x41, 0x11, 0xBB, 0x8D, 0xDC, 0x80, 0x2D, 0xD0, 0xD2, 0xC4, 0x9B, 0x1E, 0x26, 0xEB, 0xE3, 0x33, 0x4A, 0x15, 0xE4, 0x0A, 0xB3, 0xB1, 0x3C, 0x93, 0xBB, 0xAF, 0xF7, 0x3E] key = [0xA3, 0xBD, 0xAD, 0x0D, 0x41, 0x11, 0xBB, 0x8D, 0xDC, 0x80, 0x2D, 0xD0, 0xD2, 0xC4, 0x9B, 0x1E, 0x26, 0xEB, 0xE3, 0x33, 0x4A, 0x15, 0xE4, 0x0A, 0xB3, 0xB1, 0x3C, 0x93, 0xBB, 0xAF, 0xF7, 0x3E]
# If FIRMWARE_BIN is defined by config, override all # If FIRMWARE_BIN is defined by config, override all
mf = env["MARLIN_FEATURES"] mf = env["MARLIN_FEATURES"]
if "FIRMWARE_BIN" in mf: new_name = mf["FIRMWARE_BIN"] if "FIRMWARE_BIN" in mf: new_name = mf["FIRMWARE_BIN"]
fwpath = target[0].path fwpath = Path(target[0].path)
fwfile = open(fwpath, "rb") fwfile = fwpath.open("rb")
enfile = open(target[0].dir.path + "/" + new_name, "wb") enfile = Path(target[0].dir.path, new_name).open("wb")
length = os.path.getsize(fwpath) length = fwpath.stat().st_size
position = 0 position = 0
try: try:
while position < length: while position < length:
byte = fwfile.read(1) byte = fwfile.read(1)
if position >= 320 and position < 31040: if 320 <= position < 31040:
byte = chr(ord(byte) ^ key[position & 31]) byte = chr(ord(byte) ^ key[position & 31])
if sys.version_info[0] > 2: if sys.version_info[0] > 2:
byte = bytes(byte, 'latin1') byte = bytes(byte, 'latin1')
enfile.write(byte) enfile.write(byte)
position += 1 position += 1
finally: finally:
fwfile.close() fwfile.close()
enfile.close() enfile.close()
os.remove(fwpath) fwpath.unlink()
def add_post_action(action): def add_post_action(action):
env.AddPostAction(join("$BUILD_DIR", "${PROGNAME}.bin"), action); env.AddPostAction(str(Path("$BUILD_DIR", "${PROGNAME}.bin")), action);

View File

@@ -5,65 +5,64 @@
import json import json
import sys import sys
import shutil import shutil
import re
opt_output = '--opt' in sys.argv opt_output = '--opt' in sys.argv
output_suffix = '.sh' if opt_output else '' if '--bare-output' in sys.argv else '.gen' output_suffix = '.sh' if opt_output else '' if '--bare-output' in sys.argv else '.gen'
try: try:
with open('marlin_config.json', 'r') as infile: with open('marlin_config.json', 'r') as infile:
conf = json.load(infile) conf = json.load(infile)
for key in conf: for key in conf:
# We don't care about the hash when restoring here # We don't care about the hash when restoring here
if key == '__INITIAL_HASH': if key == '__INITIAL_HASH':
continue continue
if key == 'VERSION': if key == 'VERSION':
for k, v in sorted(conf[key].items()): for k, v in sorted(conf[key].items()):
print(k + ': ' + v) print(k + ': ' + v)
continue continue
# The key is the file name, so let's build it now # The key is the file name, so let's build it now
outfile = open('Marlin/' + key + output_suffix, 'w') outfile = open('Marlin/' + key + output_suffix, 'w')
for k, v in sorted(conf[key].items()): for k, v in sorted(conf[key].items()):
# Make define line now # Make define line now
if opt_output: if opt_output:
if v != '': if v != '':
if '"' in v: if '"' in v:
v = "'%s'" % v v = "'%s'" % v
elif ' ' in v: elif ' ' in v:
v = '"%s"' % v v = '"%s"' % v
define = 'opt_set ' + k + ' ' + v + '\n' define = 'opt_set ' + k + ' ' + v + '\n'
else: else:
define = 'opt_enable ' + k + '\n' define = 'opt_enable ' + k + '\n'
else: else:
define = '#define ' + k + ' ' + v + '\n' define = '#define ' + k + ' ' + v + '\n'
outfile.write(define) outfile.write(define)
outfile.close() outfile.close()
# Try to apply changes to the actual configuration file (in order to keep useful comments) # Try to apply changes to the actual configuration file (in order to keep useful comments)
if output_suffix != '': if output_suffix != '':
# Move the existing configuration so it doesn't interfere # Move the existing configuration so it doesn't interfere
shutil.move('Marlin/' + key, 'Marlin/' + key + '.orig') shutil.move('Marlin/' + key, 'Marlin/' + key + '.orig')
infile_lines = open('Marlin/' + key + '.orig', 'r').read().split('\n') infile_lines = open('Marlin/' + key + '.orig', 'r').read().split('\n')
outfile = open('Marlin/' + key, 'w') outfile = open('Marlin/' + key, 'w')
for line in infile_lines: for line in infile_lines:
sline = line.strip(" \t\n\r") sline = line.strip(" \t\n\r")
if sline[:7] == "#define": if sline[:7] == "#define":
# Extract the key here (we don't care about the value) # Extract the key here (we don't care about the value)
kv = sline[8:].strip().split(' ') kv = sline[8:].strip().split(' ')
if kv[0] in conf[key]: if kv[0] in conf[key]:
outfile.write('#define ' + kv[0] + ' ' + conf[key][kv[0]] + '\n') outfile.write('#define ' + kv[0] + ' ' + conf[key][kv[0]] + '\n')
# Remove the key from the dict, so we can still write all missing keys at the end of the file # Remove the key from the dict, so we can still write all missing keys at the end of the file
del conf[key][kv[0]] del conf[key][kv[0]]
else: else:
outfile.write(line + '\n') outfile.write(line + '\n')
else: else:
outfile.write(line + '\n') outfile.write(line + '\n')
# Process any remaining defines here # Process any remaining defines here
for k, v in sorted(conf[key].items()): for k, v in sorted(conf[key].items()):
define = '#define ' + k + ' ' + v + '\n' define = '#define ' + k + ' ' + v + '\n'
outfile.write(define) outfile.write(define)
outfile.close() outfile.close()
print('Output configuration written to: ' + 'Marlin/' + key + output_suffix) print('Output configuration written to: ' + 'Marlin/' + key + output_suffix)
except: except:
print('No marlin_config.json found.') print('No marlin_config.json found.')

View File

@@ -2,61 +2,64 @@
# offset_and_rename.py # offset_and_rename.py
# #
# - If 'build.offset' is provided, either by JSON or by the environment... # - If 'build.offset' is provided, either by JSON or by the environment...
# - Set linker flag LD_FLASH_OFFSET and relocate the VTAB based on 'build.offset'. # - Set linker flag LD_FLASH_OFFSET and relocate the VTAB based on 'build.offset'.
# - Set linker flag LD_MAX_DATA_SIZE based on 'build.maximum_ram_size'. # - Set linker flag LD_MAX_DATA_SIZE based on 'build.maximum_ram_size'.
# - Define STM32_FLASH_SIZE from 'upload.maximum_size' for use by Flash-based EEPROM emulation. # - Define STM32_FLASH_SIZE from 'upload.maximum_size' for use by Flash-based EEPROM emulation.
# #
# - For 'board_build.rename' add a post-action to rename the firmware file. # - For 'board_build.rename' add a post-action to rename the firmware file.
# #
import pioutil import pioutil
if pioutil.is_pio_build(): if pioutil.is_pio_build():
import os,sys,marlin import marlin
Import("env")
from SCons.Script import DefaultEnvironment env = marlin.env
board = DefaultEnvironment().BoardConfig() board = env.BoardConfig()
board_keys = board.get("build").keys()
board_keys = board.get("build").keys() #
# For build.offset define LD_FLASH_OFFSET, used by ldscript.ld
#
if 'offset' in board_keys:
LD_FLASH_OFFSET = board.get("build.offset")
marlin.relocate_vtab(LD_FLASH_OFFSET)
# # Flash size
# For build.offset define LD_FLASH_OFFSET, used by ldscript.ld maximum_flash_size = int(board.get("upload.maximum_size") / 1024)
# marlin.replace_define('STM32_FLASH_SIZE', maximum_flash_size)
if 'offset' in board_keys:
LD_FLASH_OFFSET = board.get("build.offset")
marlin.relocate_vtab(LD_FLASH_OFFSET)
# Flash size # Get upload.maximum_ram_size (defined by /buildroot/share/PlatformIO/boards/VARIOUS.json)
maximum_flash_size = int(board.get("upload.maximum_size") / 1024) maximum_ram_size = board.get("upload.maximum_ram_size")
marlin.replace_define('STM32_FLASH_SIZE', maximum_flash_size)
# Get upload.maximum_ram_size (defined by /buildroot/share/PlatformIO/boards/VARIOUS.json) for i, flag in enumerate(env["LINKFLAGS"]):
maximum_ram_size = board.get("upload.maximum_ram_size") if "-Wl,--defsym=LD_FLASH_OFFSET" in flag:
env["LINKFLAGS"][i] = "-Wl,--defsym=LD_FLASH_OFFSET=" + LD_FLASH_OFFSET
if "-Wl,--defsym=LD_MAX_DATA_SIZE" in flag:
env["LINKFLAGS"][i] = "-Wl,--defsym=LD_MAX_DATA_SIZE=" + str(maximum_ram_size - 40)
for i, flag in enumerate(env["LINKFLAGS"]): #
if "-Wl,--defsym=LD_FLASH_OFFSET" in flag: # For build.encrypt_mks rename and encode the firmware file.
env["LINKFLAGS"][i] = "-Wl,--defsym=LD_FLASH_OFFSET=" + LD_FLASH_OFFSET #
if "-Wl,--defsym=LD_MAX_DATA_SIZE" in flag: if 'encrypt_mks' in board_keys:
env["LINKFLAGS"][i] = "-Wl,--defsym=LD_MAX_DATA_SIZE=" + str(maximum_ram_size - 40)
# # Encrypt ${PROGNAME}.bin and save it with the name given in build.encrypt_mks
# For build.encrypt_mks rename and encode the firmware file. def encrypt(source, target, env):
# marlin.encrypt_mks(source, target, env, board.get("build.encrypt_mks"))
if 'encrypt_mks' in board_keys:
# Encrypt ${PROGNAME}.bin and save it with the name given in build.encrypt_mks if board.get("build.encrypt_mks") != "":
def encrypt(source, target, env): marlin.add_post_action(encrypt)
marlin.encrypt_mks(source, target, env, board.get("build.encrypt_mks"))
if board.get("build.encrypt_mks") != "": #
marlin.add_post_action(encrypt) # For build.rename simply rename the firmware file.
#
if 'rename' in board_keys:
# # If FIRMWARE_BIN is defined by config, override all
# For build.rename simply rename the firmware file. mf = env["MARLIN_FEATURES"]
# if "FIRMWARE_BIN" in mf: new_name = mf["FIRMWARE_BIN"]
if 'rename' in board_keys: else: new_name = board.get("build.rename")
def rename_target(source, target, env): def rename_target(source, target, env):
firmware = os.path.join(target[0].dir.path, board.get("build.rename")) from pathlib import Path
os.replace(target[0].path, firmware) Path(target[0].path).replace(Path(target[0].dir.path, new_name))
marlin.add_post_action(rename_target) marlin.add_post_action(rename_target)

View File

@@ -3,18 +3,17 @@
# #
import pioutil import pioutil
if pioutil.is_pio_build(): if pioutil.is_pio_build():
import os,sys from os.path import join
from os.path import join
Import("env") Import("env")
board = env.BoardConfig() board = env.BoardConfig()
board_keys = board.get("build").keys() board_keys = board.get("build").keys()
if 'encode' in board_keys: if 'encode' in board_keys:
env.AddPostAction( env.AddPostAction(
join("$BUILD_DIR", "${PROGNAME}.bin"), join("$BUILD_DIR", "${PROGNAME}.bin"),
env.VerboseAction(" ".join([ env.VerboseAction(" ".join([
"$OBJCOPY", "-O", "srec", "$OBJCOPY", "-O", "srec",
"\"$BUILD_DIR/${PROGNAME}.elf\"", "\"" + join("$BUILD_DIR", board.get("build.encode")) + "\"" "\"$BUILD_DIR/${PROGNAME}.elf\"", "\"" + join("$BUILD_DIR", board.get("build.encode")) + "\""
]), "Building " + board.get("build.encode")) ]), "Building " + board.get("build.encode"))
) )

View File

@@ -4,10 +4,11 @@
# Make sure 'vscode init' is not the current command # Make sure 'vscode init' is not the current command
def is_pio_build(): def is_pio_build():
from SCons.Script import DefaultEnvironment from SCons.Script import DefaultEnvironment
env = DefaultEnvironment() env = DefaultEnvironment()
return not env.IsIntegrationDump() if "IsCleanTarget" in dir(env) and env.IsCleanTarget(): return False
return not env.IsIntegrationDump()
def get_pio_version(): def get_pio_version():
from platformio import util from platformio import util
return util.pioversion_to_intstr() return util.pioversion_to_intstr()

View File

@@ -5,115 +5,134 @@
import pioutil import pioutil
if pioutil.is_pio_build(): if pioutil.is_pio_build():
import os,re,sys import re,sys
Import("env") from pathlib import Path
Import("env")
def get_envs_for_board(board): def get_envs_for_board(board):
with open(os.path.join("Marlin", "src", "pins", "pins.h"), "r") as file: ppath = Path("Marlin/src/pins/pins.h")
with ppath.open() as file:
if sys.platform == 'win32': if sys.platform == 'win32':
envregex = r"(?:env|win):" envregex = r"(?:env|win):"
elif sys.platform == 'darwin': elif sys.platform == 'darwin':
envregex = r"(?:env|mac|uni):" envregex = r"(?:env|mac|uni):"
elif sys.platform == 'linux': elif sys.platform == 'linux':
envregex = r"(?:env|lin|uni):" envregex = r"(?:env|lin|uni):"
else: else:
envregex = r"(?:env):" envregex = r"(?:env):"
r = re.compile(r"if\s+MB\((.+)\)") r = re.compile(r"if\s+MB\((.+)\)")
if board.startswith("BOARD_"): if board.startswith("BOARD_"):
board = board[6:] board = board[6:]
for line in file: for line in file:
mbs = r.findall(line) mbs = r.findall(line)
if mbs and board in re.split(r",\s*", mbs[0]): if mbs and board in re.split(r",\s*", mbs[0]):
line = file.readline() line = file.readline()
found_envs = re.match(r"\s*#include .+" + envregex, line) found_envs = re.match(r"\s*#include .+" + envregex, line)
if found_envs: if found_envs:
envlist = re.findall(envregex + r"(\w+)", line) envlist = re.findall(envregex + r"(\w+)", line)
return [ "env:"+s for s in envlist ] return [ "env:"+s for s in envlist ]
return [] return []
def check_envs(build_env, board_envs, config): def check_envs(build_env, board_envs, config):
if build_env in board_envs: if build_env in board_envs:
return True return True
ext = config.get(build_env, 'extends', default=None) ext = config.get(build_env, 'extends', default=None)
if ext: if ext:
if isinstance(ext, str): if isinstance(ext, str):
return check_envs(ext, board_envs, config) return check_envs(ext, board_envs, config)
elif isinstance(ext, list): elif isinstance(ext, list):
for ext_env in ext: for ext_env in ext:
if check_envs(ext_env, board_envs, config): if check_envs(ext_env, board_envs, config):
return True return True
return False return False
def sanity_check_target(): def sanity_check_target():
# Sanity checks: # Sanity checks:
if 'PIOENV' not in env: if 'PIOENV' not in env:
raise SystemExit("Error: PIOENV is not defined. This script is intended to be used with PlatformIO") raise SystemExit("Error: PIOENV is not defined. This script is intended to be used with PlatformIO")
if 'MARLIN_FEATURES' not in env: # Require PlatformIO 6.1.1 or later
raise SystemExit("Error: this script should be used after common Marlin scripts") vers = pioutil.get_pio_version()
if vers < [6, 1, 1]:
raise SystemExit("Error: Marlin requires PlatformIO >= 6.1.1. Use 'pio upgrade' to get a newer version.")
if 'MOTHERBOARD' not in env['MARLIN_FEATURES']: if 'MARLIN_FEATURES' not in env:
raise SystemExit("Error: MOTHERBOARD is not defined in Configuration.h") raise SystemExit("Error: this script should be used after common Marlin scripts.")
build_env = env['PIOENV'] if len(env['MARLIN_FEATURES']) == 0:
motherboard = env['MARLIN_FEATURES']['MOTHERBOARD'] raise SystemExit("Error: Failed to parse Marlin features. See previous error messages.")
board_envs = get_envs_for_board(motherboard)
config = env.GetProjectConfig()
result = check_envs("env:"+build_env, board_envs, config)
if not result: build_env = env['PIOENV']
err = "Error: Build environment '%s' is incompatible with %s. Use one of these: %s" % \ motherboard = env['MARLIN_FEATURES']['MOTHERBOARD']
( build_env, motherboard, ", ".join([ e[4:] for e in board_envs if e.startswith("env:") ]) ) board_envs = get_envs_for_board(motherboard)
raise SystemExit(err) config = env.GetProjectConfig()
result = check_envs("env:"+build_env, board_envs, config)
# if not result:
# Check for Config files in two common incorrect places err = "Error: Build environment '%s' is incompatible with %s. Use one of these: %s" % \
# ( build_env, motherboard, ", ".join([ e[4:] for e in board_envs if e.startswith("env:") ]) )
for p in [ env['PROJECT_DIR'], os.path.join(env['PROJECT_DIR'], "config") ]: raise SystemExit(err)
for f in [ "Configuration.h", "Configuration_adv.h" ]:
if os.path.isfile(os.path.join(p, f)):
err = "ERROR: Config files found in directory %s. Please move them into the Marlin subfolder." % p
raise SystemExit(err)
# #
# Find the name.cpp.o or name.o and remove it # Check for Config files in two common incorrect places
# #
def rm_ofile(subdir, name): epath = Path(env['PROJECT_DIR'])
build_dir = os.path.join(env['PROJECT_BUILD_DIR'], build_env); for p in [ epath, epath / "config" ]:
for outdir in [ build_dir, os.path.join(build_dir, "debug") ]: for f in ("Configuration.h", "Configuration_adv.h"):
for ext in [ ".cpp.o", ".o" ]: if (p / f).is_file():
fpath = os.path.join(outdir, "src", "src", subdir, name + ext) err = "ERROR: Config files found in directory %s. Please move them into the Marlin subfolder." % p
if os.path.exists(fpath): raise SystemExit(err)
os.remove(fpath)
# #
# Give warnings on every build # Find the name.cpp.o or name.o and remove it
# #
rm_ofile("inc", "Warnings") def rm_ofile(subdir, name):
build_dir = Path(env['PROJECT_BUILD_DIR'], build_env);
for outdir in (build_dir, build_dir / "debug"):
for ext in (".cpp.o", ".o"):
fpath = outdir / "src/src" / subdir / (name + ext)
if fpath.exists():
fpath.unlink()
# #
# Rebuild 'settings.cpp' for EEPROM_INIT_NOW # Give warnings on every build
# #
if 'EEPROM_INIT_NOW' in env['MARLIN_FEATURES']: rm_ofile("inc", "Warnings")
rm_ofile("module", "settings")
# #
# Check for old files indicating an entangled Marlin (mixing old and new code) # Rebuild 'settings.cpp' for EEPROM_INIT_NOW
# #
mixedin = [] if 'EEPROM_INIT_NOW' in env['MARLIN_FEATURES']:
p = os.path.join(env['PROJECT_DIR'], "Marlin", "src", "lcd", "dogm") rm_ofile("module", "settings")
for f in [ "ultralcd_DOGM.cpp", "ultralcd_DOGM.h" ]:
if os.path.isfile(os.path.join(p, f)):
mixedin += [ f ]
p = os.path.join(env['PROJECT_DIR'], "Marlin", "src", "feature", "bedlevel", "abl")
for f in [ "abl.cpp", "abl.h" ]:
if os.path.isfile(os.path.join(p, f)):
mixedin += [ f ]
if mixedin:
err = "ERROR: Old files fell into your Marlin folder. Remove %s and try again" % ", ".join(mixedin)
raise SystemExit(err)
sanity_check_target() #
# Check for old files indicating an entangled Marlin (mixing old and new code)
#
mixedin = []
p = Path(env['PROJECT_DIR'], "Marlin/src/lcd/dogm")
for f in [ "ultralcd_DOGM.cpp", "ultralcd_DOGM.h" ]:
if (p / f).is_file():
mixedin += [ f ]
p = Path(env['PROJECT_DIR'], "Marlin/src/feature/bedlevel/abl")
for f in [ "abl.cpp", "abl.h" ]:
if (p / f).is_file():
mixedin += [ f ]
if mixedin:
err = "ERROR: Old files fell into your Marlin folder. Remove %s and try again" % ", ".join(mixedin)
raise SystemExit(err)
#
# Check FILAMENT_RUNOUT_SCRIPT has a %c parammeter when required
#
if 'FILAMENT_RUNOUT_SENSOR' in env['MARLIN_FEATURES'] and 'NUM_RUNOUT_SENSORS' in env['MARLIN_FEATURES']:
if env['MARLIN_FEATURES']['NUM_RUNOUT_SENSORS'].isdigit() and int(env['MARLIN_FEATURES']['NUM_RUNOUT_SENSORS']) > 1:
if 'FILAMENT_RUNOUT_SCRIPT' in env['MARLIN_FEATURES']:
frs = env['MARLIN_FEATURES']['FILAMENT_RUNOUT_SCRIPT']
if "M600" in frs and "%c" not in frs:
err = "ERROR: FILAMENT_RUNOUT_SCRIPT needs a %c parameter (e.g., \"M600 T%c\") when NUM_RUNOUT_SENSORS is > 1"
raise SystemExit(err)
sanity_check_target()

View File

@@ -1,14 +1,14 @@
# #
# preprocessor.py # preprocessor.py
# #
import subprocess,os,re import subprocess
nocache = 1 nocache = 1
verbose = 0 verbose = 0
def blab(str): def blab(str):
if verbose: if verbose:
print(str) print(str)
################################################################################ ################################################################################
# #
@@ -16,36 +16,36 @@ def blab(str):
# #
preprocessor_cache = {} preprocessor_cache = {}
def run_preprocessor(env, fn=None): def run_preprocessor(env, fn=None):
filename = fn or 'buildroot/share/PlatformIO/scripts/common-dependencies.h' filename = fn or 'buildroot/share/PlatformIO/scripts/common-dependencies.h'
if filename in preprocessor_cache: if filename in preprocessor_cache:
return preprocessor_cache[filename] return preprocessor_cache[filename]
# Process defines # Process defines
build_flags = env.get('BUILD_FLAGS') build_flags = env.get('BUILD_FLAGS')
build_flags = env.ParseFlagsExtended(build_flags) build_flags = env.ParseFlagsExtended(build_flags)
cxx = search_compiler(env) cxx = search_compiler(env)
cmd = ['"' + cxx + '"'] cmd = ['"' + cxx + '"']
# Build flags from board.json # Build flags from board.json
#if 'BOARD' in env: #if 'BOARD' in env:
# cmd += [env.BoardConfig().get("build.extra_flags")] # cmd += [env.BoardConfig().get("build.extra_flags")]
for s in build_flags['CPPDEFINES']: for s in build_flags['CPPDEFINES']:
if isinstance(s, tuple): if isinstance(s, tuple):
cmd += ['-D' + s[0] + '=' + str(s[1])] cmd += ['-D' + s[0] + '=' + str(s[1])]
else: else:
cmd += ['-D' + s] cmd += ['-D' + s]
cmd += ['-D__MARLIN_DEPS__ -w -dM -E -x c++'] cmd += ['-D__MARLIN_DEPS__ -w -dM -E -x c++']
depcmd = cmd + [ filename ] depcmd = cmd + [ filename ]
cmd = ' '.join(depcmd) cmd = ' '.join(depcmd)
blab(cmd) blab(cmd)
try: try:
define_list = subprocess.check_output(cmd, shell=True).splitlines() define_list = subprocess.check_output(cmd, shell=True).splitlines()
except: except:
define_list = {} define_list = {}
preprocessor_cache[filename] = define_list preprocessor_cache[filename] = define_list
return define_list return define_list
################################################################################ ################################################################################
@@ -54,51 +54,41 @@ def run_preprocessor(env, fn=None):
# #
def search_compiler(env): def search_compiler(env):
ENV_BUILD_PATH = os.path.join(env['PROJECT_BUILD_DIR'], env['PIOENV']) from pathlib import Path, PurePath
GCC_PATH_CACHE = os.path.join(ENV_BUILD_PATH, ".gcc_path")
try: ENV_BUILD_PATH = Path(env['PROJECT_BUILD_DIR'], env['PIOENV'])
filepath = env.GetProjectOption('custom_gcc') GCC_PATH_CACHE = ENV_BUILD_PATH / ".gcc_path"
blab("Getting compiler from env")
return filepath
except:
pass
# Warning: The cached .gcc_path will obscure a newly-installed toolkit try:
if not nocache and os.path.exists(GCC_PATH_CACHE): gccpath = env.GetProjectOption('custom_gcc')
blab("Getting g++ path from cache") blab("Getting compiler from env")
with open(GCC_PATH_CACHE, 'r') as f: return gccpath
return f.read() except:
pass
# Find the current platform compiler by searching the $PATH # Warning: The cached .gcc_path will obscure a newly-installed toolkit
# which will be in a platformio toolchain bin folder if not nocache and GCC_PATH_CACHE.exists():
path_regex = re.escape(env['PROJECT_PACKAGES_DIR']) blab("Getting g++ path from cache")
gcc = "g++" return GCC_PATH_CACHE.read_text()
if env['PLATFORM'] == 'win32':
path_separator = ';'
path_regex += r'.*\\bin'
gcc += ".exe"
else:
path_separator = ':'
path_regex += r'/.+/bin'
# Search for the compiler # Use any item in $PATH corresponding to a platformio toolchain bin folder
for pathdir in env['ENV']['PATH'].split(path_separator): path_separator = ':'
if not re.search(path_regex, pathdir, re.IGNORECASE): gcc_exe = '*g++'
continue if env['PLATFORM'] == 'win32':
for filepath in os.listdir(pathdir): path_separator = ';'
if not filepath.endswith(gcc): gcc_exe += ".exe"
continue
# Use entire path to not rely on env PATH
filepath = os.path.sep.join([pathdir, filepath])
# Cache the g++ path to no search always
if not nocache and os.path.exists(ENV_BUILD_PATH):
blab("Caching g++ for current env")
with open(GCC_PATH_CACHE, 'w+') as f:
f.write(filepath)
return filepath # Search for the compiler in PATH
for ppath in map(Path, env['ENV']['PATH'].split(path_separator)):
if ppath.match(env['PROJECT_PACKAGES_DIR'] + "/**/bin"):
for gpath in ppath.glob(gcc_exe):
gccpath = str(gpath.resolve())
# Cache the g++ path to no search always
if not nocache and ENV_BUILD_PATH.exists():
blab("Caching g++ for current env")
GCC_PATH_CACHE.write_text(gccpath)
return gccpath
filepath = env.get('CXX') gccpath = env.get('CXX')
blab("Couldn't find a compiler! Fallback to %s" % filepath) blab("Couldn't find a compiler! Fallback to %s" % gccpath)
return filepath return gccpath

View File

@@ -4,6 +4,6 @@
# #
import pioutil import pioutil
if pioutil.is_pio_build(): if pioutil.is_pio_build():
from datetime import datetime from datetime import datetime
Import("env") Import("env")
env['PROGNAME'] = datetime.now().strftime("firmware-%Y%m%d-%H%M%S") env['PROGNAME'] = datetime.now().strftime("firmware-%Y%m%d-%H%M%S")

View File

@@ -0,0 +1,421 @@
#!/usr/bin/env python3
#
# schema.py
#
# Used by signature.py via common-dependencies.py to generate a schema file during the PlatformIO build.
# This script can also be run standalone from within the Marlin repo to generate all schema files.
#
import re,json
from pathlib import Path
def extend_dict(d:dict, k:tuple):
if len(k) >= 1 and k[0] not in d:
d[k[0]] = {}
if len(k) >= 2 and k[1] not in d[k[0]]:
d[k[0]][k[1]] = {}
if len(k) >= 3 and k[2] not in d[k[0]][k[1]]:
d[k[0]][k[1]][k[2]] = {}
grouping_patterns = [
re.compile(r'^([XYZIJKUVW]|[XYZ]2|Z[34]|E[0-7])$'),
re.compile(r'^AXIS\d$'),
re.compile(r'^(MIN|MAX)$'),
re.compile(r'^[0-8]$'),
re.compile(r'^HOTEND[0-7]$'),
re.compile(r'^(HOTENDS|BED|PROBE|COOLER)$'),
re.compile(r'^[XYZIJKUVW]M(IN|AX)$')
]
# If the indexed part of the option name matches a pattern
# then add it to the dictionary.
def find_grouping(gdict, filekey, sectkey, optkey, pindex):
optparts = optkey.split('_')
if 1 < len(optparts) > pindex:
for patt in grouping_patterns:
if patt.match(optparts[pindex]):
subkey = optparts[pindex]
modkey = '_'.join(optparts)
optparts[pindex] = '*'
wildkey = '_'.join(optparts)
kkey = f'{filekey}|{sectkey}|{wildkey}'
if kkey not in gdict: gdict[kkey] = []
gdict[kkey].append((subkey, modkey))
# Build a list of potential groups. Only those with multiple items will be grouped.
def group_options(schema):
for pindex in range(10, -1, -1):
found_groups = {}
for filekey, f in schema.items():
for sectkey, s in f.items():
for optkey in s:
find_grouping(found_groups, filekey, sectkey, optkey, pindex)
fkeys = [ k for k in found_groups.keys() ]
for kkey in fkeys:
items = found_groups[kkey]
if len(items) > 1:
f, s, w = kkey.split('|')
extend_dict(schema, (f, s, w)) # Add wildcard group to schema
for subkey, optkey in items: # Add all items to wildcard group
schema[f][s][w][subkey] = schema[f][s][optkey] # Move non-wildcard item to wildcard group
del schema[f][s][optkey]
del found_groups[kkey]
# Extract all board names from boards.h
def load_boards():
bpath = Path("Marlin/src/core/boards.h")
if bpath.is_file():
with bpath.open() as bfile:
boards = []
for line in bfile:
if line.startswith("#define BOARD_"):
bname = line.split()[1]
if bname != "BOARD_UNKNOWN": boards.append(bname)
return "['" + "','".join(boards) + "']"
return ''
#
# Extract a schema from the current configuration files
#
def extract():
    """Parse Marlin/Configuration.h and Marlin/Configuration_adv.h and return
    a schema dict of the form { 'basic': {section: {name: info}}, 'advanced': ... }
    where each info dict describes one #define (name, value, type, section,
    enabling state, line number, serial id, conditions, comment, options).
    """
    # Load board names from boards.h
    boards = load_boards()

    # Parsing states for the line-by-line state machine
    class Parse:
        NORMAL = 0          # No condition yet
        BLOCK_COMMENT = 1   # Looking for the end of the block comment
        EOL_COMMENT = 2     # EOL comment started, maybe add the next comment?
        GET_SENSORS = 3     # Gathering temperature sensor options
        ERROR = 9           # Syntax error

    # List of files to process, with shorthand
    filekey = { 'Configuration.h':'basic', 'Configuration_adv.h':'advanced' }
    # A JSON object to store the data
    sch_out = { 'basic':{}, 'advanced':{} }
    # Regex for #define NAME [VALUE] [COMMENT] with sanitized line
    defgrep = re.compile(r'^(//)?\s*(#define)\s+([A-Za-z0-9_]+)\s*(.*?)\s*(//.+)?$')
    # Defines to ignore
    ignore = ('CONFIGURATION_H_VERSION', 'CONFIGURATION_ADV_H_VERSION', 'CONFIG_EXAMPLES_DIR', 'CONFIG_EXPORT')
    # Start with unknown state
    state = Parse.NORMAL
    # Serial ID, unique across both files
    sid = 0

    # Loop through files and parse them line by line
    for fn, fk in filekey.items():
        with Path("Marlin", fn).open() as fileobj:
            section = 'none'        # Current Settings section
            line_number = 0         # Counter for the line number of the file
            conditions = []         # Create a condition stack for the current file
            comment_buff = []       # A temporary buffer for comments
            options_json = ''       # A buffer for the most recent options JSON found
            eol_options = False     # The options came from end of line, so only apply once
            join_line = False       # A flag that the line should be joined with the previous one
            line = ''               # A line buffer to handle \ continuation
            last_added_ref = None   # Reference to the last added item
            # Loop through the lines in the file
            for the_line in fileobj.readlines():
                line_number += 1

                # Clean the line for easier parsing
                the_line = the_line.strip()

                if join_line:   # A previous line is being made longer
                    line += (' ' if line else '') + the_line
                else:           # Otherwise, start the line anew
                    line, line_start = the_line, line_number

                # If the resulting line ends with a \, don't process now.
                # Strip the end off. The next line will be joined with it.
                join_line = line.endswith("\\")
                if join_line:
                    line = line[:-1].strip()
                    continue
                else:
                    # NOTE(review): line_end is assigned but not read afterwards
                    line_end = line_number

                defmatch = defgrep.match(line)

                # Special handling for EOL comments after a #define.
                # At this point the #define is already digested and inserted,
                # so we have to extend it
                if state == Parse.EOL_COMMENT:
                    # If the line is not a comment, we're done with the EOL comment
                    if not defmatch and the_line.startswith('//'):
                        comment_buff.append(the_line[2:].strip())
                    else:
                        # NOTE(review): last_added_ref is assumed non-None here;
                        # EOL_COMMENT is only entered after a #define was added.
                        last_added_ref['comment'] = ' '.join(comment_buff)
                        comment_buff = []
                        state = Parse.NORMAL

                # Process a comment line: may carry magic ':' JSON options,
                # start a new '@section', or just be buffered comment text.
                def use_comment(c, opt, sec, bufref):
                    if c.startswith(':'):               # If the comment starts with : then it has magic JSON
                        d = c[1:].strip()               # Strip the leading :
                        cbr = c.rindex('}') if d.startswith('{') else c.rindex(']') if d.startswith('[') else 0
                        if cbr:
                            opt, cmt = c[1:cbr+1].strip(), c[cbr+1:].strip()
                            if cmt != '': bufref.append(cmt)
                        else:
                            opt = c[1:].strip()
                    elif c.startswith('@section'):      # Start a new section
                        sec = c[8:].strip()
                    elif not c.startswith('========'):
                        bufref.append(c)
                    return opt, sec

                # In a block comment, capture lines up to the end of the comment.
                # Assume nothing follows the comment closure.
                if state in (Parse.BLOCK_COMMENT, Parse.GET_SENSORS):
                    endpos = line.find('*/')
                    if endpos < 0:
                        cline = line
                    else:
                        cline, line = line[:endpos].strip(), line[endpos+2:].strip()

                        # Temperature sensors are done
                        if state == Parse.GET_SENSORS:
                            options_json = f'[ {options_json[:-2]} ]'

                        state = Parse.NORMAL

                    # Strip the leading '*' from block comments
                    if cline.startswith('*'): cline = cline[1:].strip()

                    # Collect temperature sensors into the options JSON buffer
                    if state == Parse.GET_SENSORS:
                        sens = re.match(r'^(-?\d+)\s*:\s*(.+)$', cline)
                        if sens:
                            s2 = sens[2].replace("'","''")
                            options_json += f"{sens[1]}:'{s2}', "

                    elif state == Parse.BLOCK_COMMENT:
                        # Look for temperature sensors
                        if cline == "Temperature sensors available:":
                            state, cline = Parse.GET_SENSORS, "Temperature Sensors"

                        options_json, section = use_comment(cline, options_json, section, comment_buff)

                # For the normal state we're looking for any non-blank line
                elif state == Parse.NORMAL:
                    # Skip a commented define when evaluating comment opening
                    st = 2 if re.match(r'^//\s*#define', line) else 0
                    cpos1 = line.find('/*')       # Start a block comment on the line?
                    cpos2 = line.find('//', st)   # Start an end of line comment on the line?

                    # Only the first comment starter gets evaluated
                    cpos = -1
                    if cpos1 != -1 and (cpos1 < cpos2 or cpos2 == -1):
                        cpos = cpos1
                        comment_buff = []
                        state = Parse.BLOCK_COMMENT
                        eol_options = False

                    elif cpos2 != -1 and (cpos2 < cpos1 or cpos1 == -1):
                        cpos = cpos2

                        # Comment after a define may be continued on the following lines
                        if defmatch != None and cpos > 10:
                            state = Parse.EOL_COMMENT
                            comment_buff = []

                    # Process the start of a new comment
                    if cpos != -1:
                        cline, line = line[cpos+2:].strip(), line[:cpos].strip()

                        if state == Parse.BLOCK_COMMENT:
                            # Strip leading '*' from block comments
                            if cline.startswith('*'): cline = cline[1:].strip()
                        else:
                            # Expire end-of-line options after first use
                            if cline.startswith(':'): eol_options = True

                        # Buffer a non-empty comment start
                        if cline != '':
                            options_json, section = use_comment(cline, options_json, section, comment_buff)

                    # If the line has nothing before the comment, go to the next line
                    if line == '':
                        options_json = ''
                        continue

                    # Parenthesize the given expression if needed
                    def atomize(s):
                        if s == '' \
                        or re.match(r'^[A-Za-z0-9_]*(\([^)]+\))?$', s) \
                        or re.match(r'^[A-Za-z0-9_]+ == \d+?$', s):
                            return s
                        return f'({s})'

                    #
                    # The conditions stack is an array containing condition-arrays.
                    # Each condition-array lists the conditions for the current block.
                    # IF/N/DEF adds a new condition-array to the stack.
                    # ELSE/ELIF/ENDIF pop the condition-array.
                    # ELSE/ELIF negate the last item in the popped condition-array.
                    # ELIF adds a new condition to the end of the array.
                    # ELSE/ELIF re-push the condition-array.
                    #
                    cparts = line.split()
                    iselif, iselse = cparts[0] == '#elif', cparts[0] == '#else'
                    if iselif or iselse or cparts[0] == '#endif':
                        if len(conditions) == 0:
                            raise Exception(f'no #if block at line {line_number}')

                        # Pop the last condition-array from the stack
                        prev = conditions.pop()

                        if iselif or iselse:
                            prev[-1] = '!' + prev[-1] # Invert the last condition
                            if iselif: prev.append(atomize(line[5:].strip()))
                            conditions.append(prev)

                    elif cparts[0] == '#if':
                        conditions.append([ atomize(line[3:].strip()) ])
                    elif cparts[0] == '#ifdef':
                        conditions.append([ f'defined({line[6:].strip()})' ])
                    elif cparts[0] == '#ifndef':
                        conditions.append([ f'!defined({line[7:].strip()})' ])

                    # Handle a complete #define line
                    elif defmatch != None:
                        # Get the match groups into vars
                        enabled, define_name, val = defmatch[1] == None, defmatch[3], defmatch[4]

                        # Increment the serial ID
                        sid += 1

                        # Create a new dictionary for the current #define
                        define_info = {
                            'section': section,
                            'name': define_name,
                            'enabled': enabled,
                            'line': line_start,
                            'sid': sid
                        }

                        # Type is based on the value
                        if val == '':
                            value_type = 'switch'
                        elif re.match(r'^(true|false)$', val):
                            value_type = 'bool'
                            val = val == 'true'
                        elif re.match(r'^[-+]?\s*\d+$', val):
                            value_type = 'int'
                            val = int(val)
                        elif re.match(r'[-+]?\s*(\d+\.|\d*\.\d+)([eE][-+]?\d+)?[fF]?', val):
                            value_type = 'float'
                            val = float(val.replace('f',''))
                        else:
                            value_type = 'string' if val[0] == '"' \
                                else 'char' if val[0] == "'" \
                                else 'state' if re.match(r'^(LOW|HIGH)$', val) \
                                else 'enum' if re.match(r'^[A-Za-z0-9_]{3,}$', val) \
                                else 'int[]' if re.match(r'^{(\s*[-+]?\s*\d+\s*(,\s*)?)+}$', val) \
                                else 'float[]' if re.match(r'^{(\s*[-+]?\s*(\d+\.|\d*\.\d+)([eE][-+]?\d+)?[fF]?\s*(,\s*)?)+}$', val) \
                                else 'array' if val[0] == '{' \
                                else ''

                        if val != '': define_info['value'] = val
                        if value_type != '': define_info['type'] = value_type

                        # Join up accumulated conditions with &&
                        if conditions: define_info['requires'] = ' && '.join(sum(conditions, []))

                        # If the comment_buff is not empty, add the comment to the info
                        if comment_buff:
                            full_comment = '\n'.join(comment_buff)

                            # An EOL comment will be added later
                            # The handling could go here instead of above
                            if state == Parse.EOL_COMMENT:
                                define_info['comment'] = ''
                            else:
                                define_info['comment'] = full_comment
                                comment_buff = []

                            # If the comment specifies units, add that to the info
                            units = re.match(r'^\(([^)]+)\)', full_comment)
                            if units:
                                units = units[1]
                                if units == 's' or units == 'sec': units = 'seconds'
                                define_info['units'] = units

                        # Set the options for the current #define
                        if define_name == "MOTHERBOARD" and boards != '':
                            define_info['options'] = boards
                        elif options_json != '':
                            define_info['options'] = options_json
                            if eol_options: options_json = ''

                        # Create section dict if it doesn't exist yet
                        if section not in sch_out[fk]: sch_out[fk][section] = {}

                        # If define has already been seen...
                        if define_name in sch_out[fk][section]:
                            info = sch_out[fk][section][define_name]
                            if isinstance(info, dict): info = [ info ]  # Convert a single dict into a list
                            info.append(define_info)                    # Add to the list
                            # NOTE(review): when the existing entry was a dict, the new
                            # list 'info' is never stored back into sch_out, so the
                            # appended item appears lost in that case — verify intent.
                        else:
                            # Add the define dict with name as key
                            sch_out[fk][section][define_name] = define_info

                        if state == Parse.EOL_COMMENT:
                            last_added_ref = define_info

    return sch_out
def dump_json(schema:dict, jpath:Path):
    """Serialize the schema to jpath as 2-space-indented JSON,
    preserving non-ASCII characters."""
    payload = json.dumps(schema, ensure_ascii=False, indent=2)
    jpath.write_text(payload)
def dump_yaml(schema:dict, ypath:Path):
    """Serialize the schema to ypath as block-style YAML.
    Requires the third-party PyYAML package."""
    import yaml
    text = yaml.dump(schema, default_flow_style=False, width=120, indent=2)
    ypath.write_text(text)
def main():
    """Command-line entry point.

    Extracts the schema from the configuration files and exports it in the
    format(s) selected by the first CLI argument: 'json', 'jsons', 'group',
    'yml'/'yaml', or 'some' (the default: JSON + YAML).
    """
    try:
        schema = extract()
    except Exception as exc:
        print("Error: " + str(exc))
        schema = None

    if not schema:
        return

    # Choose the export format from the first command line argument
    import sys
    arg = sys.argv[1] if len(sys.argv) > 1 else 'some'

    # JSON schema
    if arg in ['some', 'json', 'jsons']:
        print("Generating JSON ...")
        dump_json(schema, Path('schema.json'))

    # JSON schema (wildcard names)
    if arg in ['group', 'jsons']:
        group_options(schema)
        dump_json(schema, Path('schema_grouped.json'))

    # YAML
    if arg in ['some', 'yml', 'yaml']:
        try:
            import yaml
        except ImportError:
            # Try to install PyYAML on the fly, best-effort
            print("Installing YAML module ...")
            import subprocess
            try:
                subprocess.run(['python3', '-m', 'pip', 'install', 'pyyaml'])
                import yaml
            except:
                print("Failed to install YAML module")
                return
        print("Generating YML ...")
        dump_yaml(schema, Path('schema.yml'))

if __name__ == '__main__':
    main()

View File

@@ -1,7 +1,11 @@
# #
# signature.py # signature.py
# #
import os,subprocess,re,json,hashlib import schema
import subprocess,re,json,hashlib
from datetime import datetime
from pathlib import Path
# #
# Return all macro names in a header as an array, so we can take # Return all macro names in a header as an array, so we can take
@@ -12,32 +16,32 @@ import os,subprocess,re,json,hashlib
# resulting config.ini to produce more exact configuration files. # resulting config.ini to produce more exact configuration files.
# #
def extract_defines(filepath): def extract_defines(filepath):
f = open(filepath, encoding="utf8").read().split("\n") f = open(filepath, encoding="utf8").read().split("\n")
a = [] a = []
for line in f: for line in f:
sline = line.strip() sline = line.strip()
if sline[:7] == "#define": if sline[:7] == "#define":
# Extract the key here (we don't care about the value) # Extract the key here (we don't care about the value)
kv = sline[8:].strip().split() kv = sline[8:].strip().split()
a.append(kv[0]) a.append(kv[0])
return a return a
# Compute the SHA256 hash of a file # Compute the SHA256 hash of a file
def get_file_sha256sum(filepath): def get_file_sha256sum(filepath):
sha256_hash = hashlib.sha256() sha256_hash = hashlib.sha256()
with open(filepath,"rb") as f: with open(filepath,"rb") as f:
# Read and update hash string value in blocks of 4K # Read and update hash string value in blocks of 4K
for byte_block in iter(lambda: f.read(4096),b""): for byte_block in iter(lambda: f.read(4096),b""):
sha256_hash.update(byte_block) sha256_hash.update(byte_block)
return sha256_hash.hexdigest() return sha256_hash.hexdigest()
# #
# Compress a JSON file into a zip file # Compress a JSON file into a zip file
# #
import zipfile import zipfile
def compress_file(filepath, outputbase): def compress_file(filepath, outpath):
with zipfile.ZipFile(outputbase + '.zip', 'w', compression=zipfile.ZIP_BZIP2, compresslevel=9) as zipf: with zipfile.ZipFile(outpath, 'w', compression=zipfile.ZIP_BZIP2, compresslevel=9) as zipf:
zipf.write(filepath, compress_type=zipfile.ZIP_BZIP2, compresslevel=9) zipf.write(filepath, compress_type=zipfile.ZIP_BZIP2, compresslevel=9)
# #
# Compute the build signature. The idea is to extract all defines in the configuration headers # Compute the build signature. The idea is to extract all defines in the configuration headers
@@ -45,139 +49,228 @@ def compress_file(filepath, outputbase):
# We can reverse the signature to get a 1:1 equivalent configuration file # We can reverse the signature to get a 1:1 equivalent configuration file
# #
def compute_build_signature(env): def compute_build_signature(env):
if 'BUILD_SIGNATURE' in env: if 'BUILD_SIGNATURE' in env:
return return
# Definitions from these files will be kept # Definitions from these files will be kept
files_to_keep = [ 'Marlin/Configuration.h', 'Marlin/Configuration_adv.h' ] files_to_keep = [ 'Marlin/Configuration.h', 'Marlin/Configuration_adv.h' ]
build_dir = os.path.join(env['PROJECT_BUILD_DIR'], env['PIOENV']) build_path = Path(env['PROJECT_BUILD_DIR'], env['PIOENV'])
# Check if we can skip processing # Check if we can skip processing
hashes = '' hashes = ''
for header in files_to_keep: for header in files_to_keep:
hashes += get_file_sha256sum(header)[0:10] hashes += get_file_sha256sum(header)[0:10]
marlin_json = os.path.join(build_dir, 'marlin_config.json') marlin_json = build_path / 'marlin_config.json'
marlin_zip = os.path.join(build_dir, 'mc') marlin_zip = build_path / 'mc.zip'
# Read existing config file # Read existing config file
try: try:
with open(marlin_json, 'r') as infile: with marlin_json.open() as infile:
conf = json.load(infile) conf = json.load(infile)
if conf['__INITIAL_HASH'] == hashes: if conf['__INITIAL_HASH'] == hashes:
# Same configuration, skip recomputing the building signature # Same configuration, skip recomputing the building signature
compress_file(marlin_json, marlin_zip) compress_file(marlin_json, marlin_zip)
return return
except: except:
pass pass
# Get enabled config options based on preprocessor # Get enabled config options based on preprocessor
from preprocessor import run_preprocessor from preprocessor import run_preprocessor
complete_cfg = run_preprocessor(env) complete_cfg = run_preprocessor(env)
# Dumb #define extraction from the configuration files # Dumb #define extraction from the configuration files
conf_defines = {} conf_defines = {}
all_defines = [] all_defines = []
for header in files_to_keep: for header in files_to_keep:
defines = extract_defines(header) defines = extract_defines(header)
# To filter only the define we want # To filter only the define we want
all_defines += defines all_defines += defines
# To remember from which file it cames from # To remember from which file it cames from
conf_defines[header.split('/')[-1]] = defines conf_defines[header.split('/')[-1]] = defines
r = re.compile(r"\(+(\s*-*\s*_.*)\)+") r = re.compile(r"\(+(\s*-*\s*_.*)\)+")
# First step is to collect all valid macros # First step is to collect all valid macros
defines = {} defines = {}
for line in complete_cfg: for line in complete_cfg:
# Split the define from the value # Split the define from the value
key_val = line[8:].strip().decode().split(' ') key_val = line[8:].strip().decode().split(' ')
key, value = key_val[0], ' '.join(key_val[1:]) key, value = key_val[0], ' '.join(key_val[1:])
# Ignore values starting with two underscore, since it's low level # Ignore values starting with two underscore, since it's low level
if len(key) > 2 and key[0:2] == "__" : if len(key) > 2 and key[0:2] == "__" :
continue continue
# Ignore values containing a parenthesis (likely a function macro) # Ignore values containing a parenthesis (likely a function macro)
if '(' in key and ')' in key: if '(' in key and ')' in key:
continue continue
# Then filter dumb values # Then filter dumb values
if r.match(value): if r.match(value):
continue continue
defines[key] = value if len(value) else "" defines[key] = value if len(value) else ""
if not 'CONFIGURATION_EMBEDDING' in defines: #
return # Continue to gather data for CONFIGURATION_EMBEDDING or CONFIG_EXPORT
#
if not ('CONFIGURATION_EMBEDDING' in defines or 'CONFIG_EXPORT' in defines):
return
# Second step is to filter useless macro # Second step is to filter useless macro
resolved_defines = {} resolved_defines = {}
for key in defines: for key in defines:
# Remove all boards now # Remove all boards now
if key.startswith("BOARD_") and key != "BOARD_INFO_NAME": if key.startswith("BOARD_") and key != "BOARD_INFO_NAME":
continue continue
# Remove all keys ending by "_NAME" as it does not make a difference to the configuration # Remove all keys ending by "_NAME" as it does not make a difference to the configuration
if key.endswith("_NAME") and key != "CUSTOM_MACHINE_NAME": if key.endswith("_NAME") and key != "CUSTOM_MACHINE_NAME":
continue continue
# Remove all keys ending by "_T_DECLARED" as it's a copy of extraneous system stuff # Remove all keys ending by "_T_DECLARED" as it's a copy of extraneous system stuff
if key.endswith("_T_DECLARED"): if key.endswith("_T_DECLARED"):
continue continue
# Remove keys that are not in the #define list in the Configuration list # Remove keys that are not in the #define list in the Configuration list
if key not in all_defines + [ 'DETAILED_BUILD_VERSION', 'STRING_DISTRIBUTION_DATE' ]: if key not in all_defines + [ 'DETAILED_BUILD_VERSION', 'STRING_DISTRIBUTION_DATE' ]:
continue continue
# Don't be that smart guy here # Don't be that smart guy here
resolved_defines[key] = defines[key] resolved_defines[key] = defines[key]
# Generate a build signature now # Generate a build signature now
# We are making an object that's a bit more complex than a basic dictionary here # We are making an object that's a bit more complex than a basic dictionary here
data = {} data = {}
data['__INITIAL_HASH'] = hashes data['__INITIAL_HASH'] = hashes
# First create a key for each header here # First create a key for each header here
for header in conf_defines: for header in conf_defines:
data[header] = {} data[header] = {}
# Then populate the object where each key is going to (that's a O(N^2) algorithm here...) # Then populate the object where each key is going to (that's a O(N^2) algorithm here...)
for key in resolved_defines: for key in resolved_defines:
for header in conf_defines: for header in conf_defines:
if key in conf_defines[header]: if key in conf_defines[header]:
data[header][key] = resolved_defines[key] data[header][key] = resolved_defines[key]
# Append the source code version and date # Every python needs this toy
data['VERSION'] = {} def tryint(key):
data['VERSION']['DETAILED_BUILD_VERSION'] = resolved_defines['DETAILED_BUILD_VERSION'] try:
data['VERSION']['STRING_DISTRIBUTION_DATE'] = resolved_defines['STRING_DISTRIBUTION_DATE'] return int(defines[key])
try: except:
curver = subprocess.check_output(["git", "describe", "--match=NeVeRmAtCh", "--always"]).strip() return 0
data['VERSION']['GIT_REF'] = curver.decode()
except:
pass
# config_dump = tryint('CONFIG_EXPORT')
# Produce a JSON file for CONFIGURATION_EMBEDDING or CONFIG_DUMP > 0
#
with open(marlin_json, 'w') as outfile:
json.dump(data, outfile, separators=(',', ':'))
# Compress the JSON file as much as we can #
compress_file(marlin_json, marlin_zip) # Produce an INI file if CONFIG_EXPORT == 2
#
if config_dump == 2:
print("Generating config.ini ...")
config_ini = build_path / 'config.ini'
with config_ini.open('w') as outfile:
ignore = ('CONFIGURATION_H_VERSION', 'CONFIGURATION_ADV_H_VERSION', 'CONFIG_EXPORT')
filegrp = { 'Configuration.h':'config:basic', 'Configuration_adv.h':'config:advanced' }
vers = defines["CONFIGURATION_H_VERSION"]
dt_string = datetime.now().strftime("%Y-%m-%d at %H:%M:%S")
ini_fmt = '{0:40}{1}\n'
outfile.write(
'#\n'
+ '# Marlin Firmware\n'
+ '# config.ini - Options to apply before the build\n'
+ '#\n'
+ f'# Generated by Marlin build on {dt_string}\n'
+ '#\n'
+ '\n'
+ '[config:base]\n'
+ ini_fmt.format('ini_use_config', ' = all')
+ ini_fmt.format('ini_config_vers', f' = {vers}')
)
# Loop through the data array of arrays
for header in data:
if header.startswith('__'):
continue
outfile.write('\n[' + filegrp[header] + ']\n')
for key in sorted(data[header]):
if key not in ignore:
val = 'on' if data[header][key] == '' else data[header][key]
outfile.write(ini_fmt.format(key.lower(), ' = ' + val))
# Generate a C source file for storing this array #
with open('Marlin/src/mczip.h','wb') as result_file: # Produce a schema.json file if CONFIG_EXPORT == 3
result_file.write( #
b'#ifndef NO_CONFIGURATION_EMBEDDING_WARNING\n' if config_dump >= 3:
+ b' #warning "Generated file \'mc.zip\' is embedded (Define NO_CONFIGURATION_EMBEDDING_WARNING to suppress this warning.)"\n' try:
+ b'#endif\n' conf_schema = schema.extract()
+ b'const unsigned char mc_zip[] PROGMEM = {\n ' except Exception as exc:
) print("Error: " + str(exc))
count = 0 conf_schema = None
for b in open(os.path.join(build_dir, 'mc.zip'), 'rb').read():
result_file.write(b' 0x%02X,' % b) if conf_schema:
count += 1 #
if (count % 16 == 0): # Produce a schema.json file if CONFIG_EXPORT == 3
result_file.write(b'\n ') #
if (count % 16): if config_dump in (3, 13):
result_file.write(b'\n') print("Generating schema.json ...")
result_file.write(b'};\n') schema.dump_json(conf_schema, build_path / 'schema.json')
if config_dump == 13:
schema.group_options(conf_schema)
schema.dump_json(conf_schema, build_path / 'schema_grouped.json')
#
# Produce a schema.yml file if CONFIG_EXPORT == 4
#
elif config_dump == 4:
print("Generating schema.yml ...")
try:
import yaml
except ImportError:
env.Execute(env.VerboseAction(
'$PYTHONEXE -m pip install "pyyaml"',
"Installing YAML for schema.yml export",
))
import yaml
schema.dump_yaml(conf_schema, build_path / 'schema.yml')
# Append the source code version and date
data['VERSION'] = {}
data['VERSION']['DETAILED_BUILD_VERSION'] = resolved_defines['DETAILED_BUILD_VERSION']
data['VERSION']['STRING_DISTRIBUTION_DATE'] = resolved_defines['STRING_DISTRIBUTION_DATE']
try:
curver = subprocess.check_output(["git", "describe", "--match=NeVeRmAtCh", "--always"]).strip()
data['VERSION']['GIT_REF'] = curver.decode()
except:
pass
#
# Produce a JSON file for CONFIGURATION_EMBEDDING or CONFIG_EXPORT == 1
#
if config_dump == 1 or 'CONFIGURATION_EMBEDDING' in defines:
with marlin_json.open('w') as outfile:
json.dump(data, outfile, separators=(',', ':'))
#
# The rest only applies to CONFIGURATION_EMBEDDING
#
if not 'CONFIGURATION_EMBEDDING' in defines:
return
# Compress the JSON file as much as we can
compress_file(marlin_json, marlin_zip)
# Generate a C source file for storing this array
with open('Marlin/src/mczip.h','wb') as result_file:
result_file.write(
b'#ifndef NO_CONFIGURATION_EMBEDDING_WARNING\n'
+ b' #warning "Generated file \'mc.zip\' is embedded (Define NO_CONFIGURATION_EMBEDDING_WARNING to suppress this warning.)"\n'
+ b'#endif\n'
+ b'const unsigned char mc_zip[] PROGMEM = {\n '
)
count = 0
for b in (build_path / 'mc.zip').open('rb').read():
result_file.write(b' 0x%02X,' % b)
count += 1
if count % 16 == 0:
result_file.write(b'\n ')
if count % 16:
result_file.write(b'\n')
result_file.write(b'};\n')

View File

@@ -2,51 +2,52 @@
# simulator.py # simulator.py
# PlatformIO pre: script for simulator builds # PlatformIO pre: script for simulator builds
# #
import pioutil import pioutil
if pioutil.is_pio_build(): if pioutil.is_pio_build():
# Get the environment thus far for the build # Get the environment thus far for the build
Import("env") Import("env")
#print(env.Dump()) #print(env.Dump())
# #
# Give the binary a distinctive name # Give the binary a distinctive name
# #
env['PROGNAME'] = "MarlinSimulator" env['PROGNAME'] = "MarlinSimulator"
# #
# If Xcode is installed add the path to its Frameworks folder, # If Xcode is installed add the path to its Frameworks folder,
# or if Mesa is installed try to use its GL/gl.h. # or if Mesa is installed try to use its GL/gl.h.
# #
import sys import sys
if sys.platform == 'darwin': if sys.platform == 'darwin':
# #
# Silence half of the ranlib warnings. (No equivalent for 'ARFLAGS') # Silence half of the ranlib warnings. (No equivalent for 'ARFLAGS')
# #
env['RANLIBFLAGS'] += [ "-no_warning_for_no_symbols" ] env['RANLIBFLAGS'] += [ "-no_warning_for_no_symbols" ]
# Default paths for Xcode and a lucky GL/gl.h dropped by Mesa # Default paths for Xcode and a lucky GL/gl.h dropped by Mesa
xcode_path = "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks" xcode_path = "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks"
mesa_path = "/opt/local/include/GL/gl.h" mesa_path = "/opt/local/include/GL/gl.h"
import os.path import os.path
if os.path.exists(xcode_path): if os.path.exists(xcode_path):
env['BUILD_FLAGS'] += [ "-F" + xcode_path ] env['BUILD_FLAGS'] += [ "-F" + xcode_path ]
print("Using OpenGL framework headers from Xcode.app") print("Using OpenGL framework headers from Xcode.app")
elif os.path.exists(mesa_path): elif os.path.exists(mesa_path):
env['BUILD_FLAGS'] += [ '-D__MESA__' ] env['BUILD_FLAGS'] += [ '-D__MESA__' ]
print("Using OpenGL header from", mesa_path) print("Using OpenGL header from", mesa_path)
else: else:
print("\n\nNo OpenGL headers found. Install Xcode for matching headers, or use 'sudo port install mesa' to get a GL/gl.h.\n\n") print("\n\nNo OpenGL headers found. Install Xcode for matching headers, or use 'sudo port install mesa' to get a GL/gl.h.\n\n")
# Break out of the PIO build immediately # Break out of the PIO build immediately
sys.exit(1) sys.exit(1)

View File

@@ -3,59 +3,59 @@
# #
import pioutil import pioutil
if pioutil.is_pio_build(): if pioutil.is_pio_build():
Import("env") Import("env")
# Get a build flag's value or None # Get a build flag's value or None
def getBuildFlagValue(name): def getBuildFlagValue(name):
for flag in build_flags: for flag in build_flags:
if isinstance(flag, list) and flag[0] == name: if isinstance(flag, list) and flag[0] == name:
return flag[1] return flag[1]
return None return None
# Get an overriding buffer size for RX or TX from the build flags # Get an overriding buffer size for RX or TX from the build flags
def getInternalSize(side): def getInternalSize(side):
return getBuildFlagValue(f"MF_{side}_BUFFER_SIZE") or \ return getBuildFlagValue(f"MF_{side}_BUFFER_SIZE") or \
getBuildFlagValue(f"SERIAL_{side}_BUFFER_SIZE") or \ getBuildFlagValue(f"SERIAL_{side}_BUFFER_SIZE") or \
getBuildFlagValue(f"USART_{side}_BUF_SIZE") getBuildFlagValue(f"USART_{side}_BUF_SIZE")
# Get the largest defined buffer size for RX or TX # Get the largest defined buffer size for RX or TX
def getBufferSize(side, default): def getBufferSize(side, default):
# Get a build flag value or fall back to the given default # Get a build flag value or fall back to the given default
internal = int(getInternalSize(side) or default) internal = int(getInternalSize(side) or default)
flag = side + "_BUFFER_SIZE" flag = side + "_BUFFER_SIZE"
# Return the largest value # Return the largest value
return max(int(mf[flag]), internal) if flag in mf else internal return max(int(mf[flag]), internal) if flag in mf else internal
# Add a build flag if it's not already defined # Add a build flag if it's not already defined
def tryAddFlag(name, value): def tryAddFlag(name, value):
if getBuildFlagValue(name) is None: if getBuildFlagValue(name) is None:
env.Append(BUILD_FLAGS=[f"-D{name}={value}"]) env.Append(BUILD_FLAGS=[f"-D{name}={value}"])
# Marlin uses the `RX_BUFFER_SIZE` \ `TX_BUFFER_SIZE` options to # Marlin uses the `RX_BUFFER_SIZE` \ `TX_BUFFER_SIZE` options to
# configure buffer sizes for receiving \ transmitting serial data. # configure buffer sizes for receiving \ transmitting serial data.
# Stm32duino uses another set of defines for the same purpose, so this # Stm32duino uses another set of defines for the same purpose, so this
# script gets the values from the configuration and uses them to define # script gets the values from the configuration and uses them to define
# `SERIAL_RX_BUFFER_SIZE` and `SERIAL_TX_BUFFER_SIZE` as global build # `SERIAL_RX_BUFFER_SIZE` and `SERIAL_TX_BUFFER_SIZE` as global build
# flags so they are available for use by the platform. # flags so they are available for use by the platform.
# #
# The script will set the value as the default one (64 bytes) # The script will set the value as the default one (64 bytes)
# or the user-configured one, whichever is higher. # or the user-configured one, whichever is higher.
# #
# Marlin's default buffer sizes are 128 for RX and 32 for TX. # Marlin's default buffer sizes are 128 for RX and 32 for TX.
# The highest value is taken (128/64). # The highest value is taken (128/64).
# #
# If MF_*_BUFFER_SIZE, SERIAL_*_BUFFER_SIZE, USART_*_BUF_SIZE, are # If MF_*_BUFFER_SIZE, SERIAL_*_BUFFER_SIZE, USART_*_BUF_SIZE, are
# defined, the first of these values will be used as the minimum. # defined, the first of these values will be used as the minimum.
build_flags = env.ParseFlags(env.get('BUILD_FLAGS'))["CPPDEFINES"] build_flags = env.ParseFlags(env.get('BUILD_FLAGS'))["CPPDEFINES"]
mf = env["MARLIN_FEATURES"] mf = env["MARLIN_FEATURES"]
# Get the largest defined buffer sizes for RX or TX, using defaults for undefined # Get the largest defined buffer sizes for RX or TX, using defaults for undefined
rxBuf = getBufferSize("RX", 128) rxBuf = getBufferSize("RX", 128)
txBuf = getBufferSize("TX", 64) txBuf = getBufferSize("TX", 64)
# Provide serial buffer sizes to the stm32duino platform # Provide serial buffer sizes to the stm32duino platform
tryAddFlag("SERIAL_RX_BUFFER_SIZE", rxBuf) tryAddFlag("SERIAL_RX_BUFFER_SIZE", rxBuf)
tryAddFlag("SERIAL_TX_BUFFER_SIZE", txBuf) tryAddFlag("SERIAL_TX_BUFFER_SIZE", txBuf)
tryAddFlag("USART_RX_BUF_SIZE", rxBuf) tryAddFlag("USART_RX_BUF_SIZE", rxBuf)
tryAddFlag("USART_TX_BUF_SIZE", txBuf) tryAddFlag("USART_TX_BUF_SIZE", txBuf)

View File

@@ -144,7 +144,7 @@ class DWIN_ICO_File():
# process each file: # process each file:
try: try:
index = int(dirEntry.name[0:3]) index = int(dirEntry.name[0:3])
if (index < 0) or (index > 255): if not (0 <= index <= 255):
print('...Ignoring invalid index on', dirEntry.path) print('...Ignoring invalid index on', dirEntry.path)
continue continue
#dirEntry.path is iconDir/name #dirEntry.path is iconDir/name

View File

@@ -18,7 +18,6 @@
# along with this program. If not, see <https://www.gnu.org/licenses/>. # along with this program. If not, see <https://www.gnu.org/licenses/>.
#---------------------------------------------------------------- #----------------------------------------------------------------
import os
import os.path import os.path
import argparse import argparse
import DWIN_ICO import DWIN_ICO

View File

@@ -18,7 +18,6 @@
# along with this program. If not, see <https://www.gnu.org/licenses/>. # along with this program. If not, see <https://www.gnu.org/licenses/>.
#---------------------------------------------------------------- #----------------------------------------------------------------
import os
import os.path import os.path
import argparse import argparse
import DWIN_ICO import DWIN_ICO

View File

@@ -47,152 +47,152 @@ different_out_dir = not (output_examples_dir == input_examples_dir)
#---------------------------------------------- #----------------------------------------------
def process_file(subdir: str, filename: str): def process_file(subdir: str, filename: str):
#---------------------------------------------- #----------------------------------------------
global filenum global filenum
filenum += 1 filenum += 1
print(str(filenum) + ' ' + filename + ': ' + subdir) print(str(filenum) + ' ' + filename + ': ' + subdir)
def_line = (def_macro_name + ' "' + subdir.replace('\\', '/') + '"') def_line = (def_macro_name + ' "' + subdir.replace('\\', '/') + '"')
#------------------------ #------------------------
# Read file # Read file
#------------------------ #------------------------
lines = [] lines = []
infilepath = os.path.join(input_examples_dir, subdir, filename) infilepath = os.path.join(input_examples_dir, subdir, filename)
try: try:
# UTF-8 because some files contain unicode chars # UTF-8 because some files contain unicode chars
with open(infilepath, 'rt', encoding="utf-8") as infile: with open(infilepath, 'rt', encoding="utf-8") as infile:
lines = infile.readlines() lines = infile.readlines()
except Exception as e: except Exception as e:
print('Failed to read file: ' + str(e) ) print('Failed to read file: ' + str(e) )
raise Exception raise Exception
lines = [line.rstrip('\r\n') for line in lines] lines = [line.rstrip('\r\n') for line in lines]
#------------------------ #------------------------
# Process lines # Process lines
#------------------------ #------------------------
file_modified = False file_modified = False
# region state machine # region state machine
# -1 = before pragma once; # -1 = before pragma once;
# 0 = region to place define; # 0 = region to place define;
# 1 = past region to place define # 1 = past region to place define
region = -1 region = -1
outlines = [] outlines = []
for line in lines: for line in lines:
outline = line outline = line
if (region == -1) and (def_macro_name in line): if (region == -1) and (def_macro_name in line):
outline = None outline = None
file_modified = True file_modified = True
elif (region == -1) and ('pragma once' in line): elif (region == -1) and ('pragma once' in line):
region = 0 region = 0
elif (region == 0): elif (region == 0):
if (line.strip() == ''): if (line.strip() == ''):
pass pass
elif (def_macro_name in line): elif (def_macro_name in line):
region = 1 region = 1
if line == def_line: # leave it as is if line == def_line: # leave it as is
pass pass
else: else:
outline = def_line outline = def_line
file_modified = True file_modified = True
else: # some other string else: # some other string
outlines.append(def_line) outlines.append(def_line)
outlines.append('') outlines.append('')
region = 1 region = 1
file_modified = True file_modified = True
elif (region == 1): elif (region == 1):
if (def_macro_name in line): if (def_macro_name in line):
outline = None outline = None
file_modified = True file_modified = True
else: else:
pass pass
# end if # end if
if outline is not None: if outline is not None:
outlines.append(outline) outlines.append(outline)
# end for # end for
#------------------------- #-------------------------
# Output file # Output file
#------------------------- #-------------------------
outdir = os.path.join(output_examples_dir, subdir) outdir = os.path.join(output_examples_dir, subdir)
outfilepath = os.path.join(outdir, filename) outfilepath = os.path.join(outdir, filename)
if file_modified: if file_modified:
# Note: no need to create output dirs, as the initial copy_tree # Note: no need to create output dirs, as the initial copy_tree
# will do that. # will do that.
print(' writing ' + str(outfilepath)) print(' writing ' + str(outfilepath))
try: try:
# Preserve unicode chars; Avoid CR-LF on Windows. # Preserve unicode chars; Avoid CR-LF on Windows.
with open(outfilepath, "w", encoding="utf-8", newline='\n') as outfile: with open(outfilepath, "w", encoding="utf-8", newline='\n') as outfile:
outfile.write("\n".join(outlines)) outfile.write("\n".join(outlines))
outfile.write("\n") outfile.write("\n")
except Exception as e: except Exception as e:
print('Failed to write file: ' + str(e) ) print('Failed to write file: ' + str(e) )
raise Exception raise Exception
else: else:
print(' no change for ' + str(outfilepath)) print(' no change for ' + str(outfilepath))
#---------- #----------
def main(): def main():
#---------- #----------
global filenum global filenum
global input_examples_dir global input_examples_dir
global output_examples_dir global output_examples_dir
filenum = 0 filenum = 0
#-------------------------------- #--------------------------------
# Check for requirements # Check for requirements
#-------------------------------- #--------------------------------
input_examples_dir = input_examples_dir.strip() input_examples_dir = input_examples_dir.strip()
input_examples_dir = input_examples_dir.rstrip('\\/') input_examples_dir = input_examples_dir.rstrip('\\/')
output_examples_dir = output_examples_dir.strip() output_examples_dir = output_examples_dir.strip()
output_examples_dir = output_examples_dir.rstrip('\\/') output_examples_dir = output_examples_dir.rstrip('\\/')
for dir in [input_examples_dir, output_examples_dir]: for dir in [input_examples_dir, output_examples_dir]:
if not (os.path.exists(dir)): if not (os.path.exists(dir)):
print('Directory not found: ' + dir) print('Directory not found: ' + dir)
sys.exit(1) sys.exit(1)
#-------------------------------- #--------------------------------
# Copy tree if necessary. # Copy tree if necessary.
#-------------------------------- #--------------------------------
# This includes files that are not otherwise included in the # This includes files that are not otherwise included in the
# insertion of the define statement. # insertion of the define statement.
# #
if different_out_dir: if different_out_dir:
print('Copying files to new directory: ' + output_examples_dir) print('Copying files to new directory: ' + output_examples_dir)
try: try:
copy_tree(input_examples_dir, output_examples_dir) copy_tree(input_examples_dir, output_examples_dir)
except Exception as e: except Exception as e:
print('Failed to copy directory: ' + str(e) ) print('Failed to copy directory: ' + str(e) )
raise Exception raise Exception
#----------------------------- #-----------------------------
# Find and process files # Find and process files
#----------------------------- #-----------------------------
len_input_examples_dir = len(input_examples_dir); len_input_examples_dir = len(input_examples_dir);
len_input_examples_dir += 1 len_input_examples_dir += 1
for filename in files_to_mod: for filename in files_to_mod:
input_path = Path(input_examples_dir) input_path = Path(input_examples_dir)
filepathlist = input_path.rglob(filename) filepathlist = input_path.rglob(filename)
for filepath in filepathlist: for filepath in filepathlist:
fulldirpath = str(filepath.parent) fulldirpath = str(filepath.parent)
subdir = fulldirpath[len_input_examples_dir:] subdir = fulldirpath[len_input_examples_dir:]
process_file(subdir, filename) process_file(subdir, filename)
#============== #==============
print('--- Starting config-labels ---') print('--- Starting config-labels ---')

View File

@@ -50,7 +50,7 @@ class Thermistor:
if c < 0: if c < 0:
print("//////////////////////////////////////////////////////////////////////////////////////") print("//////////////////////////////////////////////////////////////////////////////////////")
print("// WARNING: negative coefficient 'c'! Something may be wrong with the measurements! //") print("// WARNING: Negative coefficient 'c'! Something may be wrong with the measurements! //")
print("//////////////////////////////////////////////////////////////////////////////////////") print("//////////////////////////////////////////////////////////////////////////////////////")
c = -c c = -c
self.c1 = a # Steinhart-Hart coefficients self.c1 = a # Steinhart-Hart coefficients
@@ -93,8 +93,8 @@ def main(argv):
r2 = 1641.9 # resistance at middle temperature (1.6 KOhm) r2 = 1641.9 # resistance at middle temperature (1.6 KOhm)
t3 = 250 # high temperature in Kelvin (250 degC) t3 = 250 # high temperature in Kelvin (250 degC)
r3 = 226.15 # resistance at high temperature (226.15 Ohm) r3 = 226.15 # resistance at high temperature (226.15 Ohm)
rp = 4700; # pull-up resistor (4.7 kOhm) rp = 4700 # pull-up resistor (4.7 kOhm)
num_temps = 36; # number of entries for look-up table num_temps = 36 # number of entries for look-up table
try: try:
opts, args = getopt.getopt(argv, "h", ["help", "rp=", "t1=", "t2=", "t3=", "num-temps="]) opts, args = getopt.getopt(argv, "h", ["help", "rp=", "t1=", "t2=", "t3=", "num-temps="])
@@ -125,13 +125,13 @@ def main(argv):
num_temps = int(arg) num_temps = int(arg)
t = Thermistor(rp, t1, r1, t2, r2, t3, r3) t = Thermistor(rp, t1, r1, t2, r2, t3, r3)
increment = int((ARES-1)/(num_temps-1)); increment = int((ARES - 1) / (num_temps - 1))
step = (TMIN-TMAX) / (num_temps-1) step = int((TMIN - TMAX) / (num_temps - 1))
low_bound = t.temp(ARES-1); low_bound = t.temp(ARES - 1)
up_bound = t.temp(1); up_bound = t.temp(1)
min_temp = int(TMIN if TMIN > low_bound else low_bound) min_temp = int(TMIN if TMIN > low_bound else low_bound)
max_temp = int(TMAX if TMAX < up_bound else up_bound) max_temp = int(TMAX if TMAX < up_bound else up_bound)
temps = list(range(max_temp, TMIN+step, step)); temps = list(range(max_temp, TMIN + step, step))
print("// Thermistor lookup table for Marlin") print("// Thermistor lookup table for Marlin")
print("// ./createTemperatureLookupMarlin.py --rp=%s --t1=%s:%s --t2=%s:%s --t3=%s:%s --num-temps=%s" % (rp, t1, r1, t2, r2, t3, r3, num_temps)) print("// ./createTemperatureLookupMarlin.py --rp=%s --t1=%s:%s --t2=%s:%s --t3=%s:%s --num-temps=%s" % (rp, t1, r1, t2, r2, t3, r3, num_temps))

View File

@@ -22,42 +22,42 @@
# Generate Marlin TFT Images from bitmaps/PNG/JPG # Generate Marlin TFT Images from bitmaps/PNG/JPG
import sys,re,struct import sys,struct
from PIL import Image,ImageDraw from PIL import Image
def image2bin(image, output_file): def image2bin(image, output_file):
if output_file.endswith(('.c', '.cpp')): if output_file.endswith(('.c', '.cpp')):
f = open(output_file, 'wt') f = open(output_file, 'wt')
is_cpp = True is_cpp = True
f.write("const uint16_t image[%d] = {\n" % (image.size[1] * image.size[0])) f.write("const uint16_t image[%d] = {\n" % (image.size[1] * image.size[0]))
else: else:
f = open(output_file, 'wb') f = open(output_file, 'wb')
is_cpp = False is_cpp = False
pixs = image.load() pixs = image.load()
for y in range(image.size[1]): for y in range(image.size[1]):
for x in range(image.size[0]): for x in range(image.size[0]):
R = pixs[x, y][0] >> 3 R = pixs[x, y][0] >> 3
G = pixs[x, y][1] >> 2 G = pixs[x, y][1] >> 2
B = pixs[x, y][2] >> 3 B = pixs[x, y][2] >> 3
rgb = (R << 11) | (G << 5) | B rgb = (R << 11) | (G << 5) | B
if is_cpp: if is_cpp:
strHex = '0x{0:04X}, '.format(rgb) strHex = '0x{0:04X}, '.format(rgb)
f.write(strHex) f.write(strHex)
else: else:
f.write(struct.pack("B", (rgb & 0xFF))) f.write(struct.pack("B", (rgb & 0xFF)))
f.write(struct.pack("B", (rgb >> 8) & 0xFF)) f.write(struct.pack("B", (rgb >> 8) & 0xFF))
if is_cpp: if is_cpp:
f.write("\n") f.write("\n")
if is_cpp: if is_cpp:
f.write("};\n") f.write("};\n")
f.close() f.close()
if len(sys.argv) <= 2: if len(sys.argv) <= 2:
print("Utility to export a image in Marlin TFT friendly format.") print("Utility to export a image in Marlin TFT friendly format.")
print("It will dump a raw bin RGB565 image or create a CPP file with an array of 16 bit image pixels.") print("It will dump a raw bin RGB565 image or create a CPP file with an array of 16 bit image pixels.")
print("Usage: gen-tft-image.py INPUT_IMAGE.(png|bmp|jpg) OUTPUT_FILE.(cpp|bin)") print("Usage: gen-tft-image.py INPUT_IMAGE.(png|bmp|jpg) OUTPUT_FILE.(cpp|bin)")
print("Author: rhapsodyv") print("Author: rhapsodyv")
exit(1) exit(1)
output_img = sys.argv[2] output_img = sys.argv[2]
img = Image.open(sys.argv[1]) img = Image.open(sys.argv[1])

View File

@@ -189,9 +189,7 @@ def Upload(source, target, env):
'BOARD_CREALITY_V427', 'BOARD_CREALITY_V431', 'BOARD_CREALITY_V452', 'BOARD_CREALITY_V453', 'BOARD_CREALITY_V427', 'BOARD_CREALITY_V431', 'BOARD_CREALITY_V452', 'BOARD_CREALITY_V453',
'BOARD_CREALITY_V24S1'] 'BOARD_CREALITY_V24S1']
# "upload_random_name": generate a random 8.3 firmware filename to upload # "upload_random_name": generate a random 8.3 firmware filename to upload
upload_random_filename = marlin_motherboard in ['BOARD_CREALITY_V4', 'BOARD_CREALITY_V4210', 'BOARD_CREALITY_V422', 'BOARD_CREALITY_V423', upload_random_filename = upload_delete_old_bins and not marlin_long_filename_host_support
'BOARD_CREALITY_V427', 'BOARD_CREALITY_V431', 'BOARD_CREALITY_V452', 'BOARD_CREALITY_V453',
'BOARD_CREALITY_V24S1'] and not marlin_long_filename_host_support
try: try:

View File

@@ -252,7 +252,7 @@ def resolve_path(path):
while 0 <= path.find('../'): while 0 <= path.find('../'):
end = path.find('../') - 1 end = path.find('../') - 1
start = path.find('/') start = path.find('/')
while 0 <= path.find('/', start) and end > path.find('/', start): while 0 <= path.find('/', start) < end:
start = path.find('/', start) + 1 start = path.find('/', start) + 1
path = path[0:start] + path[end + 4:] path = path[0:start] + path[end + 4:]
@@ -674,7 +674,7 @@ def line_print(line_input):
if 0 == highlight[1]: if 0 == highlight[1]:
found_1 = text.find(' ') found_1 = text.find(' ')
found_tab = text.find('\t') found_tab = text.find('\t')
if found_1 < 0 or found_1 > found_tab: if not (0 <= found_1 <= found_tab):
found_1 = found_tab found_1 = found_tab
write_to_screen_queue(text[:found_1 + 1]) write_to_screen_queue(text[:found_1 + 1])
for highlight_2 in highlights: for highlight_2 in highlights:
@@ -684,7 +684,7 @@ def line_print(line_input):
if found >= 0: if found >= 0:
found_space = text.find(' ', found_1 + 1) found_space = text.find(' ', found_1 + 1)
found_tab = text.find('\t', found_1 + 1) found_tab = text.find('\t', found_1 + 1)
if found_space < 0 or found_space > found_tab: if not (0 <= found_space <= found_tab):
found_space = found_tab found_space = found_tab
found_right = text.find(']', found + 1) found_right = text.find(']', found + 1)
write_to_screen_queue(text[found_1 + 1:found_space + 1], highlight[2]) write_to_screen_queue(text[found_1 + 1:found_space + 1], highlight[2])
@@ -701,7 +701,7 @@ def line_print(line_input):
break break
if did_something == False: if did_something == False:
r_loc = text.find('\r') + 1 r_loc = text.find('\r') + 1
if r_loc > 0 and r_loc < len(text): # need to split this line if 0 < r_loc < len(text): # need to split this line
text = text.split('\r') text = text.split('\r')
for line in text: for line in text:
if line != '': if line != '':

View File

@@ -13,7 +13,7 @@
from __future__ import print_function from __future__ import print_function
from __future__ import division from __future__ import division
import subprocess,os,sys,platform import subprocess,os,platform
from SCons.Script import DefaultEnvironment from SCons.Script import DefaultEnvironment
current_OS = platform.system() current_OS = platform.system()

View File

@@ -9,7 +9,7 @@
# Will continue on if a COM port isn't found so that the compilation can be done. # Will continue on if a COM port isn't found so that the compilation can be done.
# #
import os,sys import os
from SCons.Script import DefaultEnvironment from SCons.Script import DefaultEnvironment
import platform import platform
current_OS = platform.system() current_OS = platform.system()

View File

@@ -1,3 +1,3 @@
# Where have all the configurations gone? # Where have all the configurations gone?
## https://github.com/MarlinFirmware/Configurations/archive/release-2.0.9.5.zip ## https://github.com/MarlinFirmware/Configurations/archive/release-2.0.9.7.zip

View File

@@ -6,7 +6,7 @@ import yaml
with open('.github/workflows/test-builds.yml') as f: with open('.github/workflows/test-builds.yml') as f:
github_configuration = yaml.safe_load(f) github_configuration = yaml.safe_load(f)
test_platforms = github_configuration\ test_platforms = github_configuration\
['jobs']['test_builds']['strategy']['matrix']['test-platform'] ['jobs']['test_builds']['strategy']['matrix']['test-platform']
print(' '.join(test_platforms)) print(' '.join(test_platforms))

View File

@@ -20,6 +20,7 @@ build_src_filter = ${common.default_src_filter} +<src/HAL/ESP32>
lib_ignore = NativeEthernet lib_ignore = NativeEthernet
upload_speed = 500000 upload_speed = 500000
monitor_speed = 250000 monitor_speed = 250000
monitor_filters = colorize, time, send_on_enter, log2file, esp32_exception_decoder
#upload_port = marlinesp.local #upload_port = marlinesp.local
#board_build.flash_mode = qio #board_build.flash_mode = qio

View File

@@ -12,230 +12,230 @@
[features] [features]
YHCB2004 = red-scorp/LiquidCrystal_AIP31068@^1.0.4, red-scorp/SoftSPIB@^1.1.1 YHCB2004 = red-scorp/LiquidCrystal_AIP31068@^1.0.4, red-scorp/SoftSPIB@^1.1.1
HAS_TFT_LVGL_UI = lvgl=https://github.com/makerbase-mks/LVGL-6.1.1-MKS/archive/master.zip HAS_TFT_LVGL_UI = lvgl=https://github.com/makerbase-mks/LVGL-6.1.1-MKS/archive/master.zip
src_filter=+<src/lcd/extui/mks_ui> build_src_filter=+<src/lcd/extui/mks_ui>
extra_scripts=download_mks_assets.py extra_scripts=download_mks_assets.py
POSTMORTEM_DEBUGGING = src_filter=+<src/HAL/shared/cpu_exception> +<src/HAL/shared/backtrace> POSTMORTEM_DEBUGGING = build_src_filter=+<src/HAL/shared/cpu_exception> +<src/HAL/shared/backtrace>
build_flags=-funwind-tables build_flags=-funwind-tables
MKS_WIFI_MODULE = QRCode=https://github.com/makerbase-mks/QRCode/archive/master.zip MKS_WIFI_MODULE = QRCode=https://github.com/makerbase-mks/QRCode/archive/master.zip
HAS_TRINAMIC_CONFIG = TMCStepper@~0.7.3 HAS_TRINAMIC_CONFIG = TMCStepper@~0.7.3
src_filter=+<src/module/stepper/trinamic.cpp> +<src/gcode/feature/trinamic/M122.cpp> +<src/gcode/feature/trinamic/M906.cpp> +<src/gcode/feature/trinamic/M911-M914.cpp> +<src/gcode/feature/trinamic/M919.cpp> build_src_filter=+<src/module/stepper/trinamic.cpp> +<src/gcode/feature/trinamic/M122.cpp> +<src/gcode/feature/trinamic/M906.cpp> +<src/gcode/feature/trinamic/M911-M914.cpp> +<src/gcode/feature/trinamic/M919.cpp>
HAS_T(RINAMIC_CONFIG|MC_SPI) = src_filter=+<src/feature/tmc_util.cpp> HAS_T(RINAMIC_CONFIG|MC_SPI) = build_src_filter=+<src/feature/tmc_util.cpp>
HAS_STEALTHCHOP = src_filter=+<src/gcode/feature/trinamic/M569.cpp> HAS_STEALTHCHOP = build_src_filter=+<src/gcode/feature/trinamic/M569.cpp>
SR_LCD_3W_NL = SailfishLCD=https://github.com/mikeshub/SailfishLCD/archive/master.zip SR_LCD_3W_NL = SailfishLCD=https://github.com/mikeshub/SailfishLCD/archive/master.zip
HAS_MOTOR_CURRENT_I2C = SlowSoftI2CMaster HAS_MOTOR_CURRENT_I2C = SlowSoftI2CMaster
src_filter=+<src/feature/digipot> build_src_filter=+<src/feature/digipot>
HAS_TMC26X = TMC26XStepper=https://github.com/MarlinFirmware/TMC26XStepper/archive/master.zip HAS_TMC26X = TMC26XStepper=https://github.com/MarlinFirmware/TMC26XStepper/archive/master.zip
src_filter=+<src/module/stepper/TMC26X.cpp> build_src_filter=+<src/module/stepper/TMC26X.cpp>
HAS_L64XX = Arduino-L6470@0.8.0 HAS_L64XX = Arduino-L6470@0.8.0
src_filter=+<src/libs/L64XX> +<src/module/stepper/L64xx.cpp> +<src/gcode/feature/L6470> +<src/HAL/shared/HAL_spi_L6470.cpp> build_src_filter=+<src/libs/L64XX> +<src/module/stepper/L64xx.cpp> +<src/gcode/feature/L6470> +<src/HAL/shared/HAL_spi_L6470.cpp>
LIB_INTERNAL_MAX31865 = src_filter=+<src/libs/MAX31865.cpp> LIB_INTERNAL_MAX31865 = build_src_filter=+<src/libs/MAX31865.cpp>
NEOPIXEL_LED = adafruit/Adafruit NeoPixel@~1.8.0 NEOPIXEL_LED = adafruit/Adafruit NeoPixel@~1.8.0
src_filter=+<src/feature/leds/neopixel.cpp> build_src_filter=+<src/feature/leds/neopixel.cpp>
I2C_AMMETER = peterus/INA226Lib@1.1.2 I2C_AMMETER = peterus/INA226Lib@1.1.2
src_filter=+<src/feature/ammeter.cpp> build_src_filter=+<src/feature/ammeter.cpp>
USES_LIQUIDCRYSTAL = LiquidCrystal=https://github.com/MarlinFirmware/New-LiquidCrystal/archive/1.5.1.zip USES_LIQUIDCRYSTAL = LiquidCrystal=https://github.com/MarlinFirmware/New-LiquidCrystal/archive/1.5.1.zip
USES_LIQUIDCRYSTAL_I2C = marcoschwartz/LiquidCrystal_I2C@1.1.4 USES_LIQUIDCRYSTAL_I2C = marcoschwartz/LiquidCrystal_I2C@1.1.4
USES_LIQUIDTWI2 = LiquidTWI2@1.2.7 USES_LIQUIDTWI2 = LiquidTWI2@1.2.7
HAS_LCDPRINT = src_filter=+<src/lcd/lcdprint.cpp> HAS_LCDPRINT = build_src_filter=+<src/lcd/lcdprint.cpp>
HAS_MARLINUI_HD44780 = src_filter=+<src/lcd/HD44780> HAS_MARLINUI_HD44780 = build_src_filter=+<src/lcd/HD44780>
HAS_MARLINUI_U8GLIB = U8glib-HAL@~0.5.2 HAS_MARLINUI_U8GLIB = U8glib-HAL@~0.5.2
src_filter=+<src/lcd/dogm> build_src_filter=+<src/lcd/dogm>
HAS_(FSMC|SPI|LTDC)_TFT = src_filter=+<src/HAL/STM32/tft> +<src/HAL/STM32F1/tft> +<src/lcd/tft_io> HAS_(FSMC|SPI|LTDC)_TFT = build_src_filter=+<src/HAL/STM32/tft> +<src/HAL/STM32F1/tft> +<src/lcd/tft_io>
HAS_FSMC_TFT = src_filter=+<src/HAL/STM32/tft/tft_fsmc.cpp> +<src/HAL/STM32F1/tft/tft_fsmc.cpp> HAS_FSMC_TFT = build_src_filter=+<src/HAL/STM32/tft/tft_fsmc.cpp> +<src/HAL/STM32F1/tft/tft_fsmc.cpp>
HAS_SPI_TFT = src_filter=+<src/HAL/STM32/tft/tft_spi.cpp> +<src/HAL/STM32F1/tft/tft_spi.cpp> HAS_SPI_TFT = build_src_filter=+<src/HAL/STM32/tft/tft_spi.cpp> +<src/HAL/STM32F1/tft/tft_spi.cpp>
I2C_EEPROM = src_filter=+<src/HAL/shared/eeprom_if_i2c.cpp> I2C_EEPROM = build_src_filter=+<src/HAL/shared/eeprom_if_i2c.cpp>
SOFT_I2C_EEPROM = SlowSoftI2CMaster, SlowSoftWire=https://github.com/felias-fogg/SlowSoftWire/archive/master.zip SOFT_I2C_EEPROM = SlowSoftI2CMaster, SlowSoftWire=https://github.com/felias-fogg/SlowSoftWire/archive/master.zip
SPI_EEPROM = src_filter=+<src/HAL/shared/eeprom_if_spi.cpp> SPI_EEPROM = build_src_filter=+<src/HAL/shared/eeprom_if_spi.cpp>
HAS_DWIN_E3V2|IS_DWIN_MARLINUI = src_filter=+<src/lcd/e3v2/common> HAS_DWIN_E3V2|IS_DWIN_MARLINUI = build_src_filter=+<src/lcd/e3v2/common>
DWIN_CREALITY_LCD = src_filter=+<src/lcd/e3v2/creality> DWIN_CREALITY_LCD = build_src_filter=+<src/lcd/e3v2/creality>
DWIN_LCD_PROUI = src_filter=+<src/lcd/e3v2/proui> DWIN_LCD_PROUI = build_src_filter=+<src/lcd/e3v2/proui>
DWIN_CREALITY_LCD_JYERSUI = src_filter=+<src/lcd/e3v2/jyersui> DWIN_CREALITY_LCD_JYERSUI = build_src_filter=+<src/lcd/e3v2/jyersui>
IS_DWIN_MARLINUI = src_filter=+<src/lcd/e3v2/marlinui> IS_DWIN_MARLINUI = build_src_filter=+<src/lcd/e3v2/marlinui>
HAS_GRAPHICAL_TFT = src_filter=+<src/lcd/tft> HAS_GRAPHICAL_TFT = build_src_filter=+<src/lcd/tft>
IS_TFTGLCD_PANEL = src_filter=+<src/lcd/TFTGLCD> IS_TFTGLCD_PANEL = build_src_filter=+<src/lcd/TFTGLCD>
HAS_TOUCH_BUTTONS = src_filter=+<src/lcd/touch/touch_buttons.cpp> HAS_TOUCH_BUTTONS = build_src_filter=+<src/lcd/touch/touch_buttons.cpp>
HAS_MARLINUI_MENU = src_filter=+<src/lcd/menu> HAS_MARLINUI_MENU = build_src_filter=+<src/lcd/menu>
HAS_GAMES = src_filter=+<src/lcd/menu/game/game.cpp> HAS_GAMES = build_src_filter=+<src/lcd/menu/game/game.cpp>
MARLIN_BRICKOUT = src_filter=+<src/lcd/menu/game/brickout.cpp> MARLIN_BRICKOUT = build_src_filter=+<src/lcd/menu/game/brickout.cpp>
MARLIN_INVADERS = src_filter=+<src/lcd/menu/game/invaders.cpp> MARLIN_INVADERS = build_src_filter=+<src/lcd/menu/game/invaders.cpp>
MARLIN_MAZE = src_filter=+<src/lcd/menu/game/maze.cpp> MARLIN_MAZE = build_src_filter=+<src/lcd/menu/game/maze.cpp>
MARLIN_SNAKE = src_filter=+<src/lcd/menu/game/snake.cpp> MARLIN_SNAKE = build_src_filter=+<src/lcd/menu/game/snake.cpp>
HAS_MENU_BACKLASH = src_filter=+<src/lcd/menu/menu_backlash.cpp> HAS_MENU_BACKLASH = build_src_filter=+<src/lcd/menu/menu_backlash.cpp>
HAS_MENU_BED_CORNERS = src_filter=+<src/lcd/menu/menu_bed_corners.cpp> HAS_MENU_BED_CORNERS = build_src_filter=+<src/lcd/menu/menu_bed_corners.cpp>
LCD_BED_LEVELING = src_filter=+<src/lcd/menu/menu_bed_leveling.cpp> LCD_BED_LEVELING = build_src_filter=+<src/lcd/menu/menu_bed_leveling.cpp>
HAS_MENU_CANCELOBJECT = src_filter=+<src/lcd/menu/menu_cancelobject.cpp> HAS_MENU_CANCELOBJECT = build_src_filter=+<src/lcd/menu/menu_cancelobject.cpp>
HAS_MENU_DELTA_CALIBRATE = src_filter=+<src/lcd/menu/menu_delta_calibrate.cpp> HAS_MENU_DELTA_CALIBRATE = build_src_filter=+<src/lcd/menu/menu_delta_calibrate.cpp>
HAS_MENU_FILAMENT = src_filter=+<src/lcd/menu/menu_filament.cpp> HAS_MENU_FILAMENT = build_src_filter=+<src/lcd/menu/menu_filament.cpp>
LCD_INFO_MENU = src_filter=+<src/lcd/menu/menu_info.cpp> LCD_INFO_MENU = build_src_filter=+<src/lcd/menu/menu_info.cpp>
HAS_MENU_JOB_RECOVERY = src_filter=+<src/lcd/menu/menu_job_recovery.cpp> HAS_MENU_JOB_RECOVERY = build_src_filter=+<src/lcd/menu/menu_job_recovery.cpp>
HAS_MULTI_LANGUAGE = src_filter=+<src/lcd/menu/menu_language.cpp> +<src/gcode/lcd/M414.cpp> HAS_MULTI_LANGUAGE = build_src_filter=+<src/lcd/menu/menu_language.cpp> +<src/gcode/lcd/M414.cpp>
HAS_MENU_LED = src_filter=+<src/lcd/menu/menu_led.cpp> HAS_MENU_LED = build_src_filter=+<src/lcd/menu/menu_led.cpp>
HAS_MENU_MEDIA = src_filter=+<src/lcd/menu/menu_media.cpp> HAS_MENU_MEDIA = build_src_filter=+<src/lcd/menu/menu_media.cpp>
HAS_MENU_MIXER = src_filter=+<src/lcd/menu/menu_mixer.cpp> HAS_MENU_MIXER = build_src_filter=+<src/lcd/menu/menu_mixer.cpp>
HAS_MENU_MMU2 = src_filter=+<src/lcd/menu/menu_mmu2.cpp> HAS_MENU_MMU2 = build_src_filter=+<src/lcd/menu/menu_mmu2.cpp>
HAS_MENU_PASSWORD = src_filter=+<src/lcd/menu/menu_password.cpp> HAS_MENU_PASSWORD = build_src_filter=+<src/lcd/menu/menu_password.cpp>
HAS_MENU_POWER_MONITOR = src_filter=+<src/lcd/menu/menu_power_monitor.cpp> HAS_MENU_POWER_MONITOR = build_src_filter=+<src/lcd/menu/menu_power_monitor.cpp>
HAS_MENU_CUTTER = src_filter=+<src/lcd/menu/menu_spindle_laser.cpp> HAS_MENU_CUTTER = build_src_filter=+<src/lcd/menu/menu_spindle_laser.cpp>
HAS_MENU_TEMPERATURE = src_filter=+<src/lcd/menu/menu_temperature.cpp> HAS_MENU_TEMPERATURE = build_src_filter=+<src/lcd/menu/menu_temperature.cpp>
HAS_MENU_TMC = src_filter=+<src/lcd/menu/menu_tmc.cpp> HAS_MENU_TMC = build_src_filter=+<src/lcd/menu/menu_tmc.cpp>
HAS_MENU_TOUCH_SCREEN = src_filter=+<src/lcd/menu/menu_touch_screen.cpp> HAS_MENU_TOUCH_SCREEN = build_src_filter=+<src/lcd/menu/menu_touch_screen.cpp>
HAS_MENU_TRAMMING = src_filter=+<src/lcd/menu/menu_tramming.cpp> HAS_MENU_TRAMMING = build_src_filter=+<src/lcd/menu/menu_tramming.cpp>
HAS_MENU_UBL = src_filter=+<src/lcd/menu/menu_ubl.cpp> HAS_MENU_UBL = build_src_filter=+<src/lcd/menu/menu_ubl.cpp>
ANYCUBIC_LCD_CHIRON = src_filter=+<src/lcd/extui/anycubic_chiron> ANYCUBIC_LCD_CHIRON = build_src_filter=+<src/lcd/extui/anycubic_chiron>
ANYCUBIC_LCD_I3MEGA = src_filter=+<src/lcd/extui/anycubic_i3mega> ANYCUBIC_LCD_I3MEGA = build_src_filter=+<src/lcd/extui/anycubic_i3mega>
HAS_DGUS_LCD_CLASSIC = src_filter=+<src/lcd/extui/dgus> HAS_DGUS_LCD_CLASSIC = build_src_filter=+<src/lcd/extui/dgus>
DGUS_LCD_UI_RELOADED = src_filter=+<src/lcd/extui/dgus_reloaded> DGUS_LCD_UI_RELOADED = build_src_filter=+<src/lcd/extui/dgus_reloaded>
DGUS_LCD_UI_FYSETC = src_filter=+<src/lcd/extui/dgus/fysetc> DGUS_LCD_UI_FYSETC = build_src_filter=+<src/lcd/extui/dgus/fysetc>
DGUS_LCD_UI_HIPRECY = src_filter=+<src/lcd/extui/dgus/hiprecy> DGUS_LCD_UI_HIPRECY = build_src_filter=+<src/lcd/extui/dgus/hiprecy>
DGUS_LCD_UI_MKS = src_filter=+<src/lcd/extui/dgus/mks> DGUS_LCD_UI_MKS = build_src_filter=+<src/lcd/extui/dgus/mks>
DGUS_LCD_UI_ORIGIN = src_filter=+<src/lcd/extui/dgus/origin> DGUS_LCD_UI_ORIGIN = build_src_filter=+<src/lcd/extui/dgus/origin>
EXTUI_EXAMPLE = src_filter=+<src/lcd/extui/example> EXTUI_EXAMPLE = build_src_filter=+<src/lcd/extui/example>
TOUCH_UI_FTDI_EVE = src_filter=+<src/lcd/extui/ftdi_eve_touch_ui> TOUCH_UI_FTDI_EVE = build_src_filter=+<src/lcd/extui/ftdi_eve_touch_ui>
MALYAN_LCD = src_filter=+<src/lcd/extui/malyan> MALYAN_LCD = build_src_filter=+<src/lcd/extui/malyan>
NEXTION_TFT = src_filter=+<src/lcd/extui/nextion> NEXTION_TFT = build_src_filter=+<src/lcd/extui/nextion>
USE_UHS2_USB = src_filter=+<src/sd/usb_flashdrive/lib-uhs2> USE_UHS2_USB = build_src_filter=+<src/sd/usb_flashdrive/lib-uhs2>
USE_UHS3_USB = src_filter=+<src/sd/usb_flashdrive/lib-uhs3> USE_UHS3_USB = build_src_filter=+<src/sd/usb_flashdrive/lib-uhs3>
USB_FLASH_DRIVE_SUPPORT = src_filter=+<src/sd/usb_flashdrive/Sd2Card_FlashDrive.cpp> USB_FLASH_DRIVE_SUPPORT = build_src_filter=+<src/sd/usb_flashdrive/Sd2Card_FlashDrive.cpp>
HAS_MCP3426_ADC = src_filter=+<src/feature/adc> +<src/gcode/feature/adc> HAS_MCP3426_ADC = build_src_filter=+<src/feature/adc> +<src/gcode/feature/adc>
AUTO_BED_LEVELING_BILINEAR = src_filter=+<src/feature/bedlevel/abl> AUTO_BED_LEVELING_BILINEAR = build_src_filter=+<src/feature/bedlevel/abl>
AUTO_BED_LEVELING_(3POINT|(BI)?LINEAR) = src_filter=+<src/gcode/bedlevel/abl> AUTO_BED_LEVELING_(3POINT|(BI)?LINEAR) = build_src_filter=+<src/gcode/bedlevel/abl>
X_AXIS_TWIST_COMPENSATION = src_filter=+<src/feature/x_twist.cpp> +<src/lcd/menu/menu_x_twist.cpp> +<src/gcode/probe/M423.cpp> X_AXIS_TWIST_COMPENSATION = build_src_filter=+<src/feature/x_twist.cpp> +<src/lcd/menu/menu_x_twist.cpp> +<src/gcode/probe/M423.cpp>
MESH_BED_LEVELING = src_filter=+<src/feature/bedlevel/mbl> +<src/gcode/bedlevel/mbl> MESH_BED_LEVELING = build_src_filter=+<src/feature/bedlevel/mbl> +<src/gcode/bedlevel/mbl>
AUTO_BED_LEVELING_UBL = src_filter=+<src/feature/bedlevel/ubl> +<src/gcode/bedlevel/ubl> AUTO_BED_LEVELING_UBL = build_src_filter=+<src/feature/bedlevel/ubl> +<src/gcode/bedlevel/ubl>
UBL_HILBERT_CURVE = src_filter=+<src/feature/bedlevel/hilbert_curve.cpp> UBL_HILBERT_CURVE = build_src_filter=+<src/feature/bedlevel/hilbert_curve.cpp>
BACKLASH_COMPENSATION = src_filter=+<src/feature/backlash.cpp> BACKLASH_COMPENSATION = build_src_filter=+<src/feature/backlash.cpp>
BARICUDA = src_filter=+<src/feature/baricuda.cpp> +<src/gcode/feature/baricuda> BARICUDA = build_src_filter=+<src/feature/baricuda.cpp> +<src/gcode/feature/baricuda>
BINARY_FILE_TRANSFER = src_filter=+<src/feature/binary_stream.cpp> +<src/libs/heatshrink> BINARY_FILE_TRANSFER = build_src_filter=+<src/feature/binary_stream.cpp> +<src/libs/heatshrink>
BLTOUCH = src_filter=+<src/feature/bltouch.cpp> BLTOUCH = build_src_filter=+<src/feature/bltouch.cpp>
CANCEL_OBJECTS = src_filter=+<src/feature/cancel_object.cpp> +<src/gcode/feature/cancel> CANCEL_OBJECTS = build_src_filter=+<src/feature/cancel_object.cpp> +<src/gcode/feature/cancel>
CASE_LIGHT_ENABLE = src_filter=+<src/feature/caselight.cpp> +<src/gcode/feature/caselight> CASE_LIGHT_ENABLE = build_src_filter=+<src/feature/caselight.cpp> +<src/gcode/feature/caselight>
EXTERNAL_CLOSED_LOOP_CONTROLLER = src_filter=+<src/feature/closedloop.cpp> +<src/gcode/calibrate/M12.cpp> EXTERNAL_CLOSED_LOOP_CONTROLLER = build_src_filter=+<src/feature/closedloop.cpp> +<src/gcode/calibrate/M12.cpp>
USE_CONTROLLER_FAN = src_filter=+<src/feature/controllerfan.cpp> USE_CONTROLLER_FAN = build_src_filter=+<src/feature/controllerfan.cpp>
HAS_COOLER|LASER_COOLANT_FLOW_METER = src_filter=+<src/feature/cooler.cpp> HAS_COOLER|LASER_COOLANT_FLOW_METER = build_src_filter=+<src/feature/cooler.cpp>
HAS_MOTOR_CURRENT_DAC = src_filter=+<src/feature/dac> HAS_MOTOR_CURRENT_DAC = build_src_filter=+<src/feature/dac>
DIRECT_STEPPING = src_filter=+<src/feature/direct_stepping.cpp> +<src/gcode/motion/G6.cpp> DIRECT_STEPPING = build_src_filter=+<src/feature/direct_stepping.cpp> +<src/gcode/motion/G6.cpp>
EMERGENCY_PARSER = src_filter=+<src/feature/e_parser.cpp> -<src/gcode/control/M108_*.cpp> EMERGENCY_PARSER = build_src_filter=+<src/feature/e_parser.cpp> -<src/gcode/control/M108_*.cpp>
EASYTHREED_UI = src_filter=+<src/feature/easythreed_ui.cpp> EASYTHREED_UI = build_src_filter=+<src/feature/easythreed_ui.cpp>
I2C_POSITION_ENCODERS = src_filter=+<src/feature/encoder_i2c.cpp> I2C_POSITION_ENCODERS = build_src_filter=+<src/feature/encoder_i2c.cpp>
IIC_BL24CXX_EEPROM = src_filter=+<src/libs/BL24CXX.cpp> IIC_BL24CXX_EEPROM = build_src_filter=+<src/libs/BL24CXX.cpp>
HAS_SPI_FLASH = src_filter=+<src/libs/W25Qxx.cpp> HAS_SPI_FLASH = build_src_filter=+<src/libs/W25Qxx.cpp>
HAS_ETHERNET = src_filter=+<src/feature/ethernet.cpp> +<src/gcode/feature/network/M552-M554.cpp> HAS_ETHERNET = build_src_filter=+<src/feature/ethernet.cpp> +<src/gcode/feature/network/M552-M554.cpp>
HAS_FANCHECK = src_filter=+<src/feature/fancheck.cpp> +<src/gcode/temp/M123.cpp> HAS_FANCHECK = build_src_filter=+<src/feature/fancheck.cpp> +<src/gcode/temp/M123.cpp>
HAS_FANMUX = src_filter=+<src/feature/fanmux.cpp> HAS_FANMUX = build_src_filter=+<src/feature/fanmux.cpp>
FILAMENT_WIDTH_SENSOR = src_filter=+<src/feature/filwidth.cpp> +<src/gcode/feature/filwidth> FILAMENT_WIDTH_SENSOR = build_src_filter=+<src/feature/filwidth.cpp> +<src/gcode/feature/filwidth>
FWRETRACT = src_filter=+<src/feature/fwretract.cpp> +<src/gcode/feature/fwretract> FWRETRACT = build_src_filter=+<src/feature/fwretract.cpp> +<src/gcode/feature/fwretract>
HOST_ACTION_COMMANDS = src_filter=+<src/feature/host_actions.cpp> HOST_ACTION_COMMANDS = build_src_filter=+<src/feature/host_actions.cpp>
HOTEND_IDLE_TIMEOUT = src_filter=+<src/feature/hotend_idle.cpp> HOTEND_IDLE_TIMEOUT = build_src_filter=+<src/feature/hotend_idle.cpp>
JOYSTICK = src_filter=+<src/feature/joystick.cpp> JOYSTICK = build_src_filter=+<src/feature/joystick.cpp>
BLINKM = src_filter=+<src/feature/leds/blinkm.cpp> BLINKM = build_src_filter=+<src/feature/leds/blinkm.cpp>
HAS_COLOR_LEDS = src_filter=+<src/feature/leds/leds.cpp> +<src/gcode/feature/leds/M150.cpp> HAS_COLOR_LEDS = build_src_filter=+<src/feature/leds/leds.cpp> +<src/gcode/feature/leds/M150.cpp>
PCA9533 = src_filter=+<src/feature/leds/pca9533.cpp> PCA9533 = build_src_filter=+<src/feature/leds/pca9533.cpp>
PCA9632 = src_filter=+<src/feature/leds/pca9632.cpp> PCA9632 = build_src_filter=+<src/feature/leds/pca9632.cpp>
PRINTER_EVENT_LEDS = src_filter=+<src/feature/leds/printer_event_leds.cpp> PRINTER_EVENT_LEDS = build_src_filter=+<src/feature/leds/printer_event_leds.cpp>
TEMP_STAT_LEDS = src_filter=+<src/feature/leds/tempstat.cpp> TEMP_STAT_LEDS = build_src_filter=+<src/feature/leds/tempstat.cpp>
MAX7219_DEBUG = src_filter=+<src/feature/max7219.cpp> +<src/gcode/feature/leds/M7219.cpp> MAX7219_DEBUG = build_src_filter=+<src/feature/max7219.cpp> +<src/gcode/feature/leds/M7219.cpp>
HAS_MEATPACK = src_filter=+<src/feature/meatpack.cpp> HAS_MEATPACK = build_src_filter=+<src/feature/meatpack.cpp>
MIXING_EXTRUDER = src_filter=+<src/feature/mixing.cpp> +<src/gcode/feature/mixing/M163-M165.cpp> MIXING_EXTRUDER = build_src_filter=+<src/feature/mixing.cpp> +<src/gcode/feature/mixing/M163-M165.cpp>
HAS_PRUSA_MMU1 = src_filter=+<src/feature/mmu/mmu.cpp> HAS_PRUSA_MMU1 = build_src_filter=+<src/feature/mmu/mmu.cpp>
HAS_PRUSA_MMU2 = src_filter=+<src/feature/mmu/mmu2.cpp> +<src/gcode/feature/prusa_MMU2> HAS_PRUSA_MMU2 = build_src_filter=+<src/feature/mmu/mmu2.cpp> +<src/gcode/feature/prusa_MMU2>
PASSWORD_FEATURE = src_filter=+<src/feature/password> +<src/gcode/feature/password> PASSWORD_FEATURE = build_src_filter=+<src/feature/password> +<src/gcode/feature/password>
ADVANCED_PAUSE_FEATURE = src_filter=+<src/feature/pause.cpp> +<src/gcode/feature/pause/M600.cpp> +<src/gcode/feature/pause/M603.cpp> ADVANCED_PAUSE_FEATURE = build_src_filter=+<src/feature/pause.cpp> +<src/gcode/feature/pause/M600.cpp> +<src/gcode/feature/pause/M603.cpp>
PSU_CONTROL = src_filter=+<src/feature/power.cpp> PSU_CONTROL = build_src_filter=+<src/feature/power.cpp>
HAS_POWER_MONITOR = src_filter=+<src/feature/power_monitor.cpp> +<src/gcode/feature/power_monitor> HAS_POWER_MONITOR = build_src_filter=+<src/feature/power_monitor.cpp> +<src/gcode/feature/power_monitor>
POWER_LOSS_RECOVERY = src_filter=+<src/feature/powerloss.cpp> +<src/gcode/feature/powerloss> POWER_LOSS_RECOVERY = build_src_filter=+<src/feature/powerloss.cpp> +<src/gcode/feature/powerloss>
HAS_PTC = src_filter=+<src/feature/probe_temp_comp.cpp> +<src/gcode/calibrate/G76_M871.cpp> HAS_PTC = build_src_filter=+<src/feature/probe_temp_comp.cpp> +<src/gcode/calibrate/G76_M871.cpp>
HAS_FILAMENT_SENSOR = src_filter=+<src/feature/runout.cpp> +<src/gcode/feature/runout> HAS_FILAMENT_SENSOR = build_src_filter=+<src/feature/runout.cpp> +<src/gcode/feature/runout>
(EXT|MANUAL)_SOLENOID.* = src_filter=+<src/feature/solenoid.cpp> +<src/gcode/control/M380_M381.cpp> (EXT|MANUAL)_SOLENOID.* = build_src_filter=+<src/feature/solenoid.cpp> +<src/gcode/control/M380_M381.cpp>
MK2_MULTIPLEXER = src_filter=+<src/feature/snmm.cpp> MK2_MULTIPLEXER = build_src_filter=+<src/feature/snmm.cpp>
HAS_CUTTER = src_filter=+<src/feature/spindle_laser.cpp> +<src/gcode/control/M3-M5.cpp> HAS_CUTTER = build_src_filter=+<src/feature/spindle_laser.cpp> +<src/gcode/control/M3-M5.cpp>
HAS_DRIVER_SAFE_POWER_PROTECT = src_filter=+<src/feature/stepper_driver_safety.cpp> HAS_DRIVER_SAFE_POWER_PROTECT = build_src_filter=+<src/feature/stepper_driver_safety.cpp>
EXPERIMENTAL_I2CBUS = src_filter=+<src/feature/twibus.cpp> +<src/gcode/feature/i2c> EXPERIMENTAL_I2CBUS = build_src_filter=+<src/feature/twibus.cpp> +<src/gcode/feature/i2c>
G26_MESH_VALIDATION = src_filter=+<src/gcode/bedlevel/G26.cpp> G26_MESH_VALIDATION = build_src_filter=+<src/gcode/bedlevel/G26.cpp>
ASSISTED_TRAMMING = src_filter=+<src/feature/tramming.cpp> +<src/gcode/bedlevel/G35.cpp> ASSISTED_TRAMMING = build_src_filter=+<src/feature/tramming.cpp> +<src/gcode/bedlevel/G35.cpp>
HAS_MESH = src_filter=+<src/gcode/bedlevel/G42.cpp> HAS_MESH = build_src_filter=+<src/gcode/bedlevel/G42.cpp>
HAS_LEVELING = src_filter=+<src/gcode/bedlevel/M420.cpp> +<src/feature/bedlevel/bedlevel.cpp> HAS_LEVELING = build_src_filter=+<src/gcode/bedlevel/M420.cpp> +<src/feature/bedlevel/bedlevel.cpp>
MECHANICAL_GANTRY_CAL.+ = src_filter=+<src/gcode/calibrate/G34.cpp> MECHANICAL_GANTRY_CAL.+ = build_src_filter=+<src/gcode/calibrate/G34.cpp>
Z_MULTI_ENDSTOPS|Z_STEPPER_AUTO_ALIGN = src_filter=+<src/gcode/calibrate/G34_M422.cpp> Z_MULTI_ENDSTOPS|Z_STEPPER_AUTO_ALIGN = build_src_filter=+<src/gcode/calibrate/G34_M422.cpp>
Z_STEPPER_AUTO_ALIGN = src_filter=+<src/feature/z_stepper_align.cpp> Z_STEPPER_AUTO_ALIGN = build_src_filter=+<src/feature/z_stepper_align.cpp>
DELTA_AUTO_CALIBRATION = src_filter=+<src/gcode/calibrate/G33.cpp> DELTA_AUTO_CALIBRATION = build_src_filter=+<src/gcode/calibrate/G33.cpp>
CALIBRATION_GCODE = src_filter=+<src/gcode/calibrate/G425.cpp> CALIBRATION_GCODE = build_src_filter=+<src/gcode/calibrate/G425.cpp>
Z_MIN_PROBE_REPEATABILITY_TEST = src_filter=+<src/gcode/calibrate/M48.cpp> Z_MIN_PROBE_REPEATABILITY_TEST = build_src_filter=+<src/gcode/calibrate/M48.cpp>
M100_FREE_MEMORY_WATCHER = src_filter=+<src/gcode/calibrate/M100.cpp> M100_FREE_MEMORY_WATCHER = build_src_filter=+<src/gcode/calibrate/M100.cpp>
BACKLASH_GCODE = src_filter=+<src/gcode/calibrate/M425.cpp> BACKLASH_GCODE = build_src_filter=+<src/gcode/calibrate/M425.cpp>
IS_KINEMATIC = src_filter=+<src/gcode/calibrate/M665.cpp> IS_KINEMATIC = build_src_filter=+<src/gcode/calibrate/M665.cpp>
HAS_EXTRA_ENDSTOPS = src_filter=+<src/gcode/calibrate/M666.cpp> HAS_EXTRA_ENDSTOPS = build_src_filter=+<src/gcode/calibrate/M666.cpp>
SKEW_CORRECTION_GCODE = src_filter=+<src/gcode/calibrate/M852.cpp> SKEW_CORRECTION_GCODE = build_src_filter=+<src/gcode/calibrate/M852.cpp>
DIRECT_PIN_CONTROL = src_filter=+<src/gcode/control/M42.cpp> +<src/gcode/control/M226.cpp> DIRECT_PIN_CONTROL = build_src_filter=+<src/gcode/control/M42.cpp> +<src/gcode/control/M226.cpp>
PINS_DEBUGGING = src_filter=+<src/gcode/config/M43.cpp> PINS_DEBUGGING = build_src_filter=+<src/gcode/config/M43.cpp>
NO_VOLUMETRICS = src_filter=-<src/gcode/config/M200-M205.cpp> NO_VOLUMETRICS = build_src_filter=-<src/gcode/config/M200-M205.cpp>
HAS_MULTI_EXTRUDER = src_filter=+<src/gcode/config/M217.cpp> HAS_MULTI_EXTRUDER = build_src_filter=+<src/gcode/config/M217.cpp>
HAS_HOTEND_OFFSET = src_filter=+<src/gcode/config/M218.cpp> HAS_HOTEND_OFFSET = build_src_filter=+<src/gcode/config/M218.cpp>
EDITABLE_SERVO_ANGLES = src_filter=+<src/gcode/config/M281.cpp> EDITABLE_SERVO_ANGLES = build_src_filter=+<src/gcode/config/M281.cpp>
PIDTEMP = src_filter=+<src/gcode/config/M301.cpp> PIDTEMP = build_src_filter=+<src/gcode/config/M301.cpp>
PREVENT_COLD_EXTRUSION = src_filter=+<src/gcode/config/M302.cpp> PREVENT_COLD_EXTRUSION = build_src_filter=+<src/gcode/config/M302.cpp>
PIDTEMPBED = src_filter=+<src/gcode/config/M304.cpp> PIDTEMPBED = build_src_filter=+<src/gcode/config/M304.cpp>
HAS_USER_THERMISTORS = src_filter=+<src/gcode/config/M305.cpp> HAS_USER_THERMISTORS = build_src_filter=+<src/gcode/config/M305.cpp>
SD_ABORT_ON_ENDSTOP_HIT = src_filter=+<src/gcode/config/M540.cpp> SD_ABORT_ON_ENDSTOP_HIT = build_src_filter=+<src/gcode/config/M540.cpp>
BAUD_RATE_GCODE = src_filter=+<src/gcode/config/M575.cpp> BAUD_RATE_GCODE = build_src_filter=+<src/gcode/config/M575.cpp>
HAS_SMART_EFF_MOD = src_filter=+<src/gcode/config/M672.cpp> HAS_SMART_EFF_MOD = build_src_filter=+<src/gcode/config/M672.cpp>
COOLANT_CONTROL|AIR_ASSIST = src_filter=+<src/gcode/control/M7-M9.cpp> COOLANT_CONTROL|AIR_ASSIST = build_src_filter=+<src/gcode/control/M7-M9.cpp>
AIR_EVACUATION = src_filter=+<src/gcode/control/M10-M11.cpp> AIR_EVACUATION = build_src_filter=+<src/gcode/control/M10-M11.cpp>
HAS_SOFTWARE_ENDSTOPS = src_filter=+<src/gcode/control/M211.cpp> HAS_SOFTWARE_ENDSTOPS = build_src_filter=+<src/gcode/control/M211.cpp>
SERVO_DETACH_GCODE = src_filter=+<src/gcode/control/M282.cpp> SERVO_DETACH_GCODE = build_src_filter=+<src/gcode/control/M282.cpp>
HAS_DUPLICATION_MODE = src_filter=+<src/gcode/control/M605.cpp> HAS_DUPLICATION_MODE = build_src_filter=+<src/gcode/control/M605.cpp>
LIN_ADVANCE = src_filter=+<src/gcode/feature/advance> LIN_ADVANCE = build_src_filter=+<src/gcode/feature/advance>
PHOTO_GCODE = src_filter=+<src/gcode/feature/camera> PHOTO_GCODE = build_src_filter=+<src/gcode/feature/camera>
CONTROLLER_FAN_EDITABLE = src_filter=+<src/gcode/feature/controllerfan> CONTROLLER_FAN_EDITABLE = build_src_filter=+<src/gcode/feature/controllerfan>
GCODE_MACROS = src_filter=+<src/gcode/feature/macro> GCODE_MACROS = build_src_filter=+<src/gcode/feature/macro>
GRADIENT_MIX = src_filter=+<src/gcode/feature/mixing/M166.cpp> GRADIENT_MIX = build_src_filter=+<src/gcode/feature/mixing/M166.cpp>
HAS_SAVED_POSITIONS = src_filter=+<src/gcode/feature/pause/G60.cpp> +<src/gcode/feature/pause/G61.cpp> HAS_SAVED_POSITIONS = build_src_filter=+<src/gcode/feature/pause/G60.cpp> +<src/gcode/feature/pause/G61.cpp>
PARK_HEAD_ON_PAUSE = src_filter=+<src/gcode/feature/pause/M125.cpp> PARK_HEAD_ON_PAUSE = build_src_filter=+<src/gcode/feature/pause/M125.cpp>
FILAMENT_LOAD_UNLOAD_GCODES = src_filter=+<src/gcode/feature/pause/M701_M702.cpp> FILAMENT_LOAD_UNLOAD_GCODES = build_src_filter=+<src/gcode/feature/pause/M701_M702.cpp>
CNC_WORKSPACE_PLANES = src_filter=+<src/gcode/geometry/G17-G19.cpp> CNC_WORKSPACE_PLANES = build_src_filter=+<src/gcode/geometry/G17-G19.cpp>
CNC_COORDINATE_SYSTEMS = src_filter=+<src/gcode/geometry/G53-G59.cpp> CNC_COORDINATE_SYSTEMS = build_src_filter=+<src/gcode/geometry/G53-G59.cpp>
HAS_M206_COMMAND = src_filter=+<src/gcode/geometry/M206_M428.cpp> HAS_M206_COMMAND = build_src_filter=+<src/gcode/geometry/M206_M428.cpp>
EXPECTED_PRINTER_CHECK = src_filter=+<src/gcode/host/M16.cpp> EXPECTED_PRINTER_CHECK = build_src_filter=+<src/gcode/host/M16.cpp>
HOST_KEEPALIVE_FEATURE = src_filter=+<src/gcode/host/M113.cpp> HOST_KEEPALIVE_FEATURE = build_src_filter=+<src/gcode/host/M113.cpp>
AUTO_REPORT_POSITION = src_filter=+<src/gcode/host/M154.cpp> AUTO_REPORT_POSITION = build_src_filter=+<src/gcode/host/M154.cpp>
REPETIER_GCODE_M360 = src_filter=+<src/gcode/host/M360.cpp> REPETIER_GCODE_M360 = build_src_filter=+<src/gcode/host/M360.cpp>
HAS_GCODE_M876 = src_filter=+<src/gcode/host/M876.cpp> HAS_GCODE_M876 = build_src_filter=+<src/gcode/host/M876.cpp>
HAS_RESUME_CONTINUE = src_filter=+<src/gcode/lcd/M0_M1.cpp> HAS_RESUME_CONTINUE = build_src_filter=+<src/gcode/lcd/M0_M1.cpp>
LCD_SET_PROGRESS_MANUALLY = src_filter=+<src/gcode/lcd/M73.cpp> LCD_SET_PROGRESS_MANUALLY = build_src_filter=+<src/gcode/lcd/M73.cpp>
HAS_STATUS_MESSAGE = src_filter=+<src/gcode/lcd/M117.cpp> HAS_STATUS_MESSAGE = build_src_filter=+<src/gcode/lcd/M117.cpp>
HAS_LCD_CONTRAST = src_filter=+<src/gcode/lcd/M250.cpp> HAS_LCD_CONTRAST = build_src_filter=+<src/gcode/lcd/M250.cpp>
HAS_GCODE_M255 = src_filter=+<src/gcode/lcd/M255.cpp> HAS_GCODE_M255 = build_src_filter=+<src/gcode/lcd/M255.cpp>
HAS_LCD_BRIGHTNESS = src_filter=+<src/gcode/lcd/M256.cpp> HAS_LCD_BRIGHTNESS = build_src_filter=+<src/gcode/lcd/M256.cpp>
HAS_SOUND = src_filter=+<src/gcode/lcd/M300.cpp> HAS_SOUND = build_src_filter=+<src/gcode/lcd/M300.cpp>
TOUCH_SCREEN_CALIBRATION = src_filter=+<src/gcode/lcd/M995.cpp> TOUCH_SCREEN_CALIBRATION = build_src_filter=+<src/gcode/lcd/M995.cpp>
ARC_SUPPORT = src_filter=+<src/gcode/motion/G2_G3.cpp> ARC_SUPPORT = build_src_filter=+<src/gcode/motion/G2_G3.cpp>
GCODE_MOTION_MODES = src_filter=+<src/gcode/motion/G80.cpp> GCODE_MOTION_MODES = build_src_filter=+<src/gcode/motion/G80.cpp>
BABYSTEPPING = src_filter=+<src/gcode/motion/M290.cpp> +<src/feature/babystep.cpp> BABYSTEPPING = build_src_filter=+<src/gcode/motion/M290.cpp> +<src/feature/babystep.cpp>
Z_PROBE_SLED = src_filter=+<src/gcode/probe/G31_G32.cpp> Z_PROBE_SLED = build_src_filter=+<src/gcode/probe/G31_G32.cpp>
G38_PROBE_TARGET = src_filter=+<src/gcode/probe/G38.cpp> G38_PROBE_TARGET = build_src_filter=+<src/gcode/probe/G38.cpp>
MAGNETIC_PARKING_EXTRUDER = src_filter=+<src/gcode/probe/M951.cpp> MAGNETIC_PARKING_EXTRUDER = build_src_filter=+<src/gcode/probe/M951.cpp>
SDSUPPORT = src_filter=+<src/sd/cardreader.cpp> +<src/sd/Sd2Card.cpp> +<src/sd/SdBaseFile.cpp> +<src/sd/SdFatUtil.cpp> +<src/sd/SdFile.cpp> +<src/sd/SdVolume.cpp> +<src/gcode/sd> SDSUPPORT = build_src_filter=+<src/sd/cardreader.cpp> +<src/sd/Sd2Card.cpp> +<src/sd/SdBaseFile.cpp> +<src/sd/SdFatUtil.cpp> +<src/sd/SdFile.cpp> +<src/sd/SdVolume.cpp> +<src/gcode/sd>
HAS_MEDIA_SUBCALLS = src_filter=+<src/gcode/sd/M32.cpp> HAS_MEDIA_SUBCALLS = build_src_filter=+<src/gcode/sd/M32.cpp>
GCODE_REPEAT_MARKERS = src_filter=+<src/feature/repeat.cpp> +<src/gcode/sd/M808.cpp> GCODE_REPEAT_MARKERS = build_src_filter=+<src/feature/repeat.cpp> +<src/gcode/sd/M808.cpp>
HAS_EXTRUDERS = src_filter=+<src/gcode/units/M82_M83.cpp> +<src/gcode/temp/M104_M109.cpp> +<src/gcode/config/M221.cpp> HAS_EXTRUDERS = build_src_filter=+<src/gcode/units/M82_M83.cpp> +<src/gcode/temp/M104_M109.cpp> +<src/gcode/config/M221.cpp>
HAS_TEMP_PROBE = src_filter=+<src/gcode/temp/M192.cpp> HAS_TEMP_PROBE = build_src_filter=+<src/gcode/temp/M192.cpp>
HAS_COOLER = src_filter=+<src/gcode/temp/M143_M193.cpp> HAS_COOLER = build_src_filter=+<src/gcode/temp/M143_M193.cpp>
AUTO_REPORT_TEMPERATURES = src_filter=+<src/gcode/temp/M155.cpp> AUTO_REPORT_TEMPERATURES = build_src_filter=+<src/gcode/temp/M155.cpp>
MPCTEMP = src_filter=+<src/gcode/temp/M306.cpp> MPCTEMP = build_src_filter=+<src/gcode/temp/M306.cpp>
INCH_MODE_SUPPORT = src_filter=+<src/gcode/units/G20_G21.cpp> INCH_MODE_SUPPORT = build_src_filter=+<src/gcode/units/G20_G21.cpp>
TEMPERATURE_UNITS_SUPPORT = src_filter=+<src/gcode/units/M149.cpp> TEMPERATURE_UNITS_SUPPORT = build_src_filter=+<src/gcode/units/M149.cpp>
NEED_HEX_PRINT = src_filter=+<src/libs/hex_print.cpp> NEED_HEX_PRINT = build_src_filter=+<src/libs/hex_print.cpp>
NEED_LSF = src_filter=+<src/libs/least_squares_fit.cpp> NEED_LSF = build_src_filter=+<src/libs/least_squares_fit.cpp>
NOZZLE_PARK_FEATURE = src_filter=+<src/libs/nozzle.cpp> +<src/gcode/feature/pause/G27.cpp> NOZZLE_PARK_FEATURE = build_src_filter=+<src/libs/nozzle.cpp> +<src/gcode/feature/pause/G27.cpp>
NOZZLE_CLEAN_FEATURE = src_filter=+<src/libs/nozzle.cpp> +<src/gcode/feature/clean> NOZZLE_CLEAN_FEATURE = build_src_filter=+<src/libs/nozzle.cpp> +<src/gcode/feature/clean>
DELTA = src_filter=+<src/module/delta.cpp> +<src/gcode/calibrate/M666.cpp> DELTA = build_src_filter=+<src/module/delta.cpp> +<src/gcode/calibrate/M666.cpp>
POLARGRAPH = src_filter=+<src/module/polargraph.cpp> POLARGRAPH = build_src_filter=+<src/module/polargraph.cpp>
BEZIER_CURVE_SUPPORT = src_filter=+<src/module/planner_bezier.cpp> +<src/gcode/motion/G5.cpp> BEZIER_CURVE_SUPPORT = build_src_filter=+<src/module/planner_bezier.cpp> +<src/gcode/motion/G5.cpp>
PRINTCOUNTER = src_filter=+<src/module/printcounter.cpp> PRINTCOUNTER = build_src_filter=+<src/module/printcounter.cpp>
HAS_BED_PROBE = src_filter=+<src/module/probe.cpp> +<src/gcode/probe/G30.cpp> +<src/gcode/probe/M401_M402.cpp> +<src/gcode/probe/M851.cpp> HAS_BED_PROBE = build_src_filter=+<src/module/probe.cpp> +<src/gcode/probe/G30.cpp> +<src/gcode/probe/M401_M402.cpp> +<src/gcode/probe/M851.cpp>
IS_SCARA = src_filter=+<src/module/scara.cpp> IS_SCARA = build_src_filter=+<src/module/scara.cpp>
HAS_SERVOS = src_filter=+<src/module/servo.cpp> +<src/gcode/control/M280.cpp> HAS_SERVOS = build_src_filter=+<src/module/servo.cpp> +<src/gcode/control/M280.cpp>
MORGAN_SCARA = src_filter=+<src/gcode/scara> MORGAN_SCARA = build_src_filter=+<src/gcode/scara>
HAS_MICROSTEPS = src_filter=+<src/gcode/control/M350_M351.cpp> HAS_MICROSTEPS = build_src_filter=+<src/gcode/control/M350_M351.cpp>
(ESP3D_)?WIFISUPPORT = AsyncTCP, ESP Async WebServer (ESP3D_)?WIFISUPPORT = AsyncTCP, ESP Async WebServer
ESP3DLib=https://github.com/luc-github/ESP3DLib/archive/master.zip ESP3DLib=https://github.com/luc-github/ESP3DLib/archive/master.zip
arduinoWebSockets=links2004/WebSockets@2.3.4 arduinoWebSockets=links2004/WebSockets@2.3.4

View File

@@ -16,6 +16,7 @@ boards_dir = buildroot/share/PlatformIO/boards
default_envs = mega2560 default_envs = mega2560
include_dir = Marlin include_dir = Marlin
extra_configs = extra_configs =
Marlin/config.ini
ini/avr.ini ini/avr.ini
ini/due.ini ini/due.ini
ini/esp32.ini ini/esp32.ini
@@ -44,6 +45,7 @@ extra_configs =
build_flags = -g3 -D__MARLIN_FIRMWARE__ -DNDEBUG build_flags = -g3 -D__MARLIN_FIRMWARE__ -DNDEBUG
-fmax-errors=5 -fmax-errors=5
extra_scripts = extra_scripts =
pre:buildroot/share/PlatformIO/scripts/configuration.py
pre:buildroot/share/PlatformIO/scripts/common-dependencies.py pre:buildroot/share/PlatformIO/scripts/common-dependencies.py
pre:buildroot/share/PlatformIO/scripts/common-cxxflags.py pre:buildroot/share/PlatformIO/scripts/common-cxxflags.py
pre:buildroot/share/PlatformIO/scripts/preflight-checks.py pre:buildroot/share/PlatformIO/scripts/preflight-checks.py
@@ -267,17 +269,10 @@ framework = arduino
extra_scripts = ${common.extra_scripts} extra_scripts = ${common.extra_scripts}
build_flags = ${common.build_flags} build_flags = ${common.build_flags}
lib_deps = ${common.lib_deps} lib_deps = ${common.lib_deps}
platform_packages = platformio/tool-dfuutil@^1.11.0
monitor_speed = 250000 monitor_speed = 250000
monitor_flags = monitor_eol = LF
--quiet monitor_echo = yes
--echo monitor_filters = colorize, time, send_on_enter
--eol
LF
--filter
colorize
--filter
time
# #
# Just print the dependency tree # Just print the dependency tree