Compare commits

138 Commits: 2.0.9.5 ... bugfix-2.0

Commits (SHA1):

70c5bca8c2, bb4a01c4f9, 06a6708220, e3d1bd6d97, 031bc6adb9, a6cc7a4f35, 4f9fbcee2b, d1211b9f90, d5699dd5c0, 79bd1a68c7,
0922702504, 2bf631c6cc, 9a0d0e7ef1, 3fab4898e4, eee8f11849, 096bea208e, daa7ee6c6a, 09cc5473b5, 5ccaf1d233, 78789ee11d,
39f6ae0e3c, 3441e917bc, 9ba4c58595, a720b1a335, 799b8fccaf, 69a1c539fb, 6904e31e54, 407c32563b, 0281459093, 0ef496df2b,
9c2d0f47fb, cd9a23c4d0, ab346f24ca, 5ee7e3ffa4, 32765c600a, 602e14704b, 0ad83e0af5, 9bd39749d7, ce5497218a, ca06ec9abb,
cf1e4df51b, 38391eb116, 03b50354cb, ceeb6c646b, 20f79e290f, 5ecf3f876c, dee41990cc, 94a8b70ce3, b2101b9928, 976ac28be5,
b0b340aab9, 1ceac4a9fe, a8046d2a95, c6f2be637c, 30da489f1c, a540c58a2d, e52298db35, 31c350d55e, d50a3129e2, 614f54622a,
f89bb65220, 284b35d120, fe5e941d92, 80cc5f0413, 0595a55700, 2535ce2a26, 777af4b6c4, b1162d97eb, e70c350b3d, ff516e257b,
41269e9c2b, 091b0f9664, dc04f61adc, eb25530ba8, 6133ca2d68, 5a46b900d8, b09997d137, 7dc3cfa1a6, 93144f1e7d, 5e215fa3c4,
2bdc5a78ad, cd06d5f34f, 72f341b4bc, 9a4cfe4940, 4a9ecdd70b, 160762742a, 95b0ee2fbf, 505ae61b8d, 5660c3b189, 470512dd50,
310a76444d, ea630bbed7, 15915ede53, a68aa255bc, b3fe059f6c, b938d99b32, b01caf0afe, 088fa84b7f, 74339bfefc, 40fa85b92e,
1c3d5827e6, 0567d613ba, 3bf100301a, f1483e76a1, 9efccbf23e, fe86ff2d53, f6e248df6e, 6d1ce46dd1, 341bf27d1d, 0698fcb005,
d725998340, 2f814079d8, a3876c5896, 7497890f04, b16a32e7ce, 052a64052b, 4648ade0e6, a67dd76db4, 47b8671836, f04efa85cd,
5b2b08d048, 3f9869a6c1, 65490f27c4, 1afb80d45d, 6a20b1271d, 733e5f3957, 031633cde6, 5149eed13c, 2ecaebeab2, 047ecc5995,
2268e1417b, 3fa767f533, 9860580bed, 6df193a5d1, e5fb6ace4c, b659bb2a52, 7e27f06364, 72346e80fa
@@ -14,6 +14,10 @@ end_of_line = lf
 indent_style = space
 indent_size = 2
 
-[{*.py,*.conf,*.sublime-project}]
+[{*.py}]
 indent_style = space
 indent_size = 4
 
+[{*.conf,*.sublime-project}]
+indent_style = tab
+indent_size = 4
.github/workflows/test-builds.yml (vendored, new file, 147 lines)

@@ -0,0 +1,147 @@
#
# test-builds.yml
# Do test builds to catch compile errors
#

name: CI - bugfix-2.0.x

on:
  pull_request:
    branches:
    - bugfix-2.0.x
    paths-ignore:
    - config/**
    - data/**
    - docs/**
    - '**/*.md'
  push:
    branches:
    - bugfix-2.0.x
    paths-ignore:
    - config/**
    - data/**
    - docs/**
    - '**/*.md'

jobs:
  test_builds:
    name: Run All Tests
    if: github.repository == 'MarlinFirmware/Marlin'

    runs-on: ubuntu-latest

    strategy:
      matrix:
        test-platform:
          # Base Environments

          - DUE
          - DUE_archim
          - esp32
          - linux_native
          - mega2560
          - at90usb1286_dfu
          - teensy31
          - teensy35
          - teensy41
          - SAMD51_grandcentral_m4

          # Extended AVR Environments

          - FYSETC_F6
          - mega1280
          - rambo
          - sanguino1284p
          - sanguino644p

          # STM32F1 (Maple) Environments

          #- STM32F103RC_btt_maple
          - STM32F103RC_btt_USB_maple
          - STM32F103RC_fysetc_maple
          - STM32F103RC_meeb_maple
          - jgaurora_a5s_a1_maple
          - STM32F103VE_longer_maple
          #- mks_robin_maple
          - mks_robin_lite_maple
          - mks_robin_pro_maple
          #- mks_robin_nano35_maple
          #- STM32F103RE_creality_maple
          - STM32F103VE_ZM3E4V2_USB_maple

          # STM32 (ST) Environments

          - STM32F103RC_btt
          #- STM32F103RC_btt_USB
          - STM32F103RE_btt
          - STM32F103RE_btt_USB
          - STM32F103RE_creality
          - STM32F401RC_creality
          - STM32F103VE_longer
          - STM32F407VE_black
          - STM32F401VE_STEVAL
          - BIGTREE_BTT002
          - BIGTREE_SKR_PRO
          - BIGTREE_GTR_V1_0
          - mks_robin
          - ARMED
          - FYSETC_S6
          - STM32F070CB_malyan
          - STM32F070RB_malyan
          - malyan_M300
          - FLYF407ZG
          - rumba32
          - LERDGEX
          - LERDGEK
          - mks_robin_nano35
          - NUCLEO_F767ZI
          - REMRAM_V1
          - BTT_SKR_SE_BX
          - chitu_f103
          - Opulo_Lumen_REV3

          # Put lengthy tests last

          - LPC1768
          - LPC1769

          # Non-working environment tests
          #- at90usb1286_cdc
          #- STM32F103CB_malyan
          #- STM32F103RE
          #- mks_robin_mini

    steps:

    - name: Check out the PR
      uses: actions/checkout@v3

    - name: Cache pip
      uses: actions/cache@v3
      with:
        path: ~/.cache/pip
        key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
        restore-keys: |
          ${{ runner.os }}-pip-

    - name: Cache PlatformIO
      uses: actions/cache@v3
      with:
        path: ~/.platformio
        key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}

    - name: Select Python 3.7
      uses: actions/setup-python@v3
      with:
        python-version: '3.7' # Version range or exact version of a Python version to use, using semvers version range syntax.
        architecture: 'x64' # optional x64 or x86. Defaults to x64 if not specified

    - name: Install PlatformIO
      run: |
        pip install -U platformio
        pio upgrade --dev
        pio pkg update --global

    - name: Run ${{ matrix.test-platform }} Tests
      run: |
        make tests-single-ci TEST_TARGET=${{ matrix.test-platform }}
@@ -28,7 +28,7 @@
 /**
  * Marlin release version identifier
  */
-//#define SHORT_BUILD_VERSION "2.0.9.5"
+//#define SHORT_BUILD_VERSION "bugfix-2.0.x"
 
 /**
  * Verbose version identifier which should contain a reference to the location

@@ -41,7 +41,7 @@
 * here we define this default string as the date where the latest release
 * version was tagged.
 */
-//#define STRING_DISTRIBUTION_DATE "2022-07-29"
+//#define STRING_DISTRIBUTION_DATE "2023-04-16"
 
 /**
  * Defines a generic printer name to be output to the LCD after booting Marlin.
Marlin/config.ini (new file, 211 lines)

@@ -0,0 +1,211 @@
#
# Marlin Firmware
# config.ini - Options to apply before the build
#
[config:base]
ini_use_config = none

# Load all config: sections in this file
;ini_use_config = all
# Load config file relative to Marlin/
;ini_use_config = another.ini
# Download configurations from GitHub
;ini_use_config = example/Creality/Ender-5 Plus @ bugfix-2.1.x
# Download configurations from your server
;ini_use_config = https://me.myserver.com/path/to/configs
# Evaluate config:base and do a config dump
;ini_use_config = base
;config_export = 2

[config:minimal]
motherboard = BOARD_RAMPS_14_EFB
serial_port = 0
baudrate = 250000

use_watchdog = on
thermal_protection_hotends = on
thermal_protection_hysteresis = 4
thermal_protection_period = 40

bufsize = 4
block_buffer_size = 16
max_cmd_size = 96

extruders = 1
temp_sensor_0 = 1

temp_hysteresis = 3
heater_0_mintemp = 5
heater_0_maxtemp = 275
preheat_1_temp_hotend = 180

bang_max = 255
pidtemp = on
pid_k1 = 0.95
pid_max = BANG_MAX
pid_functional_range = 10

default_kp = 22.20
default_ki = 1.08
default_kd = 114.00

x_driver_type = A4988
y_driver_type = A4988
z_driver_type = A4988
e0_driver_type = A4988

x_bed_size = 200
x_min_pos = 0
x_max_pos = X_BED_SIZE

y_bed_size = 200
y_min_pos = 0
y_max_pos = Y_BED_SIZE

z_min_pos = 0
z_max_pos = 200

x_home_dir = -1
y_home_dir = -1
z_home_dir = -1

use_xmin_plug = on
use_ymin_plug = on
use_zmin_plug = on

x_min_endstop_inverting = false
y_min_endstop_inverting = false
z_min_endstop_inverting = false

default_axis_steps_per_unit = { 80, 80, 400, 500 }
axis_relative_modes = { false, false, false, false }
default_max_feedrate = { 300, 300, 5, 25 }
default_max_acceleration = { 3000, 3000, 100, 10000 }

homing_feedrate_mm_m = { (50*60), (50*60), (4*60) }
homing_bump_divisor = { 2, 2, 4 }

x_enable_on = 0
y_enable_on = 0
z_enable_on = 0
e_enable_on = 0

invert_x_dir = false
invert_y_dir = true
invert_z_dir = false
invert_e0_dir = false

invert_e_step_pin = false
invert_x_step_pin = false
invert_y_step_pin = false
invert_z_step_pin = false

disable_x = false
disable_y = false
disable_z = false
disable_e = false

proportional_font_ratio = 1.0
default_nominal_filament_dia = 1.75

junction_deviation_mm = 0.013

default_acceleration = 3000
default_travel_acceleration = 3000
default_retract_acceleration = 3000

default_minimumfeedrate = 0.0
default_mintravelfeedrate = 0.0

minimum_planner_speed = 0.05
min_steps_per_segment = 6
default_minsegmenttime = 20000

[config:basic]
bed_overshoot = 10
busy_while_heating = on
default_ejerk = 5.0
default_keepalive_interval = 2
default_leveling_fade_height = 0.0
disable_inactive_extruder = on
display_charset_hd44780 = JAPANESE
eeprom_boot_silent = on
eeprom_chitchat = on
endstoppullups = on
extrude_maxlength = 200
extrude_mintemp = 170
host_keepalive_feature = on
hotend_overshoot = 15
jd_handle_small_segments = on
lcd_info_screen_style = 0
lcd_language = en
max_bed_power = 255
mesh_inset = 0
min_software_endstops = on
max_software_endstops = on
min_software_endstop_x = on
min_software_endstop_y = on
min_software_endstop_z = on
max_software_endstop_x = on
max_software_endstop_y = on
max_software_endstop_z = on
preheat_1_fan_speed = 0
preheat_1_label = "PLA"
preheat_1_temp_bed = 70
prevent_cold_extrusion = on
prevent_lengthy_extrude = on
printjob_timer_autostart = on
probing_margin = 10
show_bootscreen = on
soft_pwm_scale = 0
string_config_h_author = "(none, default config)"
temp_bed_hysteresis = 3
temp_bed_residency_time = 10
temp_bed_window = 1
temp_residency_time = 10
temp_window = 1
validate_homing_endstops = on
xy_probe_feedrate = (133*60)
z_clearance_between_probes = 5
z_clearance_deploy_probe = 10
z_clearance_multi_probe = 5

[config:advanced]
arc_support = on
auto_report_temperatures = on
autotemp = on
autotemp_oldweight = 0.98
bed_check_interval = 5000
default_stepper_deactive_time = 120
default_volumetric_extruder_limit = 0.00
disable_inactive_e = true
disable_inactive_x = true
disable_inactive_y = true
disable_inactive_z = true
e0_auto_fan_pin = -1
encoder_100x_steps_per_sec = 80
encoder_10x_steps_per_sec = 30
encoder_rate_multiplier = on
extended_capabilities_report = on
extruder_auto_fan_speed = 255
extruder_auto_fan_temperature = 50
fanmux0_pin = -1
fanmux1_pin = -1
fanmux2_pin = -1
faster_gcode_parser = on
homing_bump_mm = { 5, 5, 2 }
max_arc_segment_mm = 1.0
min_arc_segment_mm = 0.1
min_circle_segments = 72
n_arc_correction = 25
serial_overrun_protection = on
slowdown = on
slowdown_divisor = 2
temp_sensor_bed = 0
thermal_protection_bed_hysteresis = 2
thermocouple_max_errors = 15
tx_buffer_size = 0
watch_bed_temp_increase = 2
watch_bed_temp_period = 60
watch_temp_increase = 2
watch_temp_period = 20
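Because config.ini is ordinary INI syntax, its option groups can be inspected with standard tooling before a build. Below is a minimal sketch (not part of this changeset; it assumes it is run from the repository root and uses only Python's standard configparser) that prints the active ini_use_config value and the size of each config: section:

```python
# Minimal sketch, not part of this diff. Assumption: run from the repository root.
# Marlin itself applies these options through its own PlatformIO build scripts;
# this only inspects the file with the standard library.
import configparser

cp = configparser.ConfigParser()
cp.read("Marlin/config.ini")

# [config:base] steers which other sections (if any) are applied.
base = cp["config:base"]
print("ini_use_config =", base.get("ini_use_config", "none"))

# List every option group defined in the file.
for section in cp.sections():
    if section.startswith("config:"):
        print(f"{section}: {len(cp[section])} options")
```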
@@ -6,14 +6,14 @@ (indentation-only change; body shown once)
#
import pioutil
if pioutil.is_pio_build():
    import platform
    current_OS = platform.system()

    if current_OS == 'Windows':

        Import("env")

        # Use bossac.exe on Windows
        env.Replace(
            UPLOADCMD="bossac --info --unlock --write --verify --reset --erase -U false --boot $SOURCE"
        )
@@ -9,119 +9,127 @@ from __future__ import print_function
 import pioutil
 if pioutil.is_pio_build():
 
     target_filename = "FIRMWARE.CUR"
     target_drive = "REARM"
 
-    import os,getpass,platform
+    import platform
 
     current_OS = platform.system()
     Import("env")
 
     def print_error(e):
         print('\nUnable to find destination disk (%s)\n' \
               'Please select it in platformio.ini using the upload_port keyword ' \
               '(https://docs.platformio.org/en/latest/projectconf/section_env_upload.html) ' \
               'or copy the firmware (.pio/build/%s/firmware.bin) manually to the appropriate disk\n' \
               %(e, env.get('PIOENV')))
 
     def before_upload(source, target, env):
         try:
+            from pathlib import Path
             #
             # Find a disk for upload
             #
             upload_disk = 'Disk not found'
             target_file_found = False
             target_drive_found = False
             if current_OS == 'Windows':
                 #
                 # platformio.ini will accept this for a Windows upload port designation: 'upload_port = L:'
                 # Windows - doesn't care about the disk's name, only cares about the drive letter
                 import subprocess,string
                 from ctypes import windll
+                from pathlib import PureWindowsPath
 
                 # getting list of drives
                 # https://stackoverflow.com/questions/827371/is-there-a-way-to-list-all-the-available-drive-letters-in-python
                 drives = []
                 bitmask = windll.kernel32.GetLogicalDrives()
                 for letter in string.ascii_uppercase:
                     if bitmask & 1:
                         drives.append(letter)
                     bitmask >>= 1
 
                 for drive in drives:
-                    final_drive_name = drive + ':\\'
+                    final_drive_name = drive + ':'
                     # print ('disc check: {}'.format(final_drive_name))
                     try:
                         volume_info = str(subprocess.check_output('cmd /C dir ' + final_drive_name, stderr=subprocess.STDOUT))
                     except Exception as e:
                         print ('error:{}'.format(e))
                         continue
                     else:
                         if target_drive in volume_info and not target_file_found: # set upload if not found target file yet
                             target_drive_found = True
-                            upload_disk = final_drive_name
+                            upload_disk = PureWindowsPath(final_drive_name)
                         if target_filename in volume_info:
                             if not target_file_found:
-                                upload_disk = final_drive_name
+                                upload_disk = PureWindowsPath(final_drive_name)
                             target_file_found = True
 
             elif current_OS == 'Linux':
                 #
                 # platformio.ini will accept this for a Linux upload port designation: 'upload_port = /media/media_name/drive'
                 #
-                drives = os.listdir(os.path.join(os.sep, 'media', getpass.getuser()))
-                if target_drive in drives:  # If target drive is found, use it.
-                    target_drive_found = True
-                    upload_disk = os.path.join(os.sep, 'media', getpass.getuser(), target_drive) + os.sep
-                else:
-                    for drive in drives:
-                        try:
-                            files = os.listdir(os.path.join(os.sep, 'media', getpass.getuser(), drive))
-                        except:
-                            continue
-                        else:
-                            if target_filename in files:
-                                upload_disk = os.path.join(os.sep, 'media', getpass.getuser(), drive) + os.sep
-                                target_file_found = True
-                                break
+                import getpass
+                user = getpass.getuser()
+                mpath = Path('/media', user)
+                drives = [ x for x in mpath.iterdir() if x.is_dir() ]
+                if target_drive in drives:  # If target drive is found, use it.
+                    target_drive_found = True
+                    upload_disk = mpath / target_drive
+                else:
+                    for drive in drives:
+                        try:
+                            fpath = mpath / drive
+                            filenames = [ x.name for x in fpath.iterdir() if x.is_file() ]
+                        except:
+                            continue
+                        else:
+                            if target_filename in filenames:
+                                upload_disk = mpath / drive
+                                target_file_found = True
+                                break
                 #
                 # set upload_port to drive if found
                 #
 
                 if target_file_found or target_drive_found:
                     env.Replace(
                         UPLOAD_FLAGS="-P$UPLOAD_PORT"
                     )
 
             elif current_OS == 'Darwin':  # MAC
                 #
                 # platformio.ini will accept this for a OSX upload port designation: 'upload_port = /media/media_name/drive'
                 #
-                drives = os.listdir('/Volumes')  # human readable names
-                if target_drive in drives and not target_file_found: # set upload if not found target file yet
-                    target_drive_found = True
-                    upload_disk = '/Volumes/' + target_drive + '/'
-                for drive in drives:
-                    try:
-                        filenames = os.listdir('/Volumes/' + drive + '/') # will get an error if the drive is protected
-                    except:
-                        continue
-                    else:
-                        if target_filename in filenames:
-                            if not target_file_found:
-                                upload_disk = '/Volumes/' + drive + '/'
-                            target_file_found = True
+                dpath = Path('/Volumes')  # human readable names
+                drives = [ x for x in dpath.iterdir() if x.is_dir() ]
+                if target_drive in drives and not target_file_found: # set upload if not found target file yet
+                    target_drive_found = True
+                    upload_disk = dpath / target_drive
+                for drive in drives:
+                    try:
+                        fpath = dpath / drive # will get an error if the drive is protected
+                        filenames = [ x.name for x in fpath.iterdir() if x.is_file() ]
+                    except:
+                        continue
+                    else:
+                        if target_filename in filenames:
+                            upload_disk = dpath / drive
+                            target_file_found = True
+                            break
 
             #
             # Set upload_port to drive if found
             #
             if target_file_found or target_drive_found:
-                env.Replace(UPLOAD_PORT=upload_disk)
+                env.Replace(UPLOAD_PORT=str(upload_disk))
                 print('\nUpload disk: ', upload_disk, '\n')
             else:
                 print_error('Autodetect Error')
 
         except Exception as e:
             print_error(str(e))
 
     env.AddPreAction("upload", before_upload)
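The rewritten upload helper above swaps the os.path/os.listdir/getpass plumbing for pathlib. As a standalone illustration (not part of the changeset, and assuming a Linux-style /media/&lt;user&gt; mount layout as in the Linux branch above), the same volume-scan idea looks like this:

```python
# Standalone sketch of the drive-scan idea used above.
# Assumption: Linux-style /media/<user> mounts; use '/Volumes' as the root on macOS.
from pathlib import Path
import getpass

def find_upload_disk(target_filename="FIRMWARE.CUR", target_drive="REARM"):
    mpath = Path('/media', getpass.getuser())
    if not mpath.is_dir():
        return None
    drives = [d for d in mpath.iterdir() if d.is_dir()]
    # Prefer a volume literally named after the target drive...
    for d in drives:
        if d.name == target_drive:
            return d
    # ...otherwise pick any volume that already holds the firmware file.
    for d in drives:
        try:
            names = [f.name for f in d.iterdir() if f.is_file()]
        except OSError:
            continue  # skip protected or vanished mounts
        if target_filename in names:
            return d
    return None

if __name__ == "__main__":
    print("Upload disk:", find_upload_disk())
```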
@@ -108,7 +108,7 @@ void GcodeSuite::M907() {
     // Additional extruders use B,C,D.
     // TODO: Change these parameters because 'E' is used and because 'D' should be reserved for debugging. B<index>?
     #if E_STEPPERS >= 2
-      for (uint8_t i = E_AXIS + 1; i < _MAX(DIGIPOT_I2C_NUM_CHANNELS, (NUM_AXES + 3)); i++)
+      for (uint8_t i = E_AXIS + 1; i <= _MIN(DIGIPOT_I2C_NUM_CHANNELS - 1, E_AXIS + 3); i++) // Up to B=E1 C=E2 D=E3
        if (parser.seenval('B' + i - (E_AXIS + 1))) digipot_i2c.set_current(i, parser.value_float());
     #endif
   #endif
@@ -25,7 +25,7 @@
 * Release version. Leave the Marlin version or apply a custom scheme.
 */
 #ifndef SHORT_BUILD_VERSION
-  #define SHORT_BUILD_VERSION "2.0.9.5"
+  #define SHORT_BUILD_VERSION "bugfix-2.0.x"
 #endif
 
 /**

@@ -42,7 +42,7 @@
 * version was tagged.
 */
 #ifndef STRING_DISTRIBUTION_DATE
-  #define STRING_DISTRIBUTION_DATE "2022-07-29"
+  #define STRING_DISTRIBUTION_DATE "2023-04-16"
 #endif
 
 /**
@@ -987,7 +987,7 @@ class Planner {
     FORCE_INLINE static void recalculate_max_e_jerk() {
       const float prop = junction_deviation_mm * SQRT(0.5) / (1.0f - SQRT(0.5));
       EXTRUDER_LOOP()
-        max_e_jerk[E_INDEX_N(e)] = SQRT(prop * settings.max_acceleration_mm_per_s2[E_INDEX_N(e)]);
+        max_e_jerk[E_INDEX_N(e)] = SQRT(prop * settings.max_acceleration_mm_per_s2[E_AXIS_N(e)]);
     }
   #endif
@@ -423,6 +423,7 @@
   #define DOGLCD_A0 EXP1_07_PIN
   #define DOGLCD_SCK EXP2_02_PIN
   #define DOGLCD_MOSI EXP2_06_PIN
+  #define FORCE_SOFT_SPI
 
 #elif ENABLED(ENDER2_STOCKDISPLAY)
README.md (28 changed lines)

@@ -15,10 +15,14 @@
 Additional documentation can be found at the [Marlin Home Page](https://marlinfw.org/).
 Please test this firmware and let us know if it misbehaves in any way. Volunteers are standing by!
 
-## Marlin 2.0
+## Marlin 2.0 Bugfix Branch
+
+__Not for production use. Use with caution!__
 
 Marlin 2.0 takes this popular RepRap firmware to the next level by adding support for much faster 32-bit and ARM-based boards while improving support for 8-bit AVR boards. Read about Marlin's decision to use a "Hardware Abstraction Layer" below.
 
+This branch is for patches to the latest 2.0.x release version. Periodically this branch will form the basis for the next minor 2.0.x release.
+
 Download earlier versions of Marlin on the [Releases page](https://github.com/MarlinFirmware/Marlin/releases).
 
 ## Example Configurations

@@ -27,10 +31,11 @@ Before building Marlin you'll need to configure it for your specific hardware. Y
 
 ## Building Marlin 2.0
 
-To build Marlin 2.0 you'll need [Arduino IDE 1.8.8 or newer](https://www.arduino.cc/en/main/software) or [PlatformIO](http://docs.platformio.org/en/latest/ide.html#platformio-ide). Detailed build and install instructions are posted at:
-
-  - [Installing Marlin (Arduino)](http://marlinfw.org/docs/basics/install_arduino.html)
-  - [Installing Marlin (VSCode)](http://marlinfw.org/docs/basics/install_platformio_vscode.html).
+To build Marlin 2.0 you'll need [Arduino IDE 1.8.8 or newer](https://www.arduino.cc/en/main/software) or [PlatformIO](https://docs.platformio.org/en/latest/ide.html#platformio-ide). We've posted detailed instructions on [Building Marlin with Arduino](https://marlinfw.org/docs/basics/install_arduino.html) and [Building Marlin with PlatformIO for ReArm](https://marlinfw.org/docs/basics/install_rearm.html) (which applies well to other 32-bit boards).
 
 ## Hardware Abstraction Layer (HAL)
 
 Marlin 2.0 introduces a layer of abstraction so that all the existing high-level code can be built for 32-bit platforms while still retaining full 8-bit AVR compatibility. Retaining AVR compatibility and a single code-base is important to us, because we want to make sure that features and patches get as much testing and attention as possible, and that all platforms always benefit from the latest improvements.
 
 ### Supported Platforms

@@ -52,11 +57,18 @@ To build Marlin 2.0 you'll need [Arduino IDE 1.8.8 or newer](https://www.arduino
 [Teensy 4.1](https://www.pjrc.com/store/teensy41.html)|ARM® Cortex-M7|
 Linux Native|x86/ARM/etc.|Raspberry Pi
 
-## Submitting Changes
+## Submitting Patches
 
-- Submit **Bug Fixes** as Pull Requests to the ([bugfix-2.0.x](https://github.com/MarlinFirmware/Marlin/tree/bugfix-2.0.x)) branch.
-- Follow the [Coding Standards](http://marlinfw.org/docs/development/coding_standards.html) to gain points with the maintainers.
-- Please submit your questions and concerns to the [Issue Queue](https://github.com/MarlinFirmware/Marlin/issues).
+Proposed patches should be submitted as a Pull Request against the ([bugfix-2.0.x](https://github.com/MarlinFirmware/Marlin/tree/bugfix-2.0.x)) branch.
+
+- This branch is for fixing bugs and integrating any new features for the duration of the Marlin 2.0.x life-cycle.
+- Follow the [Coding Standards](https://marlinfw.org/docs/development/coding_standards.html) to gain points with the maintainers.
+- Please submit Feature Requests and Bug Reports to the [Issue Queue](https://github.com/MarlinFirmware/Marlin/issues/new/choose). Support resources are also listed there.
+- Whenever you add new features, be sure to add tests to `buildroot/tests` and then run your tests locally, if possible.
+  - It's optional: Running all the tests on Windows might take a long time, and they will run anyway on GitHub.
+  - If you're running the tests on Linux (or on WSL with the code on a Linux volume) the speed is much faster.
+  - You can use `make tests-all-local` or `make tests-single-local TEST_TARGET=...`.
+  - If you prefer Docker you can use `make tests-all-local-docker` or `make tests-all-local-docker TEST_TARGET=...`.
 
 ## Marlin Support
@@ -5,7 +5,7 @@
 # Examples:
 # use_example_configs
 # use_example_configs Creality/CR-10/CrealityV1
-# use_example_configs release-2.0.9.5:Creality/CR-10/CrealityV1
+# use_example_configs release-2.0.9.4:Creality/CR-10/CrealityV1
 #
 # If a configpath has spaces (or quotes) escape them or enquote the path
 #
@@ -4,17 +4,17 @@ (indentation-only change; body shown once)
#
import pioutil
if pioutil.is_pio_build():
    from os.path import join, isfile
    import shutil

    Import("env")

    mf = env["MARLIN_FEATURES"]
    rxBuf = mf["RX_BUFFER_SIZE"] if "RX_BUFFER_SIZE" in mf else "0"
    txBuf = mf["TX_BUFFER_SIZE"] if "TX_BUFFER_SIZE" in mf else "0"

    serialBuf = str(max(int(rxBuf), int(txBuf), 350))

    build_flags = env.get('BUILD_FLAGS')
    build_flags.append("-DSERIAL_BUFFER_SIZE=" + serialBuf)
    env.Replace(BUILD_FLAGS=build_flags)
@@ -4,17 +4,16 @@
 import pioutil
 if pioutil.is_pio_build():
 
-    import os
     Import("env", "projenv")
 
     flash_size = 0
     vect_tab_addr = 0
 
     for define in env['CPPDEFINES']:
         if define[0] == "VECT_TAB_ADDR":
             vect_tab_addr = define[1]
         if define[0] == "STM32_FLASH_SIZE":
             flash_size = define[1]
 
     print('Use the {0:s} address as the marlin app entry point.'.format(vect_tab_addr))
     print('Use the {0:d}KB flash version of stm32f103rct6 chip.'.format(flash_size))
@@ -3,26 +3,25 @@
 #
 import pioutil
 if pioutil.is_pio_build():
-    import os
     from os.path import join
     from os.path import expandvars
     Import("env")
 
     # Custom HEX from ELF
     env.AddPostAction(
         join("$BUILD_DIR", "${PROGNAME}.elf"),
         env.VerboseAction(" ".join([
             "$OBJCOPY", "-O ihex", "$TARGET",
             "\"" + join("$BUILD_DIR", "${PROGNAME}.hex") + "\"", # Note: $BUILD_DIR is a full path
         ]), "Building $TARGET"))
 
     # In-line command with arguments
     UPLOAD_TOOL="stm32flash"
     platform = env.PioPlatform()
     if platform.get_package_dir("tool-stm32duino") != None:
         UPLOAD_TOOL=expandvars("\"" + join(platform.get_package_dir("tool-stm32duino"),"stm32flash","stm32flash") + "\"")
 
     env.Replace(
         UPLOADER=UPLOAD_TOOL,
         UPLOADCMD=expandvars(UPLOAD_TOOL + " -v -i rts,-dtr,dtr -R -b 115200 -g 0x8000000 -w \"" + join("$BUILD_DIR","${PROGNAME}.hex")+"\"" + " $UPLOAD_PORT")
     )
@@ -3,30 +3,29 @@
 #
 import pioutil
 if pioutil.is_pio_build():
-    import os,shutil,marlin
-    from SCons.Script import DefaultEnvironment
-    from platformio import util
+    import shutil,marlin
+    from pathlib import Path
 
-    env = DefaultEnvironment()
+    Import("env")
     platform = env.PioPlatform()
     board = env.BoardConfig()
 
-    FRAMEWORK_DIR = platform.get_package_dir("framework-arduinoststm32-maple")
-    assert os.path.isdir(FRAMEWORK_DIR)
+    FRAMEWORK_DIR = Path(platform.get_package_dir("framework-arduinoststm32-maple"))
+    assert FRAMEWORK_DIR.is_dir()
 
-    source_root = os.path.join("buildroot", "share", "PlatformIO", "variants")
-    assert os.path.isdir(source_root)
+    source_root = Path("buildroot/share/PlatformIO/variants")
+    assert source_root.is_dir()
 
     variant = board.get("build.variant")
-    variant_dir = os.path.join(FRAMEWORK_DIR, "STM32F1", "variants", variant)
+    variant_dir = FRAMEWORK_DIR / "STM32F1/variants" / variant
 
-    source_dir = os.path.join(source_root, variant)
-    assert os.path.isdir(source_dir)
+    source_dir = source_root / variant
+    assert source_dir.is_dir()
 
-    if os.path.isdir(variant_dir):
+    if variant_dir.is_dir():
         shutil.rmtree(variant_dir)
 
-    if not os.path.isdir(variant_dir):
-        os.mkdir(variant_dir)
+    if not variant_dir.is_dir():
+        variant_dir.mkdir()
 
     marlin.copytree(source_dir, variant_dir)
@@ -4,114 +4,123 @@
 #
 import pioutil
 if pioutil.is_pio_build():
-    import os,random,struct,uuid,marlin
-    # Relocate firmware from 0x08000000 to 0x08008800
-    marlin.relocate_firmware("0x08008800")
+    import struct,uuid,marlin
+
+    board = marlin.env.BoardConfig()
 
     def calculate_crc(contents, seed):
         accumulating_xor_value = seed;
 
         for i in range(0, len(contents), 4):
             value = struct.unpack('<I', contents[ i : i + 4])[0]
             accumulating_xor_value = accumulating_xor_value ^ value
         return accumulating_xor_value
 
     def xor_block(r0, r1, block_number, block_size, file_key):
         # This is the loop counter
         loop_counter = 0x0
 
         # This is the key length
         key_length = 0x18
 
         # This is an initial seed
         xor_seed = 0x4BAD
 
         # This is the block counter
         block_number = xor_seed * block_number
 
         #load the xor key from the file
         r7 = file_key
 
         for loop_counter in range(0, block_size):
             # meant to make sure different bits of the key are used.
             xor_seed = int(loop_counter / key_length)
 
             # IP is a scratch register / R12
             ip = loop_counter - (key_length * xor_seed)
 
             # xor_seed = (loop_counter * loop_counter) + block_number
             xor_seed = (loop_counter * loop_counter) + block_number
 
             # shift the xor_seed left by the bits in IP.
             xor_seed = xor_seed >> ip
 
             # load a byte into IP
             ip = r0[loop_counter]
 
             # XOR the seed with r7
             xor_seed = xor_seed ^ r7
 
             # and then with IP
             xor_seed = xor_seed ^ ip
 
             #Now store the byte back
             r1[loop_counter] = xor_seed & 0xFF
 
             #increment the loop_counter
             loop_counter = loop_counter + 1
 
     def encrypt_file(input, output_file, file_length):
         input_file = bytearray(input.read())
         block_size = 0x800
         key_length = 0x18
 
         uid_value = uuid.uuid4()
         file_key = int(uid_value.hex[0:8], 16)
 
         xor_crc = 0xEF3D4323;
 
         # the input file is exepcted to be in chunks of 0x800
         # so round the size
         while len(input_file) % block_size != 0:
             input_file.extend(b'0x0')
 
         # write the file header
         output_file.write(struct.pack(">I", 0x443D2D3F))
         # encrypt the contents using a known file header key
 
         # write the file_key
         output_file.write(struct.pack("<I", file_key))
 
         #TODO - how to enforce that the firmware aligns to block boundaries?
         block_count = int(len(input_file) / block_size)
         print ("Block Count is ", block_count)
         for block_number in range(0, block_count):
             block_offset = (block_number * block_size)
             block_end = block_offset + block_size
             block_array = bytearray(input_file[block_offset: block_end])
             xor_block(block_array, block_array, block_number, block_size, file_key)
             for n in range (0, block_size):
                 input_file[block_offset + n] = block_array[n]
 
             # update the expected CRC value.
             xor_crc = calculate_crc(block_array, xor_crc)
 
         # write CRC
         output_file.write(struct.pack("<I", xor_crc))
 
         # finally, append the encrypted results.
         output_file.write(input_file)
         return
 
     # Encrypt ${PROGNAME}.bin and save it as 'update.cbd'
     def encrypt(source, target, env):
-        firmware = open(target[0].path, "rb")
-        update = open(target[0].dir.path + '/update.cbd', "wb")
-        length = os.path.getsize(target[0].path)
+        from pathlib import Path
 
-        encrypt_file(firmware, update, length)
+        fwpath = Path(target[0].path)
+        fwsize = fwpath.stat().st_size
 
-        firmware.close()
-        update.close()
+        enname = board.get("build.crypt_chitu")
+        enpath = Path(target[0].dir.path)
 
-    marlin.add_post_action(encrypt);
+        fwfile = fwpath.open("rb")
+        enfile = (enpath / enname).open("wb")
+
+        print(f"Encrypting {fwpath} to {enname}")
+        encrypt_file(fwfile, enfile, fwsize)
+        fwfile.close()
+        enfile.close()
+        fwpath.unlink()
+
+    marlin.relocate_firmware("0x08008800")
+    marlin.add_post_action(encrypt);
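For reference, the running XOR checksum used by encrypt_file() above can be exercised on its own. A minimal sketch, assuming a buffer already padded to whole 0x800-byte blocks as encrypt_file() arranges:

```python
# Minimal sketch of the running XOR checksum chained across blocks, as above.
# Assumption: data is already padded to a whole number of 0x800-byte blocks.
import struct

def calculate_crc(contents, seed):
    accumulating_xor_value = seed
    for i in range(0, len(contents), 4):
        value = struct.unpack('<I', contents[i:i + 4])[0]
        accumulating_xor_value ^= value
    return accumulating_xor_value

block_size = 0x800
data = bytearray(b'\x01\x02\x03\x04' * (block_size // 4) * 2)  # two blocks of sample data

xor_crc = 0xEF3D4323  # same initial seed as the build script
for block_number in range(len(data) // block_size):
    block = data[block_number * block_size:(block_number + 1) * block_size]
    xor_crc = calculate_crc(block, xor_crc)

print(hex(xor_crc))
```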
@@ -2,38 +2,45 @@
 # common-cxxflags.py
 # Convenience script to apply customizations to CPP flags
 #
 
 import pioutil
 if pioutil.is_pio_build():
     Import("env")
 
     cxxflags = [
-        #"-Wno-incompatible-pointer-types",
-        #"-Wno-unused-const-variable",
-        #"-Wno-maybe-uninitialized",
-        #"-Wno-sign-compare"
+        # "-Wno-incompatible-pointer-types",
+        # "-Wno-unused-const-variable",
+        # "-Wno-maybe-uninitialized",
+        # "-Wno-sign-compare"
     ]
-    if "teensy" not in env['PIOENV']:
+    if "teensy" not in env["PIOENV"]:
         cxxflags += ["-Wno-register"]
     env.Append(CXXFLAGS=cxxflags)
 
     #
     # Add CPU frequency as a compile time constant instead of a runtime variable
     #
     def add_cpu_freq():
-        if 'BOARD_F_CPU' in env:
-            env['BUILD_FLAGS'].append('-DBOARD_F_CPU=' + env['BOARD_F_CPU'])
+        if "BOARD_F_CPU" in env:
+            env["BUILD_FLAGS"].append("-DBOARD_F_CPU=" + env["BOARD_F_CPU"])
 
     # Useful for JTAG debugging
     #
     # It will separate release and debug build folders.
     # It useful to keep two live versions: a debug version for debugging and another for
     # release, for flashing when upload is not done automatically by jlink/stlink.
     # Without this, PIO needs to recompile everything twice for any small change.
-    if env.GetBuildType() == "debug" and env.get('UPLOAD_PROTOCOL') not in ['jlink', 'stlink', 'custom']:
-        env['BUILD_DIR'] = '$PROJECT_BUILD_DIR/$PIOENV/debug'
+    if env.GetBuildType() == "debug" and env.get("UPLOAD_PROTOCOL") not in ["jlink", "stlink", "custom"]:
+        env["BUILD_DIR"] = "$PROJECT_BUILD_DIR/$PIOENV/debug"
+
+    def on_program_ready(source, target, env):
+        import shutil
+        shutil.copy(target[0].get_abspath(), env.subst("$PROJECT_BUILD_DIR/$PIOENV"))
+
+    env.AddPostAction("$PROGPATH", on_program_ready)
 
     # On some platform, F_CPU is a runtime variable. Since it's used to convert from ns
     # to CPU cycles, this adds overhead preventing small delay (in the order of less than
     # 30 cycles) to be generated correctly. By using a compile time constant instead
     # the compiler will perform the computation and this overhead will be avoided
     add_cpu_freq()
@@ -4,13 +4,13 @@ (indentation-only change; body shown once)
#
import pioutil
if pioutil.is_pio_build():
    Import("env", "projenv")

    def apply_board_build_flags():
        if not 'BOARD_CUSTOM_BUILD_FLAGS' in env['MARLIN_FEATURES']:
            return
        projenv.Append(CCFLAGS=env['MARLIN_FEATURES']['BOARD_CUSTOM_BUILD_FLAGS'].split())

    # We need to add the board build flags in a post script
    # so the platform build script doesn't overwrite the custom CCFLAGS
    apply_board_build_flags()
|
@@ -5,247 +5,248 @@
|
||||
import pioutil
|
||||
if pioutil.is_pio_build():
|
||||
|
||||
import subprocess,os,re
|
||||
Import("env")
|
||||
import subprocess,os,re
|
||||
Import("env")
|
||||
|
||||
from platformio.package.meta import PackageSpec
|
||||
from platformio.project.config import ProjectConfig
|
||||
from platformio.package.meta import PackageSpec
|
||||
from platformio.project.config import ProjectConfig
|
||||
|
||||
verbose = 0
|
||||
FEATURE_CONFIG = {}
|
||||
verbose = 0
|
||||
FEATURE_CONFIG = {}
|
||||
|
||||
def validate_pio():
|
||||
PIO_VERSION_MIN = (6, 0, 1)
|
||||
try:
|
||||
from platformio import VERSION as PIO_VERSION
|
||||
weights = (1000, 100, 1)
|
||||
version_min = sum([x[0] * float(re.sub(r'[^0-9]', '.', str(x[1]))) for x in zip(weights, PIO_VERSION_MIN)])
|
||||
version_cur = sum([x[0] * float(re.sub(r'[^0-9]', '.', str(x[1]))) for x in zip(weights, PIO_VERSION)])
|
||||
if version_cur < version_min:
|
||||
print()
|
||||
print("**************************************************")
|
||||
print("****** An update to PlatformIO is ******")
|
||||
print("****** required to build Marlin Firmware. ******")
|
||||
print("****** ******")
|
||||
print("****** Minimum version: ", PIO_VERSION_MIN, " ******")
|
||||
print("****** Current Version: ", PIO_VERSION, " ******")
|
||||
print("****** ******")
|
||||
print("****** Update PlatformIO and try again. ******")
|
||||
print("**************************************************")
|
||||
print()
|
||||
exit(1)
|
||||
except SystemExit:
|
||||
exit(1)
|
||||
except:
|
||||
print("Can't detect PlatformIO Version")
|
||||
def validate_pio():
|
||||
PIO_VERSION_MIN = (6, 0, 1)
|
||||
try:
|
||||
from platformio import VERSION as PIO_VERSION
|
||||
weights = (1000, 100, 1)
|
||||
version_min = sum([x[0] * float(re.sub(r'[^0-9]', '.', str(x[1]))) for x in zip(weights, PIO_VERSION_MIN)])
|
||||
version_cur = sum([x[0] * float(re.sub(r'[^0-9]', '.', str(x[1]))) for x in zip(weights, PIO_VERSION)])
|
||||
if version_cur < version_min:
|
||||
print()
|
||||
print("**************************************************")
|
||||
print("****** An update to PlatformIO is ******")
|
||||
print("****** required to build Marlin Firmware. ******")
|
||||
print("****** ******")
|
||||
print("****** Minimum version: ", PIO_VERSION_MIN, " ******")
|
||||
print("****** Current Version: ", PIO_VERSION, " ******")
|
||||
print("****** ******")
|
||||
print("****** Update PlatformIO and try again. ******")
|
||||
print("**************************************************")
|
||||
print()
|
||||
exit(1)
|
||||
except SystemExit:
|
||||
exit(1)
|
||||
except:
|
||||
print("Can't detect PlatformIO Version")
|
||||
|
||||
def blab(str,level=1):
|
||||
if verbose >= level:
|
||||
print("[deps] %s" % str)
|
||||
def blab(str,level=1):
|
||||
if verbose >= level:
|
||||
print("[deps] %s" % str)
|
||||
|
||||
def add_to_feat_cnf(feature, flines):
|
||||
def add_to_feat_cnf(feature, flines):
|
||||
|
||||
try:
|
||||
feat = FEATURE_CONFIG[feature]
|
||||
except:
|
||||
FEATURE_CONFIG[feature] = {}
|
||||
try:
|
||||
feat = FEATURE_CONFIG[feature]
|
||||
except:
|
||||
FEATURE_CONFIG[feature] = {}
|
||||
|
||||
# Get a reference to the FEATURE_CONFIG under construction
|
||||
feat = FEATURE_CONFIG[feature]
|
||||
# Get a reference to the FEATURE_CONFIG under construction
|
||||
feat = FEATURE_CONFIG[feature]
|
||||
|
||||
# Split up passed lines on commas or newlines and iterate
|
||||
# Add common options to the features config under construction
|
||||
# For lib_deps replace a previous instance of the same library
|
||||
atoms = re.sub(r',\s*', '\n', flines).strip().split('\n')
|
||||
for line in atoms:
|
||||
parts = line.split('=')
|
||||
name = parts.pop(0)
|
||||
if name in ['build_flags', 'extra_scripts', 'src_filter', 'lib_ignore']:
|
||||
feat[name] = '='.join(parts)
|
||||
blab("[%s] %s=%s" % (feature, name, feat[name]), 3)
|
||||
else:
|
||||
for dep in re.split(r',\s*', line):
|
||||
lib_name = re.sub(r'@([~^]|[<>]=?)?[\d.]+', '', dep.strip()).split('=').pop(0)
|
||||
lib_re = re.compile('(?!^' + lib_name + '\\b)')
|
||||
feat['lib_deps'] = list(filter(lib_re.match, feat['lib_deps'])) + [dep]
|
||||
blab("[%s] lib_deps = %s" % (feature, dep), 3)
|
||||
# Split up passed lines on commas or newlines and iterate
|
||||
# Add common options to the features config under construction
|
||||
# For lib_deps replace a previous instance of the same library
|
||||
atoms = re.sub(r',\s*', '\n', flines).strip().split('\n')
|
||||
for line in atoms:
|
||||
parts = line.split('=')
|
||||
name = parts.pop(0)
|
||||
if name in ['build_flags', 'extra_scripts', 'src_filter', 'lib_ignore']:
|
||||
feat[name] = '='.join(parts)
|
||||
blab("[%s] %s=%s" % (feature, name, feat[name]), 3)
|
||||
else:
|
||||
for dep in re.split(r',\s*', line):
|
||||
lib_name = re.sub(r'@([~^]|[<>]=?)?[\d.]+', '', dep.strip()).split('=').pop(0)
|
||||
lib_re = re.compile('(?!^' + lib_name + '\\b)')
|
||||
if not 'lib_deps' in feat: feat['lib_deps'] = {}
|
||||
feat['lib_deps'] = list(filter(lib_re.match, feat['lib_deps'])) + [dep]
|
||||
blab("[%s] lib_deps = %s" % (feature, dep), 3)
|
||||
|
||||
def load_features():
|
||||
blab("========== Gather [features] entries...")
|
||||
for key in ProjectConfig().items('features'):
|
||||
feature = key[0].upper()
|
||||
if not feature in FEATURE_CONFIG:
|
||||
FEATURE_CONFIG[feature] = { 'lib_deps': [] }
|
||||
add_to_feat_cnf(feature, key[1])
|
||||
def load_features():
|
||||
blab("========== Gather [features] entries...")
|
||||
for key in ProjectConfig().items('features'):
|
||||
feature = key[0].upper()
|
||||
if not feature in FEATURE_CONFIG:
|
||||
FEATURE_CONFIG[feature] = { 'lib_deps': [] }
|
||||
add_to_feat_cnf(feature, key[1])
|
||||
|
||||
# Add options matching custom_marlin.MY_OPTION to the pile
|
||||
blab("========== Gather custom_marlin entries...")
|
||||
for n in env.GetProjectOptions():
|
||||
key = n[0]
|
||||
mat = re.match(r'custom_marlin\.(.+)', key)
|
||||
if mat:
|
||||
try:
|
||||
val = env.GetProjectOption(key)
|
||||
except:
|
||||
val = None
|
||||
if val:
|
||||
opt = mat[1].upper()
|
||||
blab("%s.custom_marlin.%s = '%s'" % ( env['PIOENV'], opt, val ))
|
||||
add_to_feat_cnf(opt, val)
|
||||
# Add options matching custom_marlin.MY_OPTION to the pile
|
||||
blab("========== Gather custom_marlin entries...")
|
||||
for n in env.GetProjectOptions():
|
||||
key = n[0]
|
||||
mat = re.match(r'custom_marlin\.(.+)', key)
|
||||
if mat:
|
||||
try:
|
||||
val = env.GetProjectOption(key)
|
||||
except:
|
||||
val = None
|
||||
if val:
|
||||
opt = mat[1].upper()
|
||||
blab("%s.custom_marlin.%s = '%s'" % ( env['PIOENV'], opt, val ))
|
||||
add_to_feat_cnf(opt, val)
|
||||
|
||||
def get_all_known_libs():
|
||||
known_libs = []
|
||||
for feature in FEATURE_CONFIG:
|
||||
feat = FEATURE_CONFIG[feature]
|
||||
if not 'lib_deps' in feat:
|
||||
continue
|
||||
for dep in feat['lib_deps']:
|
||||
known_libs.append(PackageSpec(dep).name)
|
||||
return known_libs
|
||||
def get_all_known_libs():
|
||||
known_libs = []
|
||||
for feature in FEATURE_CONFIG:
|
||||
feat = FEATURE_CONFIG[feature]
|
||||
if not 'lib_deps' in feat:
|
||||
continue
|
||||
for dep in feat['lib_deps']:
|
||||
known_libs.append(PackageSpec(dep).name)
|
||||
return known_libs
|
||||
|
||||
def get_all_env_libs():
|
||||
env_libs = []
|
||||
lib_deps = env.GetProjectOption('lib_deps')
|
||||
for dep in lib_deps:
|
||||
env_libs.append(PackageSpec(dep).name)
|
||||
return env_libs
|
||||
def get_all_env_libs():
|
||||
env_libs = []
|
||||
lib_deps = env.GetProjectOption('lib_deps')
|
||||
for dep in lib_deps:
|
||||
env_libs.append(PackageSpec(dep).name)
|
||||
return env_libs
|
||||
|
||||
def set_env_field(field, value):
|
||||
proj = env.GetProjectConfig()
|
||||
proj.set("env:" + env['PIOENV'], field, value)
|
||||
def set_env_field(field, value):
|
||||
proj = env.GetProjectConfig()
|
||||
proj.set("env:" + env['PIOENV'], field, value)
|
||||
|
||||
# All unused libs should be ignored so that if a library
|
||||
# exists in .pio/lib_deps it will not break compilation.
|
||||
def force_ignore_unused_libs():
|
||||
env_libs = get_all_env_libs()
|
||||
known_libs = get_all_known_libs()
|
||||
diff = (list(set(known_libs) - set(env_libs)))
|
||||
lib_ignore = env.GetProjectOption('lib_ignore') + diff
|
||||
blab("Ignore libraries: %s" % lib_ignore)
|
||||
set_env_field('lib_ignore', lib_ignore)
|
||||
# All unused libs should be ignored so that if a library
|
||||
# exists in .pio/lib_deps it will not break compilation.
|
||||
def force_ignore_unused_libs():
|
||||
env_libs = get_all_env_libs()
|
||||
known_libs = get_all_known_libs()
|
||||
diff = (list(set(known_libs) - set(env_libs)))
|
||||
lib_ignore = env.GetProjectOption('lib_ignore') + diff
|
||||
blab("Ignore libraries: %s" % lib_ignore)
|
||||
set_env_field('lib_ignore', lib_ignore)
|
||||
|
||||
def apply_features_config():
|
||||
load_features()
|
||||
blab("========== Apply enabled features...")
|
||||
for feature in FEATURE_CONFIG:
|
||||
if not env.MarlinHas(feature):
|
||||
continue
|
||||
def apply_features_config():
|
||||
load_features()
|
||||
blab("========== Apply enabled features...")
|
||||
for feature in FEATURE_CONFIG:
|
||||
if not env.MarlinHas(feature):
|
||||
continue
|
||||
|
||||
feat = FEATURE_CONFIG[feature]
|
||||
feat = FEATURE_CONFIG[feature]
|
||||
|
||||
if 'lib_deps' in feat and len(feat['lib_deps']):
|
||||
blab("========== Adding lib_deps for %s... " % feature, 2)
|
||||
if 'lib_deps' in feat and len(feat['lib_deps']):
|
||||
blab("========== Adding lib_deps for %s... " % feature, 2)
|
||||
|
||||
# feat to add
|
||||
deps_to_add = {}
|
||||
for dep in feat['lib_deps']:
|
||||
deps_to_add[PackageSpec(dep).name] = dep
|
||||
blab("==================== %s... " % dep, 2)
|
||||
# feat to add
|
||||
deps_to_add = {}
|
||||
for dep in feat['lib_deps']:
|
||||
deps_to_add[PackageSpec(dep).name] = dep
|
||||
blab("==================== %s... " % dep, 2)
|
||||
|
||||
# Does the env already have the dependency?
|
||||
deps = env.GetProjectOption('lib_deps')
|
||||
for dep in deps:
|
||||
name = PackageSpec(dep).name
|
||||
if name in deps_to_add:
|
||||
del deps_to_add[name]
|
||||
# Does the env already have the dependency?
|
||||
deps = env.GetProjectOption('lib_deps')
|
||||
for dep in deps:
|
||||
name = PackageSpec(dep).name
|
||||
if name in deps_to_add:
|
||||
del deps_to_add[name]
|
||||
|
||||
# Are there any libraries that should be ignored?
|
||||
lib_ignore = env.GetProjectOption('lib_ignore')
|
||||
for dep in deps:
|
||||
name = PackageSpec(dep).name
|
||||
if name in deps_to_add:
|
||||
del deps_to_add[name]
|
||||
# Are there any libraries that should be ignored?
|
||||
lib_ignore = env.GetProjectOption('lib_ignore')
|
||||
for dep in deps:
|
||||
name = PackageSpec(dep).name
|
||||
if name in deps_to_add:
|
||||
del deps_to_add[name]
|
||||
|
||||
# Is there anything left?
|
||||
if len(deps_to_add) > 0:
|
||||
# Only add the missing dependencies
|
||||
set_env_field('lib_deps', deps + list(deps_to_add.values()))
|
||||
# Is there anything left?
|
||||
if len(deps_to_add) > 0:
|
||||
# Only add the missing dependencies
|
||||
set_env_field('lib_deps', deps + list(deps_to_add.values()))
|
||||
|
||||
if 'build_flags' in feat:
|
||||
f = feat['build_flags']
|
||||
blab("========== Adding build_flags for %s: %s" % (feature, f), 2)
|
||||
new_flags = env.GetProjectOption('build_flags') + [ f ]
|
||||
env.Replace(BUILD_FLAGS=new_flags)
|
||||
if 'build_flags' in feat:
|
||||
f = feat['build_flags']
|
||||
blab("========== Adding build_flags for %s: %s" % (feature, f), 2)
|
||||
new_flags = env.GetProjectOption('build_flags') + [ f ]
|
||||
env.Replace(BUILD_FLAGS=new_flags)
|
||||
|
||||
if 'extra_scripts' in feat:
|
||||
blab("Running extra_scripts for %s... " % feature, 2)
|
||||
env.SConscript(feat['extra_scripts'], exports="env")
|
||||
if 'extra_scripts' in feat:
|
||||
blab("Running extra_scripts for %s... " % feature, 2)
|
||||
env.SConscript(feat['extra_scripts'], exports="env")
|
||||
|
||||
if 'src_filter' in feat:
|
||||
blab("========== Adding build_src_filter for %s... " % feature, 2)
|
||||
src_filter = ' '.join(env.GetProjectOption('src_filter'))
|
||||
# first we need to remove the references to the same folder
|
||||
my_srcs = re.findall(r'[+-](<.*?>)', feat['src_filter'])
|
||||
cur_srcs = re.findall(r'[+-](<.*?>)', src_filter)
|
||||
for d in my_srcs:
|
||||
if d in cur_srcs:
|
||||
src_filter = re.sub(r'[+-]' + d, '', src_filter)
|
||||
if 'src_filter' in feat:
|
||||
blab("========== Adding build_src_filter for %s... " % feature, 2)
|
||||
src_filter = ' '.join(env.GetProjectOption('src_filter'))
|
||||
# first we need to remove the references to the same folder
|
||||
my_srcs = re.findall(r'[+-](<.*?>)', feat['src_filter'])
|
||||
cur_srcs = re.findall(r'[+-](<.*?>)', src_filter)
|
||||
for d in my_srcs:
|
||||
if d in cur_srcs:
|
||||
src_filter = re.sub(r'[+-]' + d, '', src_filter)
|
||||
|
||||
src_filter = feat['src_filter'] + ' ' + src_filter
|
||||
set_env_field('build_src_filter', [src_filter])
|
||||
env.Replace(SRC_FILTER=src_filter)
|
||||
src_filter = feat['src_filter'] + ' ' + src_filter
|
||||
set_env_field('build_src_filter', [src_filter])
|
||||
env.Replace(SRC_FILTER=src_filter)
|
||||
|
||||
if 'lib_ignore' in feat:
|
||||
blab("========== Adding lib_ignore for %s... " % feature, 2)
|
||||
lib_ignore = env.GetProjectOption('lib_ignore') + [feat['lib_ignore']]
|
||||
set_env_field('lib_ignore', lib_ignore)
|
||||
if 'lib_ignore' in feat:
|
||||
blab("========== Adding lib_ignore for %s... " % feature, 2)
|
||||
lib_ignore = env.GetProjectOption('lib_ignore') + [feat['lib_ignore']]
|
||||
set_env_field('lib_ignore', lib_ignore)
|
||||
|
||||
#
|
||||
# Use the compiler to get a list of all enabled features
|
||||
#
|
||||
def load_marlin_features():
|
||||
if 'MARLIN_FEATURES' in env:
|
||||
return
|
||||
#
|
||||
# Use the compiler to get a list of all enabled features
|
||||
#
|
||||
def load_marlin_features():
|
||||
if 'MARLIN_FEATURES' in env:
|
||||
return
|
||||
|
||||
# Process defines
|
||||
from preprocessor import run_preprocessor
|
||||
define_list = run_preprocessor(env)
|
||||
marlin_features = {}
|
||||
for define in define_list:
|
||||
feature = define[8:].strip().decode().split(' ')
|
||||
feature, definition = feature[0], ' '.join(feature[1:])
|
||||
marlin_features[feature] = definition
|
||||
env['MARLIN_FEATURES'] = marlin_features
|
||||
# Process defines
|
||||
from preprocessor import run_preprocessor
|
||||
define_list = run_preprocessor(env)
|
||||
marlin_features = {}
|
||||
for define in define_list:
|
||||
feature = define[8:].strip().decode().split(' ')
|
||||
feature, definition = feature[0], ' '.join(feature[1:])
|
||||
marlin_features[feature] = definition
|
||||
env['MARLIN_FEATURES'] = marlin_features
|
||||
|
||||
#
|
||||
# Return True if a matching feature is enabled
|
||||
#
|
||||
def MarlinHas(env, feature):
|
||||
load_marlin_features()
|
||||
r = re.compile('^' + feature + '$')
|
||||
found = list(filter(r.match, env['MARLIN_FEATURES']))
|
||||
#
|
||||
# Return True if a matching feature is enabled
|
||||
#
|
||||
def MarlinHas(env, feature):
|
||||
load_marlin_features()
|
||||
r = re.compile('^' + feature + '$')
|
||||
found = list(filter(r.match, env['MARLIN_FEATURES']))
|
||||
|
||||
# Defines could still be 'false' or '0', so check
|
||||
some_on = False
|
||||
if len(found):
|
||||
for f in found:
|
||||
val = env['MARLIN_FEATURES'][f]
|
||||
if val in [ '', '1', 'true' ]:
|
||||
some_on = True
|
||||
elif val in env['MARLIN_FEATURES']:
|
||||
some_on = env.MarlinHas(val)
|
||||
# Defines could still be 'false' or '0', so check
|
||||
some_on = False
|
||||
if len(found):
|
||||
for f in found:
|
||||
val = env['MARLIN_FEATURES'][f]
|
||||
if val in [ '', '1', 'true' ]:
|
||||
some_on = True
|
||||
elif val in env['MARLIN_FEATURES']:
|
||||
some_on = env.MarlinHas(val)
|
||||
|
||||
return some_on
|
||||
return some_on
|
||||
|
||||
validate_pio()
|
||||
validate_pio()
|
||||
|
||||
try:
|
||||
verbose = int(env.GetProjectOption('custom_verbose'))
|
||||
except:
|
||||
pass
|
||||
try:
|
||||
verbose = int(env.GetProjectOption('custom_verbose'))
|
||||
except:
|
||||
pass
|
||||
|
||||
#
|
||||
# Add a method for other PIO scripts to query enabled features
|
||||
#
|
||||
env.AddMethod(MarlinHas)
|
||||
#
|
||||
# Add a method for other PIO scripts to query enabled features
|
||||
#
|
||||
env.AddMethod(MarlinHas)
|
||||
|
||||
#
|
||||
# Add dependencies for enabled Marlin features
|
||||
#
|
||||
apply_features_config()
|
||||
force_ignore_unused_libs()
|
||||
#
|
||||
# Add dependencies for enabled Marlin features
|
||||
#
|
||||
apply_features_config()
|
||||
force_ignore_unused_libs()
|
||||
|
||||
#print(env.Dump())
|
||||
#print(env.Dump())
|
||||
|
||||
from signature import compute_build_signature
|
||||
compute_build_signature(env)
|
||||
from signature import compute_build_signature
|
||||
compute_build_signature(env)
|
||||
|
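
An aside, not from the diff: because MarlinHas is attached with env.AddMethod, any later extra_scripts entry can gate work on a Marlin feature. A minimal sketch of such a script, assuming it runs inside a PlatformIO build after the common script above (the define it adds is hypothetical):

import pioutil
if pioutil.is_pio_build():
    Import("env")
    # The feature name is compiled as a regex, so wildcards work too,
    # e.g. env.MarlinHas("SDIO.*")
    if env.MarlinHas("SDSUPPORT"):
        env.Append(CPPDEFINES=["MY_SD_EXTRA"])  # hypothetical extra define
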
240  buildroot/share/PlatformIO/scripts/configuration.py  Normal file
@@ -0,0 +1,240 @@
#
# configuration.py
# Apply options from config.ini to the existing Configuration headers
#
import re, shutil, configparser
from pathlib import Path

verbose = 0
def blab(str,level=1):
    if verbose >= level: print(f"[config] {str}")

def config_path(cpath):
    return Path("Marlin", cpath)

# Apply a single name = on/off ; name = value ; etc.
# TODO: Limit to the given (optional) configuration
def apply_opt(name, val, conf=None):
    if name == "lcd": name, val = val, "on"

    # Create a regex to match the option and capture parts of the line
    regex = re.compile(rf'^(\s*)(//\s*)?(#define\s+)({name}\b)(\s*)(.*?)(\s*)(//.*)?$', re.IGNORECASE)

    # Find and enable and/or update all matches
    for file in ("Configuration.h", "Configuration_adv.h"):
        fullpath = config_path(file)
        lines = fullpath.read_text(encoding='utf-8').split('\n')
        found = False
        for i in range(len(lines)):
            line = lines[i]
            match = regex.match(line)
            if match and match[4].upper() == name.upper():
                found = True
                # For boolean options un/comment the define
                if val in ("on", "", None):
                    newline = re.sub(r'^(\s*)//+\s*(#define)(\s{1,3})?(\s*)', r'\1\2 \4', line)
                elif val == "off":
                    newline = re.sub(r'^(\s*)(#define)(\s{1,3})?(\s*)', r'\1//\2 \4', line)
                else:
                    # For options with values, enable and set the value
                    newline = match[1] + match[3] + match[4] + match[5] + val
                    if match[8]:
                        sp = match[7] if match[7] else ' '
                        newline += sp + match[8]
                lines[i] = newline
                blab(f"Set {name} to {val}")

        # If the option was found, write the modified lines
        if found:
            fullpath.write_text('\n'.join(lines), encoding='utf-8')
            break

    # If the option didn't appear in either config file, add it
    if not found:
        # OFF options are added as disabled items so they appear
        # in config dumps. Useful for custom settings.
        prefix = ""
        if val == "off":
            prefix, val = "//", ""  # Item doesn't appear in config dump
            #val = "false"          # Item appears in config dump

        # Uppercase the option unless already mixed/uppercase
        added = name.upper() if name.islower() else name

        # Add the provided value after the name
        if val != "on" and val != "" and val is not None:
            added += " " + val

        # Prepend the new option after the first set of #define lines
        fullpath = config_path("Configuration.h")
        with fullpath.open(encoding='utf-8') as f:
            lines = f.readlines()
            linenum = 0
            gotdef = False
            for line in lines:
                isdef = line.startswith("#define")
                if not gotdef:
                    gotdef = isdef
                elif not isdef:
                    break
                linenum += 1
            lines.insert(linenum, f"{prefix}#define {added:30} // Added by config.ini\n")
            fullpath.write_text(''.join(lines), encoding='utf-8')

# Fetch configuration files from GitHub given the path.
# Return True if any files were fetched.
def fetch_example(url):
    if url.endswith("/"): url = url[:-1]
    if not url.startswith('http'):
        brch = "bugfix-2.1.x"
        if '@' in url: url, brch = map(str.strip, url.split('@'))
        if url == 'examples/default': url = 'default'
        url = f"https://raw.githubusercontent.com/MarlinFirmware/Configurations/{brch}/config/{url}"
    url = url.replace("%", "%25").replace(" ", "%20")

    # Find a suitable fetch command
    if shutil.which("curl") is not None:
        fetch = "curl -L -s -S -f -o"
    elif shutil.which("wget") is not None:
        fetch = "wget -q -O"
    else:
        blab("Couldn't find curl or wget", -1)
        return False

    import os

    # Reset configurations to default
    os.system("git checkout HEAD Marlin/*.h")

    # Try to fetch the remote files
    gotfile = False
    for fn in ("Configuration.h", "Configuration_adv.h", "_Bootscreen.h", "_Statusscreen.h"):
        if os.system(f"{fetch} wgot {url}/{fn} >/dev/null 2>&1") == 0:
            shutil.move('wgot', config_path(fn))
            gotfile = True

    if Path('wgot').exists(): shutil.rmtree('wgot')

    return gotfile

def section_items(cp, sectkey):
    return cp.items(sectkey) if sectkey in cp.sections() else []

# Apply all items from a config section
def apply_ini_by_name(cp, sect):
    iniok = True
    if sect in ('config:base', 'config:root'):
        iniok = False
        items = section_items(cp, 'config:base') + section_items(cp, 'config:root')
    else:
        items = section_items(cp, sect)

    for item in items:
        if iniok or not item[0].startswith('ini_'):
            apply_opt(item[0], item[1])

# Apply all config sections from a parsed file
def apply_all_sections(cp):
    for sect in cp.sections():
        if sect.startswith('config:'):
            apply_ini_by_name(cp, sect)

# Apply certain config sections from a parsed file
def apply_sections(cp, ckey='all'):
    blab(f"Apply section key: {ckey}")
    if ckey == 'all':
        apply_all_sections(cp)
    else:
        # Apply the base/root config.ini settings after external files are done
        if ckey in ('base', 'root'):
            apply_ini_by_name(cp, 'config:base')

        # Apply historically 'Configuration.h' settings everywhere
        if ckey == 'basic':
            apply_ini_by_name(cp, 'config:basic')

        # Apply historically 'Configuration_adv.h' settings everywhere
        # (Some of which rely on defines in 'Conditionals_LCD.h')
        elif ckey in ('adv', 'advanced'):
            apply_ini_by_name(cp, 'config:advanced')

        # Apply a specific config:<name> section directly
        elif ckey.startswith('config:'):
            apply_ini_by_name(cp, ckey)

# Apply settings from a top level config.ini
def apply_config_ini(cp):
    blab("=" * 20 + " Gather 'config.ini' entries...")

    # Pre-scan for ini_use_config to get config_keys
    base_items = section_items(cp, 'config:base') + section_items(cp, 'config:root')
    config_keys = ['base']
    for ikey, ival in base_items:
        if ikey == 'ini_use_config':
            config_keys = map(str.strip, ival.split(','))

    # For each ini_use_config item perform an action
    for ckey in config_keys:
        addbase = False

        # For a key ending in .ini load and parse another .ini file
        if ckey.endswith('.ini'):
            sect = 'base'
            if '@' in ckey: sect, ckey = map(str.strip, ckey.split('@'))
            cp2 = configparser.ConfigParser()
            cp2.read(config_path(ckey))
            apply_sections(cp2, sect)
            ckey = 'base'

        # (Allow 'example/' as a shortcut for 'examples/')
        elif ckey.startswith('example/'):
            ckey = 'examples' + ckey[7:]

        # For 'examples/<path>' fetch an example set from GitHub.
        # For https?:// do a direct fetch of the URL.
        if ckey.startswith('examples/') or ckey.startswith('http'):
            fetch_example(ckey)
            ckey = 'base'

        if ckey == 'all':
            apply_sections(cp)

        else:
            # Apply keyed sections after external files are done
            apply_sections(cp, 'config:' + ckey)

if __name__ == "__main__":
    #
    # From command line use the given file name
    #
    import sys
    args = sys.argv[1:]
    if len(args) > 0:
        if args[0].endswith('.ini'):
            ini_file = args[0]
        else:
            print("Usage: %s <.ini file>" % sys.argv[0])
    else:
        ini_file = config_path('config.ini')

    if ini_file:
        user_ini = configparser.ConfigParser()
        user_ini.read(ini_file)
        apply_config_ini(user_ini)

else:
    #
    # From within PlatformIO use the loaded INI file
    #
    import pioutil
    if pioutil.is_pio_build():

        Import("env")

        try:
            verbose = int(env.GetProjectOption('custom_verbose'))
        except:
            pass

        from platformio.project.config import ProjectConfig
        apply_config_ini(ProjectConfig())
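
An aside, not from the diff: a quick illustration of feeding apply_config_ini() a parsed INI with the keys the code above looks for. The option names here (temp_sensor_0, pidtemp) are placeholders, not recommendations:

import configparser
cp = configparser.ConfigParser()
cp.read_string("""
[config:base]
ini_use_config = base
temp_sensor_0  = 1
pidtemp        = on
""")
# apply_config_ini(cp) would then un/comment or set those #defines
# in Marlin/Configuration.h via apply_opt().
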
@@ -6,13 +6,13 @@
#
import pioutil
if pioutil.is_pio_build():
    import marlin
    board = marlin.env.BoardConfig()

    address = board.get("build.address", "")
    if address:
        marlin.relocate_firmware(address)

    ldscript = board.get("build.ldscript", "")
    if ldscript:
        marlin.custom_ld_script(ldscript)
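
An aside, not from the diff: the two board-manifest fields this script consumes live under the board JSON's "build" object, and board.get() falls back to "" when a key is absent. A sketch with illustrative values only:

example_build = {
    "address": "0x08007000",    # would be passed to marlin.relocate_firmware()
    "ldscript": "custom.ld",    # would be passed to marlin.custom_ld_script()
}
for key, val in example_build.items():
    print(f"build.{key} -> {val}")
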
@@ -4,46 +4,50 @@
#
import pioutil
if pioutil.is_pio_build():
    Import("env")
-   import os,requests,zipfile,tempfile,shutil
+   import requests,zipfile,tempfile,shutil
+   from pathlib import Path

    url = "https://github.com/makerbase-mks/Mks-Robin-Nano-Marlin2.0-Firmware/archive/0263cdaccf.zip"
-   deps_path = env.Dictionary("PROJECT_LIBDEPS_DIR")
-   zip_path = os.path.join(deps_path, "mks-assets.zip")
-   assets_path = os.path.join(env.Dictionary("PROJECT_BUILD_DIR"), env.Dictionary("PIOENV"), "assets")
+   deps_path = Path(env.Dictionary("PROJECT_LIBDEPS_DIR"))
+   zip_path = deps_path / "mks-assets.zip"
+   assets_path = Path(env.Dictionary("PROJECT_BUILD_DIR"), env.Dictionary("PIOENV"), "assets")

-   def download_mks_assets():
-       print("Downloading MKS Assets")
-       r = requests.get(url, stream=True)
-       # the user may have a very clean workspace,
-       # so create the PROJECT_LIBDEPS_DIR directory if it doesn't exist
-       if os.path.exists(deps_path) == False:
-           os.mkdir(deps_path)
-       with open(zip_path, 'wb') as fd:
-           for chunk in r.iter_content(chunk_size=128):
-               fd.write(chunk)
+   def download_mks_assets():
+       print("Downloading MKS Assets")
+       r = requests.get(url, stream=True)
+       # the user may have a very clean workspace,
+       # so create the PROJECT_LIBDEPS_DIR directory if it doesn't exist
+       if not deps_path.exists():
+           deps_path.mkdir()
+       with zip_path.open('wb') as fd:
+           for chunk in r.iter_content(chunk_size=128):
+               fd.write(chunk)

-   def copy_mks_assets():
-       print("Copying MKS Assets")
-       output_path = tempfile.mkdtemp()
-       zip_obj = zipfile.ZipFile(zip_path, 'r')
-       zip_obj.extractall(output_path)
-       zip_obj.close()
-       if os.path.exists(assets_path) == True and os.path.isdir(assets_path) == False:
-           os.unlink(assets_path)
-       if os.path.exists(assets_path) == False:
-           os.mkdir(assets_path)
-       base_path = ''
-       for filename in os.listdir(output_path):
-           base_path = filename
-       for filename in os.listdir(os.path.join(output_path, base_path, 'Firmware', 'mks_font')):
-           shutil.copy(os.path.join(output_path, base_path, 'Firmware', 'mks_font', filename), assets_path)
-       for filename in os.listdir(os.path.join(output_path, base_path, 'Firmware', 'mks_pic')):
-           shutil.copy(os.path.join(output_path, base_path, 'Firmware', 'mks_pic', filename), assets_path)
-       shutil.rmtree(output_path, ignore_errors=True)
+   def copy_mks_assets():
+       print("Copying MKS Assets")
+       output_path = Path(tempfile.mkdtemp())
+       zip_obj = zipfile.ZipFile(zip_path, 'r')
+       zip_obj.extractall(output_path)
+       zip_obj.close()
+       if assets_path.exists() and not assets_path.is_dir():
+           assets_path.unlink()
+       if not assets_path.exists():
+           assets_path.mkdir()
+       base_path = ''
+       for filename in output_path.iterdir():
+           base_path = filename
+       fw_path = (output_path / base_path / 'Firmware')
+       font_path = fw_path / 'mks_font'
+       for filename in font_path.iterdir():
+           shutil.copy(font_path / filename, assets_path)
+       pic_path = fw_path / 'mks_pic'
+       for filename in pic_path.iterdir():
+           shutil.copy(pic_path / filename, assets_path)
+       shutil.rmtree(output_path, ignore_errors=True)

-   if os.path.exists(zip_path) == False:
-       download_mks_assets()
+   if not zip_path.exists():
+       download_mks_assets()

-   if os.path.exists(assets_path) == False:
-       copy_mks_assets()
+   if not assets_path.exists():
+       copy_mks_assets()
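
An aside, not from the diff: the download above streams the zip in 128-byte chunks instead of loading it into RAM. The same requests pattern in a standalone sketch (the URL and destination are hypothetical; requires the 'requests' package):

import requests
from pathlib import Path

def fetch(url, dest: Path, chunk_size=128):
    r = requests.get(url, stream=True)   # stream=True avoids buffering the body
    r.raise_for_status()
    with dest.open('wb') as fd:
        for chunk in r.iter_content(chunk_size=chunk_size):
            fd.write(chunk)

# fetch("https://example.com/assets.zip", Path("assets.zip"))
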
@@ -4,32 +4,32 @@
import pioutil
if pioutil.is_pio_build():

    import shutil
    from os.path import join, isfile
    from pprint import pprint

    Import("env")

    if env.MarlinHas("POSTMORTEM_DEBUGGING"):
        FRAMEWORK_DIR = env.PioPlatform().get_package_dir("framework-arduinoststm32-maple")
        patchflag_path = join(FRAMEWORK_DIR, ".exc-patching-done")

        # patch file only if we didn't do it before
        if not isfile(patchflag_path):
            print("Patching libmaple exception handlers")
            original_file = join(FRAMEWORK_DIR, "STM32F1", "cores", "maple", "libmaple", "exc.S")
            backup_file = join(FRAMEWORK_DIR, "STM32F1", "cores", "maple", "libmaple", "exc.S.bak")
            src_file = join("buildroot", "share", "PlatformIO", "scripts", "exc.S")

            assert isfile(original_file) and isfile(src_file)
            shutil.copyfile(original_file, backup_file)
            shutil.copyfile(src_file, original_file);

            def _touch(path):
                with open(path, "w") as fp:
                    fp.write("")

            env.Execute(lambda *args, **kwargs: _touch(patchflag_path))
            print("Done patching exception handler")

        print("Libmaple modified and ready for post mortem debugging")
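
An aside, not from the diff: the marker-file trick above is a general patch-once pattern, so repeated builds skip the expensive work. The same idea in isolation (the marker path is hypothetical):

from pathlib import Path

def patch_once(flag: Path, work):
    if flag.is_file():
        return False          # already patched on a previous build
    work()
    flag.write_text("")       # same effect as the _touch() helper above
    return True

# patch_once(Path(".exc-patching-done"), lambda: print("patching..."))
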
@@ -7,54 +7,52 @@
#
import pioutil
if pioutil.is_pio_build():
-   import os,shutil,marlin
-   from SCons.Script import DefaultEnvironment
-   from platformio import util
-
-   env = DefaultEnvironment()
+   import shutil,marlin
+   from pathlib import Path

    #
    # Get the platform name from the 'platform_packages' option,
    # or look it up by the platform.class.name.
    #
+   env = marlin.env
    platform = env.PioPlatform()

    from platformio.package.meta import PackageSpec
    platform_packages = env.GetProjectOption('platform_packages')

    # Remove all tool items from platform_packages
    platform_packages = [x for x in platform_packages if not x.startswith("platformio/tool-")]

    if len(platform_packages) == 0:
        framewords = {
            "Ststm32Platform": "framework-arduinoststm32",
            "AtmelavrPlatform": "framework-arduino-avr"
        }
        platform_name = framewords[platform.__class__.__name__]
    else:
        platform_name = PackageSpec(platform_packages[0]).name

    if platform_name in [ "usb-host-msc", "usb-host-msc-cdc-msc", "usb-host-msc-cdc-msc-2", "usb-host-msc-cdc-msc-3", "tool-stm32duino", "biqu-bx-workaround", "main" ]:
        platform_name = "framework-arduinoststm32"

-   FRAMEWORK_DIR = platform.get_package_dir(platform_name)
-   assert os.path.isdir(FRAMEWORK_DIR)
+   FRAMEWORK_DIR = Path(platform.get_package_dir(platform_name))
+   assert FRAMEWORK_DIR.is_dir()

    board = env.BoardConfig()

    #mcu_type = board.get("build.mcu")[:-2]
    variant = board.get("build.variant")
    #series = mcu_type[:7].upper() + "xx"

    # Prepare a new empty folder at the destination
-   variant_dir = os.path.join(FRAMEWORK_DIR, "variants", variant)
-   if os.path.isdir(variant_dir):
-       shutil.rmtree(variant_dir)
-   if not os.path.isdir(variant_dir):
-       os.mkdir(variant_dir)
+   variant_dir = FRAMEWORK_DIR / "variants" / variant
+   if variant_dir.is_dir():
+       shutil.rmtree(variant_dir)
+   if not variant_dir.is_dir():
+       variant_dir.mkdir()

    # Source dir is a local variant sub-folder
-   source_dir = os.path.join("buildroot/share/PlatformIO/variants", variant)
-   assert os.path.isdir(source_dir)
+   source_dir = Path("buildroot/share/PlatformIO/variants", variant)
+   assert source_dir.is_dir()

    marlin.copytree(source_dir, variant_dir)
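
An aside, not from the diff: PackageSpec(...).name is used above because a platform_packages entry can carry an owner prefix and a version requirement, and only the bare package name is comparable. A sketch (requires the 'platformio' package; the entry shown is illustrative):

from platformio.package.meta import PackageSpec
spec = PackageSpec("platformio/framework-arduinoststm32@~4.20200.0")
print(spec.name)   # -> "framework-arduinoststm32"
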
@@ -4,37 +4,32 @@
#
import pioutil
if pioutil.is_pio_build():
-   import os,marlin
-   # Append ${PROGNAME}.bin firmware after bootloader and save it as 'jgaurora_firmware.bin'
-   def addboot(source, target, env):
-       firmware = open(target[0].path, "rb")
-       lengthfirmware = os.path.getsize(target[0].path)
-       bootloader_bin = "buildroot/share/PlatformIO/scripts/" + "jgaurora_bootloader.bin"
-       bootloader = open(bootloader_bin, "rb")
-       lengthbootloader = os.path.getsize(bootloader_bin)
-
-       firmware_with_boothloader_bin = target[0].dir.path + '/firmware_with_bootloader.bin'
-       if os.path.exists(firmware_with_boothloader_bin):
-           os.remove(firmware_with_boothloader_bin)
-       firmwareimage = open(firmware_with_boothloader_bin, "wb")
-       position = 0
-       while position < lengthbootloader:
-           byte = bootloader.read(1)
-           firmwareimage.write(byte)
-           position += 1
-       position = 0
-       while position < lengthfirmware:
-           byte = firmware.read(1)
-           firmwareimage.write(byte)
-           position += 1
-       bootloader.close()
-       firmware.close()
-       firmwareimage.close()
-
-       firmware_without_bootloader_bin = target[0].dir.path + '/firmware_for_sd_upload.bin'
-       if os.path.exists(firmware_without_bootloader_bin):
-           os.remove(firmware_without_bootloader_bin)
-       os.rename(target[0].path, firmware_without_bootloader_bin)
-       #os.rename(target[0].dir.path+'/firmware_with_bootloader.bin', target[0].dir.path+'/firmware.bin')
-
-   marlin.add_post_action(addboot);
+   # Append ${PROGNAME}.bin firmware after bootloader and save it as 'jgaurora_firmware.bin'
+   def addboot(source, target, env):
+       from pathlib import Path
+
+       fw_path = Path(target[0].path)
+       fwb_path = fw_path.parent / 'firmware_with_bootloader.bin'
+       with fwb_path.open("wb") as fwb_file:
+           bl_path = Path("buildroot/share/PlatformIO/scripts/jgaurora_bootloader.bin")
+           bl_file = bl_path.open("rb")
+           while True:
+               b = bl_file.read(1)
+               if b == b'': break
+               else: fwb_file.write(b)
+
+           with fw_path.open("rb") as fw_file:
+               while True:
+                   b = fw_file.read(1)
+                   if b == b'': break
+                   else: fwb_file.write(b)
+
+       fws_path = Path(target[0].dir.path, 'firmware_for_sd_upload.bin')
+       if fws_path.exists():
+           fws_path.unlink()
+
+       fw_path.rename(fws_path)
+
+   import marlin
+   marlin.add_post_action(addboot);
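
An aside, not from the diff: both versions above concatenate bootloader + firmware byte by byte. The same result, sketched with whole-file reads (paths are hypothetical):

from pathlib import Path

def cat_bootloader(bl: Path, fw: Path, out: Path):
    # bootloader first, then the firmware image appended after it
    out.write_bytes(bl.read_bytes() + fw.read_bytes())

# cat_bootloader(Path("jgaurora_bootloader.bin"),
#                Path("firmware.bin"), Path("firmware_with_bootloader.bin"))
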
@@ -7,43 +7,41 @@
#
import pioutil
if pioutil.is_pio_build():
    import os,marlin
-   Import("env")

-   from SCons.Script import DefaultEnvironment
-   board = DefaultEnvironment().BoardConfig()
+   board = marlin.env.BoardConfig()

    def encryptByte(byte):
        byte = 0xFF & ((byte << 6) | (byte >> 2))
        i = 0x58 + byte
        j = 0x05 + byte + (i >> 8)
        byte = (0xF8 & i) | (0x07 & j)
        return byte

    def encrypt_file(input, output_file, file_length):
        input_file = bytearray(input.read())
        for i in range(len(input_file)):
            input_file[i] = encryptByte(input_file[i])
        output_file.write(input_file)

    # Encrypt ${PROGNAME}.bin and save it with the name given in build.crypt_lerdge
    def encrypt(source, target, env):
        fwpath = target[0].path
        enname = board.get("build.crypt_lerdge")
        print("Encrypting %s to %s" % (fwpath, enname))
        fwfile = open(fwpath, "rb")
        enfile = open(target[0].dir.path + "/" + enname, "wb")
        length = os.path.getsize(fwpath)

        encrypt_file(fwfile, enfile, length)

        fwfile.close()
        enfile.close()
        os.remove(fwpath)

    if 'crypt_lerdge' in board.get("build").keys():
        if board.get("build.crypt_lerdge") != "":
            marlin.add_post_action(encrypt)
    else:
        print("LERDGE builds require output file via board_build.crypt_lerdge = 'filename' parameter")
        exit(1)
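
An aside, not from the diff: tracing one byte through encryptByte() above makes the transform concrete. The first line is an 8-bit rotate-left by 6; the input 0x42 is chosen arbitrarily:

byte = 0xFF & ((0x42 << 6) | (0x42 >> 2))   # -> 0x90 (rotate left by 6)
i = 0x58 + byte                             # -> 0xE8
j = 0x05 + byte + (i >> 8)                  # -> 0x95 (no carry out of i here)
print(hex((0xF8 & i) | (0x07 & j)))         # -> 0xed
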
@@ -2,74 +2,71 @@
# marlin.py
# Helper module with some commonly-used functions
#
-import os,shutil
+import shutil
+from pathlib import Path

from SCons.Script import DefaultEnvironment
env = DefaultEnvironment()

-from os.path import join
-
def copytree(src, dst, symlinks=False, ignore=None):
-   for item in os.listdir(src):
-       s = join(src, item)
-       d = join(dst, item)
-       if os.path.isdir(s):
-           shutil.copytree(s, d, symlinks, ignore)
-       else:
-           shutil.copy2(s, d)
+   for item in src.iterdir():
+       if item.is_dir():
+           shutil.copytree(item, dst / item.name, symlinks, ignore)
+       else:
+           shutil.copy2(item, dst / item.name)

def replace_define(field, value):
    for define in env['CPPDEFINES']:
        if define[0] == field:
            env['CPPDEFINES'].remove(define)
    env['CPPDEFINES'].append((field, value))

# Relocate the firmware to a new address, such as "0x08005000"
def relocate_firmware(address):
    replace_define("VECT_TAB_ADDR", address)

# Relocate the vector table with a new offset
def relocate_vtab(address):
    replace_define("VECT_TAB_OFFSET", address)

# Replace the existing -Wl,-T with the given ldscript path
def custom_ld_script(ldname):
-   apath = os.path.abspath("buildroot/share/PlatformIO/ldscripts/" + ldname)
+   apath = str(Path("buildroot/share/PlatformIO/ldscripts", ldname).resolve())
    for i, flag in enumerate(env["LINKFLAGS"]):
        if "-Wl,-T" in flag:
            env["LINKFLAGS"][i] = "-Wl,-T" + apath
        elif flag == "-T":
            env["LINKFLAGS"][i + 1] = apath

# Encrypt ${PROGNAME}.bin and save it with a new name. This applies (mostly) to MKS boards
# This PostAction is set up by offset_and_rename.py for envs with 'build.encrypt_mks'.
def encrypt_mks(source, target, env, new_name):
    import sys

    key = [0xA3, 0xBD, 0xAD, 0x0D, 0x41, 0x11, 0xBB, 0x8D, 0xDC, 0x80, 0x2D, 0xD0, 0xD2, 0xC4, 0x9B, 0x1E, 0x26, 0xEB, 0xE3, 0x33, 0x4A, 0x15, 0xE4, 0x0A, 0xB3, 0xB1, 0x3C, 0x93, 0xBB, 0xAF, 0xF7, 0x3E]

    # If FIRMWARE_BIN is defined by config, override all
    mf = env["MARLIN_FEATURES"]
    if "FIRMWARE_BIN" in mf: new_name = mf["FIRMWARE_BIN"]

-   fwpath = target[0].path
-   fwfile = open(fwpath, "rb")
-   enfile = open(target[0].dir.path + "/" + new_name, "wb")
-   length = os.path.getsize(fwpath)
+   fwpath = Path(target[0].path)
+   fwfile = fwpath.open("rb")
+   enfile = Path(target[0].dir.path, new_name).open("wb")
+   length = fwpath.stat().st_size
    position = 0
    try:
        while position < length:
            byte = fwfile.read(1)
-           if position >= 320 and position < 31040:
+           if 320 <= position < 31040:
                byte = chr(ord(byte) ^ key[position & 31])
                if sys.version_info[0] > 2:
                    byte = bytes(byte, 'latin1')
            enfile.write(byte)
            position += 1
    finally:
        fwfile.close()
        enfile.close()
-       os.remove(fwpath)
+       fwpath.unlink()

def add_post_action(action):
-   env.AddPostAction(join("$BUILD_DIR", "${PROGNAME}.bin"), action);
+   env.AddPostAction(str(Path("$BUILD_DIR", "${PROGNAME}.bin")), action);
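
An aside, not from the diff: what replace_define() does to CPPDEFINES, shown on a plain list (the real env['CPPDEFINES'] is SCons-managed; this stand-in only mimics the shape, and iterates over a copy to make the removal safe):

CPPDEFINES = [("VECT_TAB_ADDR", "0x08000000"), "STM32F1"]
for define in list(CPPDEFINES):
    if define[0] == "VECT_TAB_ADDR":
        CPPDEFINES.remove(define)
CPPDEFINES.append(("VECT_TAB_ADDR", "0x08005000"))
print(CPPDEFINES)   # ['STM32F1', ('VECT_TAB_ADDR', '0x08005000')]
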
@@ -5,65 +5,64 @@
import json
import sys
import shutil
import re

opt_output = '--opt' in sys.argv
output_suffix = '.sh' if opt_output else '' if '--bare-output' in sys.argv else '.gen'

try:
    with open('marlin_config.json', 'r') as infile:
        conf = json.load(infile)
        for key in conf:
            # We don't care about the hash when restoring here
            if key == '__INITIAL_HASH':
                continue
            if key == 'VERSION':
                for k, v in sorted(conf[key].items()):
                    print(k + ': ' + v)
                continue
            # The key is the file name, so let's build it now
            outfile = open('Marlin/' + key + output_suffix, 'w')
            for k, v in sorted(conf[key].items()):
                # Make define line now
                if opt_output:
                    if v != '':
                        if '"' in v:
                            v = "'%s'" % v
                        elif ' ' in v:
                            v = '"%s"' % v
                        define = 'opt_set ' + k + ' ' + v + '\n'
                    else:
                        define = 'opt_enable ' + k + '\n'
                else:
                    define = '#define ' + k + ' ' + v + '\n'
                outfile.write(define)
            outfile.close()

            # Try to apply changes to the actual configuration file (in order to keep useful comments)
            if output_suffix != '':
                # Move the existing configuration so it doesn't interfere
                shutil.move('Marlin/' + key, 'Marlin/' + key + '.orig')
                infile_lines = open('Marlin/' + key + '.orig', 'r').read().split('\n')
                outfile = open('Marlin/' + key, 'w')
                for line in infile_lines:
                    sline = line.strip(" \t\n\r")
                    if sline[:7] == "#define":
                        # Extract the key here (we don't care about the value)
                        kv = sline[8:].strip().split(' ')
                        if kv[0] in conf[key]:
                            outfile.write('#define ' + kv[0] + ' ' + conf[key][kv[0]] + '\n')
                            # Remove the key from the dict, so we can still write all missing keys at the end of the file
                            del conf[key][kv[0]]
                        else:
                            outfile.write(line + '\n')
                    else:
                        outfile.write(line + '\n')
                # Process any remaining defines here
                for k, v in sorted(conf[key].items()):
                    define = '#define ' + k + ' ' + v + '\n'
                    outfile.write(define)
                outfile.close()

            print('Output configuration written to: ' + 'Marlin/' + key + output_suffix)
except:
    print('No marlin_config.json found.')
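
An aside, not from the diff: a hedged sketch of the marlin_config.json shape the script above expects. Keys and values here are illustrative, not a real dump:

import json
conf = {
    "__INITIAL_HASH": "0123abcd",                  # skipped on restore
    "VERSION": { "SHORT_BUILD_VERSION": "2.0.x" }, # printed, not written
    "Configuration.h": { "PIDTEMP": "", "TEMP_SENSOR_0": "1" },
}
print(json.dumps(conf, indent=2))
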
@@ -2,61 +2,64 @@
# offset_and_rename.py
#
# - If 'build.offset' is provided, either by JSON or by the environment...
#   - Set linker flag LD_FLASH_OFFSET and relocate the VTAB based on 'build.offset'.
#   - Set linker flag LD_MAX_DATA_SIZE based on 'build.maximum_ram_size'.
#   - Define STM32_FLASH_SIZE from 'upload.maximum_size' for use by Flash-based EEPROM emulation.
#
# - For 'board_build.rename' add a post-action to rename the firmware file.
#
import pioutil
if pioutil.is_pio_build():
-   import os,sys,marlin
-   Import("env")
+   import marlin

-   from SCons.Script import DefaultEnvironment
-   board = DefaultEnvironment().BoardConfig()
+   env = marlin.env
+   board = env.BoardConfig()
    board_keys = board.get("build").keys()

    #
    # For build.offset define LD_FLASH_OFFSET, used by ldscript.ld
    #
    if 'offset' in board_keys:
        LD_FLASH_OFFSET = board.get("build.offset")
        marlin.relocate_vtab(LD_FLASH_OFFSET)

        # Flash size
        maximum_flash_size = int(board.get("upload.maximum_size") / 1024)
        marlin.replace_define('STM32_FLASH_SIZE', maximum_flash_size)

        # Get upload.maximum_ram_size (defined by /buildroot/share/PlatformIO/boards/VARIOUS.json)
        maximum_ram_size = board.get("upload.maximum_ram_size")

        for i, flag in enumerate(env["LINKFLAGS"]):
            if "-Wl,--defsym=LD_FLASH_OFFSET" in flag:
                env["LINKFLAGS"][i] = "-Wl,--defsym=LD_FLASH_OFFSET=" + LD_FLASH_OFFSET
            if "-Wl,--defsym=LD_MAX_DATA_SIZE" in flag:
                env["LINKFLAGS"][i] = "-Wl,--defsym=LD_MAX_DATA_SIZE=" + str(maximum_ram_size - 40)

    #
    # For build.encrypt_mks rename and encode the firmware file.
    #
    if 'encrypt_mks' in board_keys:

        # Encrypt ${PROGNAME}.bin and save it with the name given in build.encrypt_mks
        def encrypt(source, target, env):
            marlin.encrypt_mks(source, target, env, board.get("build.encrypt_mks"))

        if board.get("build.encrypt_mks") != "":
            marlin.add_post_action(encrypt)

    #
    # For build.rename simply rename the firmware file.
    #
    if 'rename' in board_keys:

+       # If FIRMWARE_BIN is defined by config, override all
+       mf = env["MARLIN_FEATURES"]
+       if "FIRMWARE_BIN" in mf: new_name = mf["FIRMWARE_BIN"]
+       else: new_name = board.get("build.rename")

        def rename_target(source, target, env):
-           firmware = os.path.join(target[0].dir.path, board.get("build.rename"))
-           os.replace(target[0].path, firmware)
+           from pathlib import Path
+           Path(target[0].path).replace(Path(target[0].dir.path, new_name))

        marlin.add_post_action(rename_target)
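
An aside, not from the diff: the LINKFLAGS rewrite above, run against a stand-in list. Only the matching --defsym entries are replaced in place; everything else is untouched (values illustrative):

LINKFLAGS = ["-Wl,--defsym=LD_FLASH_OFFSET=0x0", "-mthumb"]
LD_FLASH_OFFSET = "0x7000"
for i, flag in enumerate(LINKFLAGS):
    if "-Wl,--defsym=LD_FLASH_OFFSET" in flag:
        LINKFLAGS[i] = "-Wl,--defsym=LD_FLASH_OFFSET=" + LD_FLASH_OFFSET
print(LINKFLAGS)   # ['-Wl,--defsym=LD_FLASH_OFFSET=0x7000', '-mthumb']
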
@@ -3,18 +3,17 @@
#
import pioutil
if pioutil.is_pio_build():
-   import os,sys
    from os.path import join

    Import("env")

    board = env.BoardConfig()
    board_keys = board.get("build").keys()
    if 'encode' in board_keys:
        env.AddPostAction(
            join("$BUILD_DIR", "${PROGNAME}.bin"),
            env.VerboseAction(" ".join([
                "$OBJCOPY", "-O", "srec",
                "\"$BUILD_DIR/${PROGNAME}.elf\"", "\"" + join("$BUILD_DIR", board.get("build.encode")) + "\""
            ]), "Building " + board.get("build.encode"))
        )
@@ -4,10 +4,11 @@

# Make sure 'vscode init' is not the current command
def is_pio_build():
    from SCons.Script import DefaultEnvironment
    env = DefaultEnvironment()
+   if "IsCleanTarget" in dir(env) and env.IsCleanTarget(): return False
    return not env.IsIntegrationDump()

def get_pio_version():
    from platformio import util
    return util.pioversion_to_intstr()
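
An aside, not from the diff: get_pio_version() returns a list of ints, so the version guard used later in this compare relies on ordinary Python list comparison, which is lexicographic element by element:

print([6, 0, 2] < [6, 1, 1])    # True  -> 6.0.2 is too old
print([6, 1, 10] < [6, 1, 1])   # False -> 6.1.10 is newer than 6.1.1
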
@@ -5,115 +5,132 @@
import pioutil
if pioutil.is_pio_build():

-   import os,re,sys
+   import re,sys
+   from pathlib import Path
    Import("env")

    def get_envs_for_board(board):
-       with open(os.path.join("Marlin", "src", "pins", "pins.h"), "r") as file:
+       ppath = Path("Marlin/src/pins/pins.h")
+       with ppath.open() as file:

            if sys.platform == 'win32':
                envregex = r"(?:env|win):"
            elif sys.platform == 'darwin':
                envregex = r"(?:env|mac|uni):"
            elif sys.platform == 'linux':
                envregex = r"(?:env|lin|uni):"
            else:
                envregex = r"(?:env):"

            r = re.compile(r"if\s+MB\((.+)\)")
            if board.startswith("BOARD_"):
                board = board[6:]

            for line in file:
                mbs = r.findall(line)
                if mbs and board in re.split(r",\s*", mbs[0]):
                    line = file.readline()
                    found_envs = re.match(r"\s*#include .+" + envregex, line)
                    if found_envs:
                        envlist = re.findall(envregex + r"(\w+)", line)
                        return [ "env:"+s for s in envlist ]
        return []

    def check_envs(build_env, board_envs, config):
        if build_env in board_envs:
            return True
        ext = config.get(build_env, 'extends', default=None)
        if ext:
            if isinstance(ext, str):
                return check_envs(ext, board_envs, config)
            elif isinstance(ext, list):
                for ext_env in ext:
                    if check_envs(ext_env, board_envs, config):
                        return True
        return False

    def sanity_check_target():
        # Sanity checks:
        if 'PIOENV' not in env:
            raise SystemExit("Error: PIOENV is not defined. This script is intended to be used with PlatformIO")

+       # Require PlatformIO 6.1.1 or later
+       vers = pioutil.get_pio_version()
+       if vers < [6, 1, 1]:
+           raise SystemExit("Error: Marlin requires PlatformIO >= 6.1.1. Use 'pio upgrade' to get a newer version.")

        if 'MARLIN_FEATURES' not in env:
-           raise SystemExit("Error: this script should be used after common Marlin scripts")
+           raise SystemExit("Error: this script should be used after common Marlin scripts.")

+       if len(env['MARLIN_FEATURES']) == 0:
+           raise SystemExit("Error: Failed to parse Marlin features. See previous error messages.")

        if 'MOTHERBOARD' not in env['MARLIN_FEATURES']:
            raise SystemExit("Error: MOTHERBOARD is not defined in Configuration.h")

        build_env = env['PIOENV']
        motherboard = env['MARLIN_FEATURES']['MOTHERBOARD']
        board_envs = get_envs_for_board(motherboard)
        config = env.GetProjectConfig()
        result = check_envs("env:"+build_env, board_envs, config)

        if not result:
            err = "Error: Build environment '%s' is incompatible with %s. Use one of these: %s" % \
                ( build_env, motherboard, ", ".join([ e[4:] for e in board_envs if e.startswith("env:") ]) )
            raise SystemExit(err)

        #
        # Check for Config files in two common incorrect places
        #
-       for p in [ env['PROJECT_DIR'], os.path.join(env['PROJECT_DIR'], "config") ]:
-           for f in [ "Configuration.h", "Configuration_adv.h" ]:
-               if os.path.isfile(os.path.join(p, f)):
+       epath = Path(env['PROJECT_DIR'])
+       for p in [ epath, epath / "config" ]:
+           for f in ("Configuration.h", "Configuration_adv.h"):
+               if (p / f).is_file():
                    err = "ERROR: Config files found in directory %s. Please move them into the Marlin subfolder." % p
                    raise SystemExit(err)

        #
        # Find the name.cpp.o or name.o and remove it
        #
        def rm_ofile(subdir, name):
-           build_dir = os.path.join(env['PROJECT_BUILD_DIR'], build_env);
-           for outdir in [ build_dir, os.path.join(build_dir, "debug") ]:
-               for ext in [ ".cpp.o", ".o" ]:
-                   fpath = os.path.join(outdir, "src", "src", subdir, name + ext)
-                   if os.path.exists(fpath):
-                       os.remove(fpath)
+           build_dir = Path(env['PROJECT_BUILD_DIR'], build_env);
+           for outdir in (build_dir, build_dir / "debug"):
+               for ext in (".cpp.o", ".o"):
+                   fpath = outdir / "src/src" / subdir / (name + ext)
+                   if fpath.exists():
+                       fpath.unlink()

        #
        # Give warnings on every build
        #
        rm_ofile("inc", "Warnings")

        #
        # Rebuild 'settings.cpp' for EEPROM_INIT_NOW
        #
        if 'EEPROM_INIT_NOW' in env['MARLIN_FEATURES']:
            rm_ofile("module", "settings")

        #
        # Check for old files indicating an entangled Marlin (mixing old and new code)
        #
        mixedin = []
-       p = os.path.join(env['PROJECT_DIR'], "Marlin", "src", "lcd", "dogm")
+       p = Path(env['PROJECT_DIR'], "Marlin/src/lcd/dogm")
        for f in [ "ultralcd_DOGM.cpp", "ultralcd_DOGM.h" ]:
-           if os.path.isfile(os.path.join(p, f)):
+           if (p / f).is_file():
                mixedin += [ f ]
-       p = os.path.join(env['PROJECT_DIR'], "Marlin", "src", "feature", "bedlevel", "abl")
+       p = Path(env['PROJECT_DIR'], "Marlin/src/feature/bedlevel/abl")
        for f in [ "abl.cpp", "abl.h" ]:
-           if os.path.isfile(os.path.join(p, f)):
+           if (p / f).is_file():
                mixedin += [ f ]
        if mixedin:
            err = "ERROR: Old files fell into your Marlin folder. Remove %s and try again" % ", ".join(mixedin)
            raise SystemExit(err)

+       #
+       # Check FILAMENT_RUNOUT_SCRIPT has a %c parameter when required
+       #
+       if 'FILAMENT_RUNOUT_SENSOR' in env['MARLIN_FEATURES'] and 'NUM_RUNOUT_SENSORS' in env['MARLIN_FEATURES']:
+           if env['MARLIN_FEATURES']['NUM_RUNOUT_SENSORS'].isdigit() and int(env['MARLIN_FEATURES']['NUM_RUNOUT_SENSORS']) > 1:
+               if 'FILAMENT_RUNOUT_SCRIPT' in env['MARLIN_FEATURES'] and "%c" not in env['MARLIN_FEATURES']['FILAMENT_RUNOUT_SCRIPT']:
+                   err = "ERROR: FILAMENT_RUNOUT_SCRIPT needs a %c parameter (e.g., 'M600 T%c') when NUM_RUNOUT_SENSORS is > 1."
+                   raise SystemExit(err)

    sanity_check_target()
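
An aside, not from the diff: how get_envs_for_board() pulls env names out of pins.h. The include line below is illustrative of the real format, not copied from pins.h:

import re
envregex = r"(?:env|lin|uni):"
line = '#include "stm32f1/pins_BTT_SKR_MINI.h" // env:STM32F103RC_btt env:STM32F103RC_btt_USB'
print(re.findall(envregex + r"(\w+)", line))
# ['STM32F103RC_btt', 'STM32F103RC_btt_USB']
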
@@ -1,14 +1,14 @@
|
||||
#
|
||||
# preprocessor.py
|
||||
#
|
||||
import subprocess,os,re
|
||||
import subprocess
|
||||
|
||||
nocache = 1
|
||||
verbose = 0
|
||||
|
||||
def blab(str):
|
||||
if verbose:
|
||||
print(str)
|
||||
if verbose:
|
||||
print(str)
|
||||
|
||||
################################################################################
|
||||
#
|
||||
@@ -16,36 +16,36 @@ def blab(str):
|
||||
#
|
||||
preprocessor_cache = {}
|
||||
def run_preprocessor(env, fn=None):
|
||||
filename = fn or 'buildroot/share/PlatformIO/scripts/common-dependencies.h'
|
||||
if filename in preprocessor_cache:
|
||||
return preprocessor_cache[filename]
|
||||
filename = fn or 'buildroot/share/PlatformIO/scripts/common-dependencies.h'
|
||||
if filename in preprocessor_cache:
|
||||
return preprocessor_cache[filename]
|
||||
|
||||
# Process defines
|
||||
build_flags = env.get('BUILD_FLAGS')
|
||||
build_flags = env.ParseFlagsExtended(build_flags)
|
||||
# Process defines
|
||||
build_flags = env.get('BUILD_FLAGS')
|
||||
build_flags = env.ParseFlagsExtended(build_flags)
|
||||
|
||||
cxx = search_compiler(env)
|
||||
cmd = ['"' + cxx + '"']
|
||||
cxx = search_compiler(env)
|
||||
cmd = ['"' + cxx + '"']
|
||||
|
||||
# Build flags from board.json
|
||||
#if 'BOARD' in env:
|
||||
# cmd += [env.BoardConfig().get("build.extra_flags")]
|
||||
for s in build_flags['CPPDEFINES']:
|
||||
if isinstance(s, tuple):
|
||||
cmd += ['-D' + s[0] + '=' + str(s[1])]
|
||||
else:
|
||||
cmd += ['-D' + s]
|
||||
# Build flags from board.json
|
||||
#if 'BOARD' in env:
|
||||
# cmd += [env.BoardConfig().get("build.extra_flags")]
|
||||
for s in build_flags['CPPDEFINES']:
|
||||
if isinstance(s, tuple):
|
||||
cmd += ['-D' + s[0] + '=' + str(s[1])]
|
||||
else:
|
||||
cmd += ['-D' + s]
|
||||
|
||||
cmd += ['-D__MARLIN_DEPS__ -w -dM -E -x c++']
|
||||
depcmd = cmd + [ filename ]
|
||||
cmd = ' '.join(depcmd)
|
||||
blab(cmd)
|
||||
try:
|
||||
define_list = subprocess.check_output(cmd, shell=True).splitlines()
|
||||
except:
|
||||
define_list = {}
|
||||
preprocessor_cache[filename] = define_list
|
||||
return define_list
|
||||
cmd += ['-D__MARLIN_DEPS__ -w -dM -E -x c++']
|
||||
depcmd = cmd + [ filename ]
|
||||
cmd = ' '.join(depcmd)
|
||||
blab(cmd)
|
||||
try:
|
||||
define_list = subprocess.check_output(cmd, shell=True).splitlines()
|
||||
except:
|
||||
define_list = {}
|
||||
preprocessor_cache[filename] = define_list
|
||||
return define_list
|
||||
|
||||
|
||||
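# Each returned entry is one raw '#define' line from the compiler's -dM dump,
# e.g. (hypothetical define) b'#define EXTRUDERS 1'. Callers split the name
# from the value themselves, as signature.py does later in this diff:
#   key_val = line[8:].strip().decode().split(' ')   # drop '#define '
#   key, value = key_val[0], ' '.join(key_val[1:])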
################################################################################
#
@@ -54,51 +54,41 @@ def run_preprocessor(env, fn=None):
#
def search_compiler(env):

    ENV_BUILD_PATH = os.path.join(env['PROJECT_BUILD_DIR'], env['PIOENV'])
    GCC_PATH_CACHE = os.path.join(ENV_BUILD_PATH, ".gcc_path")
    from pathlib import Path, PurePath

    try:
        filepath = env.GetProjectOption('custom_gcc')
        blab("Getting compiler from env")
        return filepath
    except:
        pass
    ENV_BUILD_PATH = Path(env['PROJECT_BUILD_DIR'], env['PIOENV'])
    GCC_PATH_CACHE = ENV_BUILD_PATH / ".gcc_path"

    # Warning: The cached .gcc_path will obscure a newly-installed toolkit
    if not nocache and os.path.exists(GCC_PATH_CACHE):
        blab("Getting g++ path from cache")
        with open(GCC_PATH_CACHE, 'r') as f:
            return f.read()
    try:
        gccpath = env.GetProjectOption('custom_gcc')
        blab("Getting compiler from env")
        return gccpath
    except:
        pass

    # Find the current platform compiler by searching the $PATH,
    # which will be in a platformio toolchain bin folder
    path_regex = re.escape(env['PROJECT_PACKAGES_DIR'])
    gcc = "g++"
    if env['PLATFORM'] == 'win32':
        path_separator = ';'
        path_regex += r'.*\\bin'
        gcc += ".exe"
    else:
        path_separator = ':'
        path_regex += r'/.+/bin'
    # Warning: The cached .gcc_path will obscure a newly-installed toolkit
    if not nocache and GCC_PATH_CACHE.exists():
        blab("Getting g++ path from cache")
        return GCC_PATH_CACHE.read_text()

    # Search for the compiler
    for pathdir in env['ENV']['PATH'].split(path_separator):
        if not re.search(path_regex, pathdir, re.IGNORECASE):
            continue
        for filepath in os.listdir(pathdir):
            if not filepath.endswith(gcc):
                continue
            # Use the entire path so we don't rely on the env PATH
            filepath = os.path.sep.join([pathdir, filepath])
            # Cache the g++ path so we don't search every time
            if not nocache and os.path.exists(ENV_BUILD_PATH):
                blab("Caching g++ for current env")
                with open(GCC_PATH_CACHE, 'w+') as f:
                    f.write(filepath)
    # Use any item in $PATH corresponding to a platformio toolchain bin folder
    path_separator = ':'
    gcc_exe = '*g++'
    if env['PLATFORM'] == 'win32':
        path_separator = ';'
        gcc_exe += ".exe"

            return filepath
    # Search for the compiler in PATH
    for ppath in map(Path, env['ENV']['PATH'].split(path_separator)):
        if ppath.match(env['PROJECT_PACKAGES_DIR'] + "/**/bin"):
            for gpath in ppath.glob(gcc_exe):
                gccpath = str(gpath.resolve())
                # Cache the g++ path so we don't search every time
                if not nocache and ENV_BUILD_PATH.exists():
                    blab("Caching g++ for current env")
                    GCC_PATH_CACHE.write_text(gccpath)
                return gccpath

    filepath = env.get('CXX')
    blab("Couldn't find a compiler! Fallback to %s" % filepath)
    return filepath
    gccpath = env.get('CXX')
    blab("Couldn't find a compiler! Fallback to %s" % gccpath)
    return gccpath
@@ -4,6 +4,6 @@
#
import pioutil
if pioutil.is_pio_build():
    from datetime import datetime
    Import("env")
    env['PROGNAME'] = datetime.now().strftime("firmware-%Y%m%d-%H%M%S")
buildroot/share/PlatformIO/scripts/schema.py (Normal file, 421 lines)
@@ -0,0 +1,421 @@
#!/usr/bin/env python3
#
# schema.py
#
# Used by signature.py via common-dependencies.py to generate a schema file during the PlatformIO build.
# This script can also be run standalone from within the Marlin repo to generate all schema files.
#
import re,json
from pathlib import Path

def extend_dict(d:dict, k:tuple):
    if len(k) >= 1 and k[0] not in d:
        d[k[0]] = {}
    if len(k) >= 2 and k[1] not in d[k[0]]:
        d[k[0]][k[1]] = {}
    if len(k) >= 3 and k[2] not in d[k[0]][k[1]]:
        d[k[0]][k[1]][k[2]] = {}

grouping_patterns = [
    re.compile(r'^([XYZIJKUVW]|[XYZ]2|Z[34]|E[0-7])$'),
    re.compile(r'^AXIS\d$'),
    re.compile(r'^(MIN|MAX)$'),
    re.compile(r'^[0-8]$'),
    re.compile(r'^HOTEND[0-7]$'),
    re.compile(r'^(HOTENDS|BED|PROBE|COOLER)$'),
    re.compile(r'^[XYZIJKUVW]M(IN|AX)$')
]
# If the indexed part of the option name matches a pattern
# then add it to the dictionary.
def find_grouping(gdict, filekey, sectkey, optkey, pindex):
    optparts = optkey.split('_')
    if 1 < len(optparts) > pindex:
        for patt in grouping_patterns:
            if patt.match(optparts[pindex]):
                subkey = optparts[pindex]
                modkey = '_'.join(optparts)
                optparts[pindex] = '*'
                wildkey = '_'.join(optparts)
                kkey = f'{filekey}|{sectkey}|{wildkey}'
                if kkey not in gdict: gdict[kkey] = []
                gdict[kkey].append((subkey, modkey))

# Build a list of potential groups. Only those with multiple items will be grouped.
def group_options(schema):
    for pindex in range(10, -1, -1):
        found_groups = {}
        for filekey, f in schema.items():
            for sectkey, s in f.items():
                for optkey in s:
                    find_grouping(found_groups, filekey, sectkey, optkey, pindex)

        fkeys = [ k for k in found_groups.keys() ]
        for kkey in fkeys:
            items = found_groups[kkey]
            if len(items) > 1:
                f, s, w = kkey.split('|')
                extend_dict(schema, (f, s, w))                      # Add the wildcard group to the schema
                for subkey, optkey in items:                        # Add all items to the wildcard group
                    schema[f][s][w][subkey] = schema[f][s][optkey]  # Move the non-wildcard item into the wildcard group
                    del schema[f][s][optkey]
            del found_groups[kkey]
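# A minimal sketch (hypothetical option names) of what the grouping does:
#   sch = { 'basic': { 'hardware': {
#       'X_MIN_POS': {...}, 'Y_MIN_POS': {...}, 'Z_MIN_POS': {...} } } }
#   group_options(sch)
# collapses the X/Y/Z siblings into one wildcard entry:
#   { 'basic': { 'hardware': { '*_MIN_POS': {
#       'X': {...}, 'Y': {...}, 'Z': {...} } } } }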
# Extract all board names from boards.h
def load_boards():
    bpath = Path("Marlin/src/core/boards.h")
    if bpath.is_file():
        with bpath.open() as bfile:
            boards = []
            for line in bfile:
                if line.startswith("#define BOARD_"):
                    bname = line.split()[1]
                    if bname != "BOARD_UNKNOWN": boards.append(bname)
            return "['" + "','".join(boards) + "']"
    return ''

#
# Extract a schema from the current configuration files
#
def extract():
    # Load board names from boards.h
    boards = load_boards()

    # Parsing states
    class Parse:
        NORMAL = 0          # No condition yet
        BLOCK_COMMENT = 1   # Looking for the end of the block comment
        EOL_COMMENT = 2     # EOL comment started, maybe add the next comment?
        GET_SENSORS = 3     # Gathering temperature sensor options
        ERROR = 9           # Syntax error

    # List of files to process, with shorthand
    filekey = { 'Configuration.h':'basic', 'Configuration_adv.h':'advanced' }
    # A JSON object to store the data
    sch_out = { 'basic':{}, 'advanced':{} }
    # Regex for #define NAME [VALUE] [COMMENT] with a sanitized line
    defgrep = re.compile(r'^(//)?\s*(#define)\s+([A-Za-z0-9_]+)\s*(.*?)\s*(//.+)?$')
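    # For example, a (hypothetical) sanitized line
    #   '//#define SERIAL_PORT 0 // Serial port index'
    # yields match groups: (1)='//' (commented out), (3)='SERIAL_PORT',
    # (4)='0', and (5)='// Serial port index'.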
    # Defines to ignore
    ignore = ('CONFIGURATION_H_VERSION', 'CONFIGURATION_ADV_H_VERSION', 'CONFIG_EXAMPLES_DIR', 'CONFIG_EXPORT')
    # Start with an unknown state
    state = Parse.NORMAL
    # Serial ID
    sid = 0
    # Loop through the files and parse them line by line
    for fn, fk in filekey.items():
        with Path("Marlin", fn).open() as fileobj:
            section = 'none'        # Current Settings section
            line_number = 0         # Counter for the line number of the file
            conditions = []         # Create a condition stack for the current file
            comment_buff = []       # A temporary buffer for comments
            options_json = ''       # A buffer for the most recent options JSON found
            eol_options = False     # The options came from the end of the line, so only apply them once
            join_line = False       # A flag that the line should be joined with the previous one
            line = ''               # A line buffer to handle \ continuation
            last_added_ref = None   # Reference to the last added item
            # Loop through the lines in the file
            for the_line in fileobj.readlines():
                line_number += 1

                # Clean the line for easier parsing
                the_line = the_line.strip()

                if join_line:   # A previous line is being made longer
                    line += (' ' if line else '') + the_line
                else:           # Otherwise, start the line anew
                    line, line_start = the_line, line_number

                # If the resulting line ends with a \, don't process it now.
                # Strip the end off. The next line will be joined with it.
                join_line = line.endswith("\\")
                if join_line:
                    line = line[:-1].strip()
                    continue
                else:
                    line_end = line_number

                defmatch = defgrep.match(line)

                # Special handling for EOL comments after a #define.
                # At this point the #define is already digested and inserted,
                # so we have to extend it
                if state == Parse.EOL_COMMENT:
                    # If the line is not a comment, we're done with the EOL comment
                    if not defmatch and the_line.startswith('//'):
                        comment_buff.append(the_line[2:].strip())
                    else:
                        last_added_ref['comment'] = ' '.join(comment_buff)
                        comment_buff = []
                        state = Parse.NORMAL

                def use_comment(c, opt, sec, bufref):
                    if c.startswith(':'):       # If the comment starts with : then it has magic JSON
                        d = c[1:].strip()       # Strip the leading :
                        cbr = c.rindex('}') if d.startswith('{') else c.rindex(']') if d.startswith('[') else 0
                        if cbr:
                            opt, cmt = c[1:cbr+1].strip(), c[cbr+1:].strip()
                            if cmt != '': bufref.append(cmt)
                        else:
                            opt = c[1:].strip()
                    elif c.startswith('@section'):      # Start a new section
                        sec = c[8:].strip()
                    elif not c.startswith('========'):
                        bufref.append(c)
                    return opt, sec

                # In a block comment, capture lines up to the end of the comment.
                # Assume nothing follows the comment closure.
                if state in (Parse.BLOCK_COMMENT, Parse.GET_SENSORS):
                    endpos = line.find('*/')
                    if endpos < 0:
                        cline = line
                    else:
                        cline, line = line[:endpos].strip(), line[endpos+2:].strip()

                        # Temperature sensors are done
                        if state == Parse.GET_SENSORS:
                            options_json = f'[ {options_json[:-2]} ]'

                        state = Parse.NORMAL

                    # Strip the leading '*' from block comments
                    if cline.startswith('*'): cline = cline[1:].strip()

                    # Collect temperature sensors
                    if state == Parse.GET_SENSORS:
                        sens = re.match(r'^(-?\d+)\s*:\s*(.+)$', cline)
                        if sens:
                            s2 = sens[2].replace("'","''")
                            options_json += f"{sens[1]}:'{s2}', "

                    elif state == Parse.BLOCK_COMMENT:

                        # Look for temperature sensors
                        if cline == "Temperature sensors available:":
                            state, cline = Parse.GET_SENSORS, "Temperature Sensors"

                        options_json, section = use_comment(cline, options_json, section, comment_buff)

                # For the normal state we're looking for any non-blank line
                elif state == Parse.NORMAL:
                    # Skip a commented define when evaluating the comment opening
                    st = 2 if re.match(r'^//\s*#define', line) else 0
                    cpos1 = line.find('/*')      # Does a block comment start on the line?
                    cpos2 = line.find('//', st)  # Does an end-of-line comment start on the line?

                    # Only the first comment starter gets evaluated
                    cpos = -1
                    if cpos1 != -1 and (cpos1 < cpos2 or cpos2 == -1):
                        cpos = cpos1
                        comment_buff = []
                        state = Parse.BLOCK_COMMENT
                        eol_options = False

                    elif cpos2 != -1 and (cpos2 < cpos1 or cpos1 == -1):
                        cpos = cpos2

                        # A comment after a define may be continued on the following lines
                        if defmatch != None and cpos > 10:
                            state = Parse.EOL_COMMENT
                            comment_buff = []

                    # Process the start of a new comment
                    if cpos != -1:
                        cline, line = line[cpos+2:].strip(), line[:cpos].strip()

                        if state == Parse.BLOCK_COMMENT:
                            # Strip the leading '*' from block comments
                            if cline.startswith('*'): cline = cline[1:].strip()
                        else:
                            # Expire end-of-line options after first use
                            if cline.startswith(':'): eol_options = True

                        # Buffer a non-empty comment start
                        if cline != '':
                            options_json, section = use_comment(cline, options_json, section, comment_buff)

                    # If the line has nothing before the comment, go to the next line
                    if line == '':
                        options_json = ''
                        continue

                # Parenthesize the given expression if needed
                def atomize(s):
                    if s == '' \
                    or re.match(r'^[A-Za-z0-9_]*(\([^)]+\))?$', s) \
                    or re.match(r'^[A-Za-z0-9_]+ == \d+?$', s):
                        return s
                    return f'({s})'

                #
                # The conditions stack is an array containing condition-arrays.
                # Each condition-array lists the conditions for the current block.
                # IF/N/DEF adds a new condition-array to the stack.
                # ELSE/ELIF/ENDIF pop the condition-array.
                # ELSE/ELIF negate the last item in the popped condition-array.
                # ELIF adds a new condition to the end of the array.
                # ELSE/ELIF re-push the condition-array.
                #
                cparts = line.split()
                iselif, iselse = cparts[0] == '#elif', cparts[0] == '#else'
                if iselif or iselse or cparts[0] == '#endif':
                    if len(conditions) == 0:
                        raise Exception(f'no #if block at line {line_number}')

                    # Pop the last condition-array from the stack
                    prev = conditions.pop()

                    if iselif or iselse:
                        prev[-1] = '!' + prev[-1]   # Invert the last condition
                        if iselif: prev.append(atomize(line[5:].strip()))
                        conditions.append(prev)

                elif cparts[0] == '#if':
                    conditions.append([ atomize(line[3:].strip()) ])
                elif cparts[0] == '#ifdef':
                    conditions.append([ f'defined({line[6:].strip()})' ])
                elif cparts[0] == '#ifndef':
                    conditions.append([ f'!defined({line[7:].strip()})' ])
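                # For example, a (hypothetical) sequence of header lines
                #   #if TEMP_SENSOR_0 == 1
                #   #ifdef FAST_PWM_FAN
                #   #else
                # leaves the stack as:
                #   [ ['TEMP_SENSOR_0 == 1'], ['!defined(FAST_PWM_FAN)'] ]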
                # Handle a complete #define line
                elif defmatch != None:

                    # Get the match groups into vars
                    enabled, define_name, val = defmatch[1] == None, defmatch[3], defmatch[4]

                    # Increment the serial ID
                    sid += 1

                    # Create a new dictionary for the current #define
                    define_info = {
                        'section': section,
                        'name': define_name,
                        'enabled': enabled,
                        'line': line_start,
                        'sid': sid
                    }

                    # Type is based on the value
                    if val == '':
                        value_type = 'switch'
                    elif re.match(r'^(true|false)$', val):
                        value_type = 'bool'
                        val = val == 'true'
                    elif re.match(r'^[-+]?\s*\d+$', val):
                        value_type = 'int'
                        val = int(val)
                    elif re.match(r'[-+]?\s*(\d+\.|\d*\.\d+)([eE][-+]?\d+)?[fF]?', val):
                        value_type = 'float'
                        val = float(val.replace('f',''))
                    else:
                        value_type = 'string' if val[0] == '"' \
                                else 'char' if val[0] == "'" \
                                else 'state' if re.match(r'^(LOW|HIGH)$', val) \
                                else 'enum' if re.match(r'^[A-Za-z0-9_]{3,}$', val) \
                                else 'int[]' if re.match(r'^{(\s*[-+]?\s*\d+\s*(,\s*)?)+}$', val) \
                                else 'float[]' if re.match(r'^{(\s*[-+]?\s*(\d+\.|\d*\.\d+)([eE][-+]?\d+)?[fF]?\s*(,\s*)?)+}$', val) \
                                else 'array' if val[0] == '{' \
                                else ''

                    if val != '': define_info['value'] = val
                    if value_type != '': define_info['type'] = value_type
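                    # For example (hypothetical defines):
                    #   '#define SWITCHING_TOOLHEAD'        -> 'switch'
                    #   '#define USE_ZMIN_PLUG true'        -> 'bool'
                    #   '#define EXTRUDERS 2'               -> 'int'
                    #   '#define DEFAULT_Kp 22.20'          -> 'float'
                    #   '#define MACHINE_NAME "3D Printer"' -> 'string'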
                    # Join up accumulated conditions with &&
                    if conditions: define_info['requires'] = ' && '.join(sum(conditions, []))

                    # If the comment_buff is not empty, add the comment to the info
                    if comment_buff:
                        full_comment = '\n'.join(comment_buff)

                        # An EOL comment will be added later
                        # The handling could go here instead of above
                        if state == Parse.EOL_COMMENT:
                            define_info['comment'] = ''
                        else:
                            define_info['comment'] = full_comment
                            comment_buff = []

                        # If the comment specifies units, add that to the info
                        units = re.match(r'^\(([^)]+)\)', full_comment)
                        if units:
                            units = units[1]
                            if units == 's' or units == 'sec': units = 'seconds'
                            define_info['units'] = units

                    # Set the options for the current #define
                    if define_name == "MOTHERBOARD" and boards != '':
                        define_info['options'] = boards
                    elif options_json != '':
                        define_info['options'] = options_json
                        if eol_options: options_json = ''

                    # Create the section dict if it doesn't exist yet
                    if section not in sch_out[fk]: sch_out[fk][section] = {}

                    # If the define has already been seen...
                    if define_name in sch_out[fk][section]:
                        info = sch_out[fk][section][define_name]
                        if isinstance(info, dict): info = [ info ]  # Convert a single dict into a list
                        info.append(define_info)                    # Add to the list
                    else:
                        # Add the define dict with its name as the key
                        sch_out[fk][section][define_name] = define_info

                    if state == Parse.EOL_COMMENT:
                        last_added_ref = define_info

    return sch_out

def dump_json(schema:dict, jpath:Path):
    with jpath.open('w') as jfile:
        json.dump(schema, jfile, ensure_ascii=False, indent=2)

def dump_yaml(schema:dict, ypath:Path):
    import yaml
    with ypath.open('w') as yfile:
        yaml.dump(schema, yfile, default_flow_style=False, width=120, indent=2)

def main():
    try:
        schema = extract()
    except Exception as exc:
        print("Error: " + str(exc))
        schema = None

    if schema:

        # Get the first command line argument
        import sys
        if len(sys.argv) > 1:
            arg = sys.argv[1]
        else:
            arg = 'some'

        # JSON schema
        if arg in ['some', 'json', 'jsons']:
            print("Generating JSON ...")
            dump_json(schema, Path('schema.json'))

        # JSON schema (wildcard names)
        if arg in ['group', 'jsons']:
            group_options(schema)
            dump_json(schema, Path('schema_grouped.json'))

        # YAML
        if arg in ['some', 'yml', 'yaml']:
            try:
                import yaml
            except ImportError:
                print("Installing YAML module ...")
                import subprocess
                try:
                    subprocess.run(['python3', '-m', 'pip', 'install', 'pyyaml'])
                    import yaml
                except:
                    print("Failed to install YAML module")
                    return

            print("Generating YML ...")
            dump_yaml(schema, Path('schema.yml'))

if __name__ == '__main__':
    main()
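Run standalone from the repo root, the script takes one of the argument forms handled in main() above: for example, `python3 buildroot/share/PlatformIO/scripts/schema.py jsons` writes both schema.json and schema_grouped.json, while `python3 buildroot/share/PlatformIO/scripts/schema.py yaml` writes only schema.yml.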
@@ -1,7 +1,11 @@
#
# signature.py
#
import os,subprocess,re,json,hashlib
import schema

import subprocess,re,json,hashlib
from datetime import datetime
from pathlib import Path

#
# Return all macro names in a header as an array, so we can take
@@ -12,32 +16,32 @@ import os,subprocess,re,json,hashlib
# resulting config.ini to produce more exact configuration files.
#
def extract_defines(filepath):
    f = open(filepath, encoding="utf8").read().split("\n")
    a = []
    for line in f:
        sline = line.strip()
        if sline[:7] == "#define":
            # Extract the key here (we don't care about the value)
            kv = sline[8:].strip().split()
            a.append(kv[0])
    return a

# Compute the SHA256 hash of a file
def get_file_sha256sum(filepath):
    sha256_hash = hashlib.sha256()
    with open(filepath,"rb") as f:
        # Read and update the hash string value in blocks of 4K
        for byte_block in iter(lambda: f.read(4096),b""):
            sha256_hash.update(byte_block)
    return sha256_hash.hexdigest()

#
# Compress a JSON file into a zip file
#
import zipfile
def compress_file(filepath, outputbase):
    with zipfile.ZipFile(outputbase + '.zip', 'w', compression=zipfile.ZIP_BZIP2, compresslevel=9) as zipf:
        zipf.write(filepath, compress_type=zipfile.ZIP_BZIP2, compresslevel=9)
def compress_file(filepath, outpath):
    with zipfile.ZipFile(outpath, 'w', compression=zipfile.ZIP_BZIP2, compresslevel=9) as zipf:
        zipf.write(filepath, compress_type=zipfile.ZIP_BZIP2, compresslevel=9)
#
# Compute the build signature. The idea is to extract all defines in the configuration headers
@@ -45,139 +49,228 @@ def compress_file(filepath, outputbase):
# We can reverse the signature to get a 1:1 equivalent configuration file
#
def compute_build_signature(env):
    if 'BUILD_SIGNATURE' in env:
        return

    # Definitions from these files will be kept
    files_to_keep = [ 'Marlin/Configuration.h', 'Marlin/Configuration_adv.h' ]

    build_dir = os.path.join(env['PROJECT_BUILD_DIR'], env['PIOENV'])
    build_path = Path(env['PROJECT_BUILD_DIR'], env['PIOENV'])

    # Check if we can skip processing
    hashes = ''
    for header in files_to_keep:
        hashes += get_file_sha256sum(header)[0:10]

    marlin_json = os.path.join(build_dir, 'marlin_config.json')
    marlin_zip = os.path.join(build_dir, 'mc')
    marlin_json = build_path / 'marlin_config.json'
    marlin_zip = build_path / 'mc.zip'

    # Read the existing config file
    try:
        with open(marlin_json, 'r') as infile:
            conf = json.load(infile)
            if conf['__INITIAL_HASH'] == hashes:
                # Same configuration, so skip recomputing the build signature
                compress_file(marlin_json, marlin_zip)
                return
    except:
        pass
    try:
        with marlin_json.open() as infile:
            conf = json.load(infile)
            if conf['__INITIAL_HASH'] == hashes:
                # Same configuration, so skip recomputing the build signature
                compress_file(marlin_json, marlin_zip)
                return
    except:
        pass

    # Get the enabled config options based on the preprocessor
    from preprocessor import run_preprocessor
    complete_cfg = run_preprocessor(env)

    # Dumb #define extraction from the configuration files
    conf_defines = {}
    all_defines = []
    for header in files_to_keep:
        defines = extract_defines(header)
        # To filter only the defines we want
        all_defines += defines
        # To remember which file each one came from
        conf_defines[header.split('/')[-1]] = defines

    r = re.compile(r"\(+(\s*-*\s*_.*)\)+")

    # The first step is to collect all valid macros
    defines = {}
    for line in complete_cfg:

        # Split the define from the value
        key_val = line[8:].strip().decode().split(' ')
        key, value = key_val[0], ' '.join(key_val[1:])

        # Ignore values starting with two underscores, since they are low-level
        if len(key) > 2 and key[0:2] == "__":
            continue
        # Ignore values containing a parenthesis (likely a function macro)
        if '(' in key and ')' in key:
            continue

        # Then filter dumb values
        if r.match(value):
            continue

        defines[key] = value if len(value) else ""

    if not 'CONFIGURATION_EMBEDDING' in defines:
        return
    #
    # Continue to gather data for CONFIGURATION_EMBEDDING or CONFIG_EXPORT
    #
    if not ('CONFIGURATION_EMBEDDING' in defines or 'CONFIG_EXPORT' in defines):
        return

    # The second step is to filter out useless macros
    resolved_defines = {}
    for key in defines:
        # Remove all boards now
        if key.startswith("BOARD_") and key != "BOARD_INFO_NAME":
            continue
        # Remove all keys ending in "_NAME" since they make no difference to the configuration
        if key.endswith("_NAME") and key != "CUSTOM_MACHINE_NAME":
            continue
        # Remove all keys ending in "_T_DECLARED" since they are copies of extraneous system stuff
        if key.endswith("_T_DECLARED"):
            continue
        # Remove keys that are not in the #define list in the Configuration list
        if key not in all_defines + [ 'DETAILED_BUILD_VERSION', 'STRING_DISTRIBUTION_DATE' ]:
            continue

        # Don't be that smart guy here
        resolved_defines[key] = defines[key]

    # Generate a build signature now
    # We are making an object that's a bit more complex than a basic dictionary here
    data = {}
    data['__INITIAL_HASH'] = hashes
    # First create a key for each header here
    for header in conf_defines:
        data[header] = {}

    # Then populate the object with where each key is going (an O(N^2) algorithm here...)
    for key in resolved_defines:
        for header in conf_defines:
            if key in conf_defines[header]:
                data[header][key] = resolved_defines[key]

    # Append the source code version and date
    data['VERSION'] = {}
    data['VERSION']['DETAILED_BUILD_VERSION'] = resolved_defines['DETAILED_BUILD_VERSION']
    data['VERSION']['STRING_DISTRIBUTION_DATE'] = resolved_defines['STRING_DISTRIBUTION_DATE']
    try:
        curver = subprocess.check_output(["git", "describe", "--match=NeVeRmAtCh", "--always"]).strip()
        data['VERSION']['GIT_REF'] = curver.decode()
    except:
        pass

    # Every python needs this toy
    def tryint(key):
        try:
            return int(defines[key])
        except:
            return 0

    #
    # Produce a JSON file for CONFIGURATION_EMBEDDING or CONFIG_DUMP > 0
    #
    with open(marlin_json, 'w') as outfile:
        json.dump(data, outfile, separators=(',', ':'))
    config_dump = tryint('CONFIG_EXPORT')

    # Compress the JSON file as much as we can
    compress_file(marlin_json, marlin_zip)

    #
    # Produce an INI file if CONFIG_EXPORT == 2
    #
    if config_dump == 2:
        print("Generating config.ini ...")
        config_ini = build_path / 'config.ini'
        with config_ini.open('w') as outfile:
            ignore = ('CONFIGURATION_H_VERSION', 'CONFIGURATION_ADV_H_VERSION', 'CONFIG_EXPORT')
            filegrp = { 'Configuration.h':'config:basic', 'Configuration_adv.h':'config:advanced' }
            vers = defines["CONFIGURATION_H_VERSION"]
            dt_string = datetime.now().strftime("%Y-%m-%d at %H:%M:%S")
            ini_fmt = '{0:40}{1}\n'
            outfile.write(
                '#\n'
                + '# Marlin Firmware\n'
                + '# config.ini - Options to apply before the build\n'
                + '#\n'
                + f'# Generated by Marlin build on {dt_string}\n'
                + '#\n'
                + '\n'
                + '[config:base]\n'
                + ini_fmt.format('ini_use_config', ' = all')
                + ini_fmt.format('ini_config_vers', f' = {vers}')
            )
            # Loop through the data array of arrays
            for header in data:
                if header.startswith('__'):
                    continue
                outfile.write('\n[' + filegrp[header] + ']\n')
                for key in sorted(data[header]):
                    if key not in ignore:
                        val = 'on' if data[header][key] == '' else data[header][key]
                        outfile.write(ini_fmt.format(key.lower(), ' = ' + val))
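    # For reference, the generated config.ini starts like this
    # (version and option values hypothetical):
    #   [config:base]
    #   ini_use_config                           = all
    #   ini_config_vers                          = 02000903
    #
    #   [config:basic]
    #   baudrate                                 = 250000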
    # Generate a C source file for storing this array
    with open('Marlin/src/mczip.h','wb') as result_file:
        result_file.write(
            b'#ifndef NO_CONFIGURATION_EMBEDDING_WARNING\n'
            + b'  #warning "Generated file \'mc.zip\' is embedded (Define NO_CONFIGURATION_EMBEDDING_WARNING to suppress this warning.)"\n'
            + b'#endif\n'
            + b'const unsigned char mc_zip[] PROGMEM = {\n '
        )
        count = 0
        for b in open(os.path.join(build_dir, 'mc.zip'), 'rb').read():
            result_file.write(b' 0x%02X,' % b)
            count += 1
            if (count % 16 == 0):
                result_file.write(b'\n ')
        if (count % 16):
            result_file.write(b'\n')
        result_file.write(b'};\n')

    #
    # Produce a schema.json file if CONFIG_EXPORT >= 3
    #
    if config_dump >= 3:
        try:
            conf_schema = schema.extract()
        except Exception as exc:
            print("Error: " + str(exc))
            conf_schema = None

        if conf_schema:
            #
            # Produce a schema.json file if CONFIG_EXPORT == 3
            #
            if config_dump in (3, 13):
                print("Generating schema.json ...")
                schema.dump_json(conf_schema, build_path / 'schema.json')
                if config_dump == 13:
                    schema.group_options(conf_schema)
                    schema.dump_json(conf_schema, build_path / 'schema_grouped.json')

            #
            # Produce a schema.yml file if CONFIG_EXPORT == 4
            #
            elif config_dump == 4:
                print("Generating schema.yml ...")
                try:
                    import yaml
                except ImportError:
                    env.Execute(env.VerboseAction(
                        '$PYTHONEXE -m pip install "pyyaml"',
                        "Installing YAML for schema.yml export",
                    ))
                    import yaml
                schema.dump_yaml(conf_schema, build_path / 'schema.yml')

    # Append the source code version and date
    data['VERSION'] = {}
    data['VERSION']['DETAILED_BUILD_VERSION'] = resolved_defines['DETAILED_BUILD_VERSION']
    data['VERSION']['STRING_DISTRIBUTION_DATE'] = resolved_defines['STRING_DISTRIBUTION_DATE']
    try:
        curver = subprocess.check_output(["git", "describe", "--match=NeVeRmAtCh", "--always"]).strip()
        data['VERSION']['GIT_REF'] = curver.decode()
    except:
        pass

    #
    # Produce a JSON file for CONFIGURATION_EMBEDDING or CONFIG_EXPORT == 1
    #
    if config_dump == 1 or 'CONFIGURATION_EMBEDDING' in defines:
        with marlin_json.open('w') as outfile:
            json.dump(data, outfile, separators=(',', ':'))

    #
    # The rest only applies to CONFIGURATION_EMBEDDING
    #
    if not 'CONFIGURATION_EMBEDDING' in defines:
        return

    # Compress the JSON file as much as we can
    compress_file(marlin_json, marlin_zip)

    # Generate a C source file for storing this array
    with open('Marlin/src/mczip.h','wb') as result_file:
        result_file.write(
            b'#ifndef NO_CONFIGURATION_EMBEDDING_WARNING\n'
            + b'  #warning "Generated file \'mc.zip\' is embedded (Define NO_CONFIGURATION_EMBEDDING_WARNING to suppress this warning.)"\n'
            + b'#endif\n'
            + b'const unsigned char mc_zip[] PROGMEM = {\n '
        )
        count = 0
        for b in (build_path / 'mc.zip').open('rb').read():
            result_file.write(b' 0x%02X,' % b)
            count += 1
            if count % 16 == 0:
                result_file.write(b'\n ')
        if count % 16:
            result_file.write(b'\n')
        result_file.write(b'};\n')
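Stitched together, the generated Marlin/src/mczip.h begins as below (zip bytes abbreviated; the 0x50 0x4B prefix is the standard ZIP magic, the rest of the byte values vary per build):

    #ifndef NO_CONFIGURATION_EMBEDDING_WARNING
      #warning "Generated file 'mc.zip' is embedded (Define NO_CONFIGURATION_EMBEDDING_WARNING to suppress this warning.)"
    #endif
    const unsigned char mc_zip[] PROGMEM = {
      0x50, 0x4B, 0x03, 0x04, ...
    };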
@@ -2,51 +2,52 @@
# simulator.py
# PlatformIO pre: script for simulator builds
#

import pioutil
if pioutil.is_pio_build():
    # Get the environment thus far for the build
    Import("env")

    #print(env.Dump())

    #
    # Give the binary a distinctive name
    #

    env['PROGNAME'] = "MarlinSimulator"

    #
    # If Xcode is installed add the path to its Frameworks folder,
    # or if Mesa is installed try to use its GL/gl.h.
    #

    import sys
    if sys.platform == 'darwin':

        #
        # Silence half of the ranlib warnings. (No equivalent for 'ARFLAGS')
        #
        env['RANLIBFLAGS'] += [ "-no_warning_for_no_symbols" ]

        # Default paths for Xcode and a lucky GL/gl.h dropped by Mesa
        xcode_path = "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/Frameworks"
        mesa_path = "/opt/local/include/GL/gl.h"

        import os.path

        if os.path.exists(xcode_path):
            env['BUILD_FLAGS'] += [ "-F" + xcode_path ]
            print("Using OpenGL framework headers from Xcode.app")
        elif os.path.exists(mesa_path):
            env['BUILD_FLAGS'] += [ '-D__MESA__' ]
            print("Using OpenGL header from", mesa_path)
        else:
            print("\n\nNo OpenGL headers found. Install Xcode for matching headers, or use 'sudo port install mesa' to get a GL/gl.h.\n\n")
            # Break out of the PIO build immediately
            sys.exit(1)
@@ -3,59 +3,59 @@
#
import pioutil
if pioutil.is_pio_build():
    Import("env")

    # Get a build flag's value or None
    def getBuildFlagValue(name):
        for flag in build_flags:
            if isinstance(flag, list) and flag[0] == name:
                return flag[1]
        return None

    # Get an overriding buffer size for RX or TX from the build flags
    def getInternalSize(side):
        return getBuildFlagValue(f"MF_{side}_BUFFER_SIZE") or \
               getBuildFlagValue(f"SERIAL_{side}_BUFFER_SIZE") or \
               getBuildFlagValue(f"USART_{side}_BUF_SIZE")

    # Get the largest defined buffer size for RX or TX
    def getBufferSize(side, default):
        # Get a build flag value or fall back to the given default
        internal = int(getInternalSize(side) or default)
        flag = side + "_BUFFER_SIZE"
        # Return the largest value
        return max(int(mf[flag]), internal) if flag in mf else internal

    # Add a build flag if it's not already defined
    def tryAddFlag(name, value):
        if getBuildFlagValue(name) is None:
            env.Append(BUILD_FLAGS=[f"-D{name}={value}"])

    # Marlin uses the `RX_BUFFER_SIZE` \ `TX_BUFFER_SIZE` options to
    # configure buffer sizes for receiving \ transmitting serial data.
    # Stm32duino uses another set of defines for the same purpose, so this
    # script gets the values from the configuration and uses them to define
    # `SERIAL_RX_BUFFER_SIZE` and `SERIAL_TX_BUFFER_SIZE` as global build
    # flags so they are available for use by the platform.
    #
    # The script will set the value as the default one (64 bytes)
    # or the user-configured one, whichever is higher.
    #
    # Marlin's default buffer sizes are 128 for RX and 32 for TX.
    # The highest value is taken (128/64).
    #
    # If MF_*_BUFFER_SIZE, SERIAL_*_BUFFER_SIZE, USART_*_BUF_SIZE, are
    # defined, the first of these values will be used as the minimum.
    build_flags = env.ParseFlags(env.get('BUILD_FLAGS'))["CPPDEFINES"]
    mf = env["MARLIN_FEATURES"]

    # Get the largest defined buffer sizes for RX or TX, using defaults for undefined
    rxBuf = getBufferSize("RX", 128)
    txBuf = getBufferSize("TX", 64)

    # Provide serial buffer sizes to the stm32duino platform
    tryAddFlag("SERIAL_RX_BUFFER_SIZE", rxBuf)
    tryAddFlag("SERIAL_TX_BUFFER_SIZE", txBuf)
    tryAddFlag("USART_RX_BUF_SIZE", rxBuf)
    tryAddFlag("USART_TX_BUF_SIZE", txBuf)
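    # Worked example (hypothetical configuration): if MARLIN_FEATURES contains
    # RX_BUFFER_SIZE=1024 and there is no TX override, getBufferSize("RX", 128)
    # returns max(1024, 128) = 1024 while getBufferSize("TX", 64) falls back to
    # 64, so the platform is built with -DSERIAL_RX_BUFFER_SIZE=1024 and
    # -DSERIAL_TX_BUFFER_SIZE=64 (plus the matching USART_*_BUF_SIZE flags).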
@@ -144,7 +144,7 @@ class DWIN_ICO_File():
        # process each file:
        try:
            index = int(dirEntry.name[0:3])
            if (index < 0) or (index > 255):
            if not (0 <= index <= 255):
                print('...Ignoring invalid index on', dirEntry.path)
                continue
            #dirEntry.path is iconDir/name
@@ -18,7 +18,6 @@
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#----------------------------------------------------------------

import os
import os.path
import argparse
import DWIN_ICO
@@ -18,7 +18,6 @@
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#----------------------------------------------------------------

import os
import os.path
import argparse
import DWIN_ICO
@@ -47,152 +47,152 @@ different_out_dir = not (output_examples_dir == input_examples_dir)
#----------------------------------------------
def process_file(subdir: str, filename: str):
#----------------------------------------------
    global filenum
    filenum += 1

    print(str(filenum) + ' ' + filename + ': ' + subdir)

    def_line = (def_macro_name + ' "' + subdir.replace('\\', '/') + '"')

    #------------------------
    # Read file
    #------------------------
    lines = []
    infilepath = os.path.join(input_examples_dir, subdir, filename)
    try:
        # UTF-8 because some files contain unicode chars
        with open(infilepath, 'rt', encoding="utf-8") as infile:
            lines = infile.readlines()

    except Exception as e:
        print('Failed to read file: ' + str(e))
        raise Exception

    lines = [line.rstrip('\r\n') for line in lines]

    #------------------------
    # Process lines
    #------------------------
    file_modified = False

    # region state machine
    # -1 = before pragma once;
    #  0 = region to place define;
    #  1 = past region to place define
    region = -1

    outlines = []
    for line in lines:
        outline = line

        if (region == -1) and (def_macro_name in line):
            outline = None
            file_modified = True

        elif (region == -1) and ('pragma once' in line):
            region = 0

        elif (region == 0):
            if (line.strip() == ''):
                pass
            elif (def_macro_name in line):
                region = 1
                if line == def_line:  # leave it as is
                    pass
                else:
                    outline = def_line
                    file_modified = True
            else:  # some other string
                outlines.append(def_line)
                outlines.append('')
                region = 1
                file_modified = True

        elif (region == 1):
            if (def_macro_name in line):
                outline = None
                file_modified = True
            else:
                pass

        # end if
        if outline is not None:
            outlines.append(outline)
    # end for

    #-------------------------
    # Output file
    #-------------------------
    outdir = os.path.join(output_examples_dir, subdir)
    outfilepath = os.path.join(outdir, filename)

    if file_modified:
        # Note: no need to create output dirs, as the initial copy_tree
        # will do that.

        print(' writing ' + str(outfilepath))
        try:
            # Preserve unicode chars; Avoid CR-LF on Windows.
            with open(outfilepath, "w", encoding="utf-8", newline='\n') as outfile:
                outfile.write("\n".join(outlines))
                outfile.write("\n")

        except Exception as e:
            print('Failed to write file: ' + str(e))
            raise Exception
    else:
        print(' no change for ' + str(outfilepath))

#----------
def main():
#----------
    global filenum
    global input_examples_dir
    global output_examples_dir
    filenum = 0

    #--------------------------------
    # Check for requirements
    #--------------------------------
    input_examples_dir = input_examples_dir.strip()
    input_examples_dir = input_examples_dir.rstrip('\\/')
    output_examples_dir = output_examples_dir.strip()
    output_examples_dir = output_examples_dir.rstrip('\\/')

    for dir in [input_examples_dir, output_examples_dir]:
        if not (os.path.exists(dir)):
            print('Directory not found: ' + dir)
            sys.exit(1)

    #--------------------------------
    # Copy tree if necessary.
    #--------------------------------
    # This includes files that are not otherwise included in the
    # insertion of the define statement.
    #
    if different_out_dir:
        print('Copying files to new directory: ' + output_examples_dir)
        try:
            copy_tree(input_examples_dir, output_examples_dir)
        except Exception as e:
            print('Failed to copy directory: ' + str(e))
            raise Exception

    #-----------------------------
    # Find and process files
    #-----------------------------
    len_input_examples_dir = len(input_examples_dir)
    len_input_examples_dir += 1

    for filename in files_to_mod:
        input_path = Path(input_examples_dir)
        filepathlist = input_path.rglob(filename)

        for filepath in filepathlist:
            fulldirpath = str(filepath.parent)
            subdir = fulldirpath[len_input_examples_dir:]

            process_file(subdir, filename)

#==============
print('--- Starting config-labels ---')
@@ -50,7 +50,7 @@ class Thermistor:

        if c < 0:
            print("//////////////////////////////////////////////////////////////////////////////////////")
            print("// WARNING: negative coefficient 'c'! Something may be wrong with the measurements! //")
            print("// WARNING: Negative coefficient 'c'! Something may be wrong with the measurements! //")
            print("//////////////////////////////////////////////////////////////////////////////////////")
            c = -c
        self.c1 = a  # Steinhart-Hart coefficients
@@ -93,8 +93,8 @@ def main(argv):
    r2 = 1641.9      # resistance at middle temperature (1.6 KOhm)
    t3 = 250         # high temperature in Kelvin (250 degC)
    r3 = 226.15      # resistance at high temperature (226.15 Ohm)
    rp = 4700;       # pull-up resistor (4.7 kOhm)
    num_temps = 36;  # number of entries for look-up table
    rp = 4700        # pull-up resistor (4.7 kOhm)
    num_temps = 36   # number of entries for look-up table

    try:
        opts, args = getopt.getopt(argv, "h", ["help", "rp=", "t1=", "t2=", "t3=", "num-temps="])
@@ -125,13 +125,13 @@ def main(argv):
            num_temps = int(arg)

    t = Thermistor(rp, t1, r1, t2, r2, t3, r3)
    increment = int((ARES-1)/(num_temps-1));
    step = (TMIN-TMAX) / (num_temps-1)
    low_bound = t.temp(ARES-1);
    up_bound = t.temp(1);
    increment = int((ARES - 1) / (num_temps - 1))
    step = int((TMIN - TMAX) / (num_temps - 1))
    low_bound = t.temp(ARES - 1)
    up_bound = t.temp(1)
    min_temp = int(TMIN if TMIN > low_bound else low_bound)
    max_temp = int(TMAX if TMAX < up_bound else up_bound)
    temps = list(range(max_temp, TMIN+step, step));
    temps = list(range(max_temp, TMIN + step, step))

    print("// Thermistor lookup table for Marlin")
    print("// ./createTemperatureLookupMarlin.py --rp=%s --t1=%s:%s --t2=%s:%s --t3=%s:%s --num-temps=%s" % (rp, t1, r1, t2, r2, t3, r3, num_temps))
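For context, the Steinhart-Hart coefficients mentioned above come from the standard thermistor model 1/T = c1 + c2*ln(R) + c3*ln(R)^3, with T in Kelvin and R in Ohms. A sketch of that relation (an illustration, not part of the script):

    import math

    def steinhart_hart_temp(r, c1, c2, c3):
        # Resistance in Ohms -> absolute temperature in Kelvin
        ln_r = math.log(r)
        return 1.0 / (c1 + c2 * ln_r + c3 * ln_r ** 3)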
@@ -22,42 +22,42 @@
|
||||
|
||||
# Generate Marlin TFT Images from bitmaps/PNG/JPG
|
||||
|
||||
import sys,re,struct
|
||||
from PIL import Image,ImageDraw
|
||||
import sys,struct
|
||||
from PIL import Image
|
||||
|
||||
def image2bin(image, output_file):
|
||||
if output_file.endswith(('.c', '.cpp')):
|
||||
f = open(output_file, 'wt')
|
||||
is_cpp = True
|
||||
f.write("const uint16_t image[%d] = {\n" % (image.size[1] * image.size[0]))
|
||||
else:
|
||||
f = open(output_file, 'wb')
|
||||
is_cpp = False
|
||||
pixs = image.load()
|
||||
for y in range(image.size[1]):
|
||||
for x in range(image.size[0]):
|
||||
R = pixs[x, y][0] >> 3
|
||||
G = pixs[x, y][1] >> 2
|
||||
B = pixs[x, y][2] >> 3
|
||||
rgb = (R << 11) | (G << 5) | B
|
||||
if is_cpp:
|
||||
strHex = '0x{0:04X}, '.format(rgb)
|
||||
f.write(strHex)
|
||||
else:
|
||||
f.write(struct.pack("B", (rgb & 0xFF)))
|
||||
f.write(struct.pack("B", (rgb >> 8) & 0xFF))
|
||||
if is_cpp:
|
||||
f.write("\n")
|
||||
if is_cpp:
|
||||
f.write("};\n")
|
||||
f.close()
|
||||
if output_file.endswith(('.c', '.cpp')):
|
||||
f = open(output_file, 'wt')
|
||||
is_cpp = True
|
||||
f.write("const uint16_t image[%d] = {\n" % (image.size[1] * image.size[0]))
|
||||
else:
|
||||
f = open(output_file, 'wb')
|
||||
is_cpp = False
|
||||
pixs = image.load()
|
||||
for y in range(image.size[1]):
|
||||
for x in range(image.size[0]):
|
||||
R = pixs[x, y][0] >> 3
|
||||
G = pixs[x, y][1] >> 2
|
||||
B = pixs[x, y][2] >> 3
|
||||
rgb = (R << 11) | (G << 5) | B
|
||||
if is_cpp:
|
||||
strHex = '0x{0:04X}, '.format(rgb)
|
||||
f.write(strHex)
|
||||
else:
|
||||
f.write(struct.pack("B", (rgb & 0xFF)))
|
||||
f.write(struct.pack("B", (rgb >> 8) & 0xFF))
|
||||
if is_cpp:
|
||||
f.write("\n")
|
||||
if is_cpp:
|
||||
f.write("};\n")
|
||||
f.close()
|
||||
|
||||
if len(sys.argv) <= 2:
|
||||
print("Utility to export a image in Marlin TFT friendly format.")
|
||||
print("It will dump a raw bin RGB565 image or create a CPP file with an array of 16 bit image pixels.")
|
||||
print("Usage: gen-tft-image.py INPUT_IMAGE.(png|bmp|jpg) OUTPUT_FILE.(cpp|bin)")
|
||||
print("Author: rhapsodyv")
|
||||
exit(1)
|
||||
print("Utility to export a image in Marlin TFT friendly format.")
|
||||
print("It will dump a raw bin RGB565 image or create a CPP file with an array of 16 bit image pixels.")
|
||||
print("Usage: gen-tft-image.py INPUT_IMAGE.(png|bmp|jpg) OUTPUT_FILE.(cpp|bin)")
|
||||
print("Author: rhapsodyv")
|
||||
exit(1)
|
||||
|
||||
output_img = sys.argv[2]
|
||||
img = Image.open(sys.argv[1])
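The nested loops above pack each 8-bit RGB pixel into the 16-bit RGB565 layout the TFT expects: 5 bits of red, 6 of green, 5 of blue, written low byte first in the binary case. A small self-contained check of that packing (a sketch, independent of the script itself):

```python
def rgb888_to_rgb565(r, g, b):
    # Drop the low bits of each channel, then pack as RRRRRGGG GGGBBBBB
    return ((r >> 3) << 11) | ((g >> 2) << 5) | (b >> 3)

assert rgb888_to_rgb565(255, 0, 0) == 0xF800   # pure red
assert rgb888_to_rgb565(0, 255, 0) == 0x07E0   # pure green
assert rgb888_to_rgb565(0, 0, 255) == 0x001F   # pure blue
```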
@@ -189,9 +189,7 @@ def Upload(source, target, env):
                                 'BOARD_CREALITY_V427', 'BOARD_CREALITY_V431', 'BOARD_CREALITY_V452', 'BOARD_CREALITY_V453',
                                 'BOARD_CREALITY_V24S1']
    # "upload_random_name": generate a random 8.3 firmware filename to upload
    upload_random_filename = marlin_motherboard in ['BOARD_CREALITY_V4', 'BOARD_CREALITY_V4210', 'BOARD_CREALITY_V422', 'BOARD_CREALITY_V423',
                                                    'BOARD_CREALITY_V427', 'BOARD_CREALITY_V431', 'BOARD_CREALITY_V452', 'BOARD_CREALITY_V453',
                                                    'BOARD_CREALITY_V24S1'] and not marlin_long_filename_host_support
    upload_random_filename = upload_delete_old_bins and not marlin_long_filename_host_support

    try:
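The `upload_random_filename` flag above only states that a random 8.3 firmware name should be used when the host lacks long-filename support; the hunk does not show how the name itself is built. A purely hypothetical sketch of what such a generator could look like (helper name and character set are assumptions, not the script's code):

```python
import random
import string

def random_8_3_name(ext='BIN'):
    # 8 characters from a DOS-safe set, plus a 3-character extension (hypothetical helper)
    stem = ''.join(random.choices(string.ascii_uppercase + string.digits, k=8))
    return f'{stem}.{ext}'        # e.g. 'K7Q2ZP4M.BIN'
```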
@@ -304,7 +302,7 @@ def Upload(source, target, env):
    except KeyboardInterrupt:
        print('Aborted by user')
        if filetransfer: filetransfer.abort()
        if protocol:
            protocol.disconnect()
            protocol.shutdown()
        _RollbackUpload(upload_firmware_target_name)

@@ -314,7 +312,7 @@ def Upload(source, target, env):
    except serial.SerialException as se:
        # This exception is raised only for send_ascii data (not for binary transfer)
        print(f'Serial excepion: {se}, transfer aborted')
        if protocol:
            protocol.disconnect()
            protocol.shutdown()
        _RollbackUpload(upload_firmware_target_name)

@@ -323,7 +321,7 @@ def Upload(source, target, env):

    except MarlinBinaryProtocol.FatalError:
        print('Too many retries, transfer aborted')
        if protocol:
            protocol.disconnect()
            protocol.shutdown()
        _RollbackUpload(upload_firmware_target_name)

@@ -332,7 +330,7 @@ def Upload(source, target, env):

    except Exception as ex:
        print(f"\nException: {ex}, transfer aborted")
        if protocol:
            protocol.disconnect()
            protocol.shutdown()
        _RollbackUpload(upload_firmware_target_name)
@@ -252,7 +252,7 @@ def resolve_path(path):
    while 0 <= path.find('../'):
        end = path.find('../') - 1
        start = path.find('/')
        while 0 <= path.find('/', start) and end > path.find('/', start):
        while 0 <= path.find('/', start) < end:
            start = path.find('/', start) + 1
        path = path[0:start] + path[end + 4:]
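The rewritten inner condition keeps the same behaviour: it advances `start` to the slash just before the `../` segment, then splices that segment out. A worked trace with an assumed example path shows how the loop collapses it:

```python
# Assumed illustrative input; any single '../' segment behaves the same way.
path = 'Marlin/src/../Configuration.h'
while 0 <= path.find('../'):
    end = path.find('../') - 1                  # index just before '../'
    start = path.find('/')
    while 0 <= path.find('/', start) < end:     # new chained-comparison form
        start = path.find('/', start) + 1
    path = path[0:start] + path[end + 4:]       # drops 'src/../'
print(path)                                     # -> Marlin/Configuration.h
```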
@@ -674,7 +674,7 @@ def line_print(line_input):
        if 0 == highlight[1]:
            found_1 = text.find(' ')
            found_tab = text.find('\t')
            if found_1 < 0 or found_1 > found_tab:
            if not (0 <= found_1 <= found_tab):
                found_1 = found_tab
            write_to_screen_queue(text[:found_1 + 1])
            for highlight_2 in highlights:

@@ -684,7 +684,7 @@ def line_print(line_input):
            if found >= 0:
                found_space = text.find(' ', found_1 + 1)
                found_tab = text.find('\t', found_1 + 1)
                if found_space < 0 or found_space > found_tab:
                if not (0 <= found_space <= found_tab):
                    found_space = found_tab
                found_right = text.find(']', found + 1)
                write_to_screen_queue(text[found_1 + 1:found_space + 1], highlight[2])

@@ -701,7 +701,7 @@ def line_print(line_input):
                break
        if did_something == False:
            r_loc = text.find('\r') + 1
            if r_loc > 0 and r_loc < len(text): # need to split this line
            if 0 < r_loc < len(text): # need to split this line
                text = text.split('\r')
                for line in text:
                    if line != '':
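Each of these hunks swaps a compound `or` test for a chained comparison; by De Morgan's law, `x < 0 or x > y` is exactly `not (0 <= x <= y)`, so the handling of `find()` results is unchanged. A tiny check of that equivalence (illustrative values only):

```python
for x, y in [(-1, 5), (3, 5), (7, 5), (5, 5)]:
    assert (x < 0 or x > y) == (not (0 <= x <= y))
```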
@@ -13,7 +13,7 @@
from __future__ import print_function
from __future__ import division

import subprocess,os,sys,platform
import subprocess,os,platform
from SCons.Script import DefaultEnvironment

current_OS = platform.system()

@@ -9,7 +9,7 @@
# Will continue on if a COM port isn't found so that the compilation can be done.
#

import os,sys
import os
from SCons.Script import DefaultEnvironment
import platform
current_OS = platform.system()
@@ -1,3 +1,9 @@
# Where have all the configurations gone?

## https://github.com/MarlinFirmware/Configurations/archive/release-2.0.9.5.zip

Marlin configurations for specific machines are now maintained in their own repository at:

## https://github.com/MarlinFirmware/Configurations/tree/bugfix-2.0.x

Configuration files for use with the nightly `bugfix-2.0.x` branch can be downloaded from:

## https://github.com/MarlinFirmware/Configurations/archive/bugfix-2.0.x.zip
@@ -6,7 +6,7 @@ import yaml

with open('.github/workflows/test-builds.yml') as f:
    github_configuration = yaml.safe_load(f)
test_platforms = github_configuration\
    ['jobs']['test_builds']['strategy']['matrix']['test-platform']
print(' '.join(test_platforms))
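`yaml.safe_load` turns the workflow file into nested dicts and lists, which is what the chained indexing above walks to reach the `test-platform` matrix. A minimal sketch against a trimmed-down document (the platform names here are placeholders, not the real matrix):

```python
import yaml

doc = """
jobs:
  test_builds:
    strategy:
      matrix:
        test-platform: [mega2560, esp32, SOME_STM32_ENV]
"""
cfg = yaml.safe_load(doc)
print(' '.join(cfg['jobs']['test_builds']['strategy']['matrix']['test-platform']))
# -> mega2560 esp32 SOME_STM32_ENV
```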
@@ -20,6 +20,7 @@ build_src_filter = ${common.default_src_filter} +<src/HAL/ESP32>
lib_ignore = NativeEthernet
upload_speed = 500000
monitor_speed = 250000
monitor_filters = colorize, time, send_on_enter, log2file, esp32_exception_decoder
#upload_port = marlinesp.local
#board_build.flash_mode = qio

@@ -16,6 +16,7 @@ boards_dir = buildroot/share/PlatformIO/boards
default_envs = mega2560
include_dir = Marlin
extra_configs =
  Marlin/config.ini
  ini/avr.ini
  ini/due.ini
  ini/esp32.ini

@@ -44,6 +45,7 @@ extra_configs =
build_flags = -g3 -D__MARLIN_FIRMWARE__ -DNDEBUG
  -fmax-errors=5
extra_scripts =
  pre:buildroot/share/PlatformIO/scripts/configuration.py
  pre:buildroot/share/PlatformIO/scripts/common-dependencies.py
  pre:buildroot/share/PlatformIO/scripts/common-cxxflags.py
  pre:buildroot/share/PlatformIO/scripts/preflight-checks.py

@@ -267,17 +269,10 @@ framework = arduino
extra_scripts = ${common.extra_scripts}
build_flags = ${common.build_flags}
lib_deps = ${common.lib_deps}
platform_packages = platformio/tool-dfuutil@^1.11.0
monitor_speed = 250000
monitor_flags =
  --quiet
  --echo
  --eol
  LF
  --filter
  colorize
  --filter
  time
monitor_eol = LF
monitor_echo = yes
monitor_filters = colorize, time, send_on_enter

#
# Just print the dependency tree