Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
706 changes: 405 additions & 301 deletions App/func_loader/fl.c

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions App/func_loader/fl.h
Original file line number Diff line number Diff line change
Expand Up @@ -45,9 +45,9 @@ extern "C" {
#ifndef FL_BUF_SIZE
#define FL_BUF_SIZE 1024
#endif
#ifndef FL_B64_BUF_SIZE
#define FL_B64_BUF_SIZE 2048
#endif

/* Base64 output size: ceil(N/3)*4 + null terminator */
#define FL_B64_BUF_SIZE ((FL_BUF_SIZE + 2) / 3 * 4 + 1)

/* Callback types */
typedef void (*fl_output_cb_t)(void* user, const char* str);
Expand Down
118 changes: 118 additions & 0 deletions App/tests/test_fl.c
Original file line number Diff line number Diff line change
Expand Up @@ -1387,6 +1387,113 @@ void test_loader_cmd_enable_unset_patch(void) {
TEST_ASSERT_EQUAL(0, result);
}

/* ============================================================================
* fl_exec_cmd Tests - Echoback Command
* ============================================================================ */

void test_loader_cmd_echoback_basic(void) {
    setup_loader();
    fl_init(&test_ctx);

    /* Happy path: request a 16-byte echoback and check the reply shape. */
    const char* args[] = {"fl", "--cmd", "echoback", "--len", "16"};
    const int argn = (int)(sizeof(args) / sizeof(args[0]));

    const int rc = fl_exec_cmd(&test_ctx, argn, args);
    TEST_ASSERT_EQUAL(0, rc);

    /* Reply must carry the OK marker plus all three payload fields. */
    TEST_ASSERT(mock_output_contains("FLOK"));
    TEST_ASSERT(mock_output_contains("ECHOBACK 16 bytes"));
    TEST_ASSERT(mock_output_contains("crc=0x"));
    TEST_ASSERT(mock_output_contains("data="));
}

void test_loader_cmd_echoback_verify_pattern(void) {
    setup_loader();
    fl_init(&test_ctx);

    /* A 4-byte request yields the counting pattern {0x00, 0x01, 0x02, 0x03}. */
    const char* args[] = {"fl", "--cmd", "echoback", "--len", "4"};
    const int argn = (int)(sizeof(args) / sizeof(args[0]));

    const int rc = fl_exec_cmd(&test_ctx, argn, args);
    TEST_ASSERT_EQUAL(0, rc);
    TEST_ASSERT(mock_output_contains("ECHOBACK 4 bytes"));

    /* Base64 of {0x00, 0x01, 0x02, 0x03} is "AAECAw==". */
    TEST_ASSERT(mock_output_contains("AAECAw=="));
}

void test_loader_cmd_echoback_verify_crc(void) {
    setup_loader();
    fl_init(&test_ctx);

    /* Single-byte payload {0x00}; CRC16 runs with init 0xFFFF.
     * NOTE(review): only the "crc=0x" prefix is checked here — the exact
     * CRC value is not pinned because the variant lives in fl.c. */
    const char* args[] = {"fl", "--cmd", "echoback", "--len", "1"};
    const int argn = (int)(sizeof(args) / sizeof(args[0]));

    const int rc = fl_exec_cmd(&test_ctx, argn, args);
    TEST_ASSERT_EQUAL(0, rc);
    TEST_ASSERT(mock_output_contains("ECHOBACK 1 bytes"));
    TEST_ASSERT(mock_output_contains("crc=0x"));
}

void test_loader_cmd_echoback_max_len(void) {
    setup_loader();
    fl_init(&test_ctx);

    /* Boundary: exactly FL_BUF_SIZE (1024) bytes must still succeed. */
    const char* args[] = {"fl", "--cmd", "echoback", "--len", "1024"};
    const int argn = (int)(sizeof(args) / sizeof(args[0]));

    const int rc = fl_exec_cmd(&test_ctx, argn, args);
    TEST_ASSERT_EQUAL(0, rc);
    TEST_ASSERT(mock_output_contains("ECHOBACK 1024 bytes"));
}

void test_loader_cmd_echoback_zero_len(void) {
    setup_loader();
    fl_init(&test_ctx);

    /* A zero-length request is rejected with an error reply, not a crash;
     * fl_exec_cmd itself still returns 0 (error is reported in-band). */
    const char* args[] = {"fl", "--cmd", "echoback", "--len", "0"};
    const int argn = (int)(sizeof(args) / sizeof(args[0]));

    const int rc = fl_exec_cmd(&test_ctx, argn, args);
    TEST_ASSERT_EQUAL(0, rc);
    TEST_ASSERT(mock_output_contains("FLERR"));
    TEST_ASSERT(mock_output_contains("Invalid length"));
}

void test_loader_cmd_echoback_negative_len(void) {
    setup_loader();
    fl_init(&test_ctx);

    /* Negative length is invalid input and must produce an in-band error. */
    const char* args[] = {"fl", "--cmd", "echoback", "--len", "-1"};
    const int argn = (int)(sizeof(args) / sizeof(args[0]));

    const int rc = fl_exec_cmd(&test_ctx, argn, args);
    TEST_ASSERT_EQUAL(0, rc);
    TEST_ASSERT(mock_output_contains("FLERR"));
    TEST_ASSERT(mock_output_contains("Invalid length"));
}

void test_loader_cmd_echoback_over_max(void) {
    setup_loader();
    fl_init(&test_ctx);

    /* One past the buffer limit (FL_BUF_SIZE + 1 = 1025) must be refused. */
    const char* args[] = {"fl", "--cmd", "echoback", "--len", "1025"};
    const int argn = (int)(sizeof(args) / sizeof(args[0]));

    const int rc = fl_exec_cmd(&test_ctx, argn, args);
    TEST_ASSERT_EQUAL(0, rc);
    TEST_ASSERT(mock_output_contains("FLERR"));
    TEST_ASSERT(mock_output_contains("Invalid length"));
}

void test_loader_cmd_echoback_default_len(void) {
    setup_loader();
    fl_init(&test_ctx);

    /* Omitting --len falls back to the default length of 64 bytes. */
    const char* args[] = {"fl", "--cmd", "echoback"};
    const int argn = (int)(sizeof(args) / sizeof(args[0]));

    const int rc = fl_exec_cmd(&test_ctx, argn, args);
    TEST_ASSERT_EQUAL(0, rc);
    TEST_ASSERT(mock_output_contains("ECHOBACK 64 bytes"));
}

/* ============================================================================
* Test Runner
* ============================================================================ */
Expand Down Expand Up @@ -1507,4 +1614,15 @@ void run_loader_tests(void) {
RUN_TEST(test_loader_cmd_enable_invalid_comp);
RUN_TEST(test_loader_cmd_enable_unset_patch);
TEST_SUITE_END();

TEST_SUITE_BEGIN("func_loader - Echoback Command");
RUN_TEST(test_loader_cmd_echoback_basic);
RUN_TEST(test_loader_cmd_echoback_verify_pattern);
RUN_TEST(test_loader_cmd_echoback_verify_crc);
RUN_TEST(test_loader_cmd_echoback_max_len);
RUN_TEST(test_loader_cmd_echoback_zero_len);
RUN_TEST(test_loader_cmd_echoback_negative_len);
RUN_TEST(test_loader_cmd_echoback_over_max);
RUN_TEST(test_loader_cmd_echoback_default_len);
TEST_SUITE_END();
}
3 changes: 2 additions & 1 deletion Tools/WebServer/app/routes/connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -198,7 +198,8 @@ def api_status():
"compile_commands_path": device.compile_commands_path,
"watch_dirs": device.watch_dirs,
"patch_mode": device.patch_mode,
"chunk_size": device.chunk_size,
"upload_chunk_size": device.upload_chunk_size,
"download_chunk_size": device.download_chunk_size,
"auto_connect": device.auto_connect,
"auto_compile": device.auto_compile,
"enable_decompile": device.enable_decompile,
Expand Down
43 changes: 32 additions & 11 deletions Tools/WebServer/app/routes/fpb.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,10 +106,11 @@ def do_ping():
@bp.route("/fpb/test-serial", methods=["POST"])
def api_fpb_test_serial():
"""
Test serial throughput to find max single-transfer size.
Test serial throughput with 3-phase probing.

Uses x2 stepping to probe device's receive buffer limit.
Returns max working size and recommended chunk size.
Phase 1: TX Fragment probe - detect if fragmentation is needed.
Phase 2: Upload chunk probe - find device shell buffer limit.
Phase 3: Download chunk probe - find max reliable download size.
"""
log_info, log_success, log_error, _, get_fpb_inject, _ = _get_helpers()

Expand All @@ -120,30 +121,50 @@ def api_fpb_test_serial():

fpb = get_fpb_inject()

log_info("Starting serial throughput test...")
log_info("Starting 3-phase serial throughput test...")

def do_test():
return fpb.test_serial_throughput(
start_size=start_size, max_size=max_size, timeout=timeout
)

result = _run_serial_op(do_test, timeout=30.0)
result = _run_serial_op(do_test, timeout=60.0)

if "error" in result and result.get("error"):
return jsonify({"success": False, "error": result["error"]})

if result.get("success"):
# Phase 1 summary
if result.get("fragment_needed"):
log_info("Phase 1: TX fragmentation may be needed")
else:
log_info("Phase 1: TX fragmentation not needed")

# Phase 2 summary
max_working = result.get("max_working_size", 0)
failed_at = result.get("failed_size", 0)
recommended = result.get("recommended_chunk_size", 64)
rec_upload = result.get("recommended_upload_chunk_size", 64)

if failed_at > 0:
log_info(
f"Max working size: {max_working} bytes, failed at: {failed_at} bytes"
)
log_info(f"Phase 2: Upload max={max_working}B, failed at {failed_at}B")
else:
log_success(f"All tests passed up to {max_working} bytes")
log_info(f"Recommended chunk size: {recommended} bytes")
log_success(f"Phase 2: All upload tests passed up to {max_working}B")
log_info(f"Recommended upload chunk: {rec_upload}B")

# Phase 3 summary
rec_download = result.get("recommended_download_chunk_size", 1024)
phases = result.get("phases", {})
dl_phase = phases.get("download", {})
if dl_phase.get("skipped"):
log_info(f"Phase 3: Skipped ({dl_phase.get('skip_reason', 'unknown')})")
else:
dl_max = dl_phase.get("max_working_size", 0)
dl_fail = dl_phase.get("failed_size", 0)
if dl_fail > 0:
log_info(f"Phase 3: Download max={dl_max}B, failed at {dl_fail}B")
else:
log_success(f"Phase 3: All download tests passed up to {dl_max}B")
log_info(f"Recommended download chunk: {rec_download}B")

return jsonify(result)

Expand Down
6 changes: 5 additions & 1 deletion Tools/WebServer/app/routes/symbols.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,11 @@ def _dynamic_timeout(size):
Assumes ~128 bytes/chunk with ~2s per chunk worst case,
plus generous headroom.
"""
chunk_size = state.device.chunk_size if state.device.chunk_size > 0 else 128
chunk_size = (
state.device.download_chunk_size
if state.device.download_chunk_size > 0
else 1024
)
num_chunks = max(1, (size + chunk_size - 1) // chunk_size)
return max(10.0, num_chunks * 3.0)

Expand Down
21 changes: 13 additions & 8 deletions Tools/WebServer/app/routes/transfer.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,14 +73,19 @@ def _get_file_transfer(log_callback=None):
"""Get FileTransfer instance."""
*_, get_fpb_inject = _get_helpers()
fpb = get_fpb_inject()
chunk_size = state.device.chunk_size or 256
chunk_size = state.device.upload_chunk_size or 128
download_chunk_size = state.device.download_chunk_size or 1024
max_retries = (
state.device.transfer_max_retries
if hasattr(state.device, "transfer_max_retries")
else 10
)
return FileTransfer(
fpb, chunk_size=chunk_size, max_retries=max_retries, log_callback=log_callback
fpb,
upload_chunk_size=chunk_size,
download_chunk_size=download_chunk_size,
max_retries=max_retries,
log_callback=log_callback,
)


Expand Down Expand Up @@ -395,7 +400,7 @@ def do_upload():
return

uploaded = 0
chunk_size = ft.chunk_size
chunk_size = ft.upload_chunk_size
ft.reset_stats() # Reset stats before transfer
while uploaded < total_size:
# Check cancel before each chunk
Expand Down Expand Up @@ -433,8 +438,8 @@ def do_upload():
ft.fclose()
return

# Verify CRC if enabled
if state.device.verify_crc and total_size > 0:
# Always verify CRC
if total_size > 0:
expected_crc = crc16(file_data)
success, dev_size, dev_crc = ft.fcrc(total_size)
if not success:
Expand Down Expand Up @@ -646,7 +651,7 @@ def do_download():
return

file_data = b""
chunk_size = ft.chunk_size
chunk_size = ft.download_chunk_size
current_offset = 0
ft.reset_stats() # Reset stats before transfer
while True:
Expand Down Expand Up @@ -690,8 +695,8 @@ def do_download():
ft.fclose()
return

# Verify CRC if enabled
if state.device.verify_crc and len(file_data) > 0:
# Always verify CRC
if len(file_data) > 0:
local_crc = crc16(file_data)
success, dev_size, dev_crc = ft.fcrc(len(file_data))
if not success:
Expand Down
28 changes: 19 additions & 9 deletions Tools/WebServer/cli/fpb_cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,9 +56,10 @@ def __init__(self):
self.inject_base = 0x20001000
self.cached_slots = None # Cache for slot state
self.slot_update_id = 0
self.chunk_size = 128 # Default chunk size for upload
self.tx_chunk_size = 0 # 0 = disabled, >0 = chunk size for TX
self.tx_chunk_delay = 0.005 # Delay between TX chunks (seconds)
self.upload_chunk_size = 128 # Default chunk size for upload
self.download_chunk_size = 1024 # Default chunk size for download
self.serial_tx_fragment_size = 0 # 0 = disabled, >0 = fragment size for TX
self.serial_tx_fragment_delay = 0.002 # Delay between TX fragments (seconds)
self.transfer_max_retries = 10 # Max retries for file transfer

def add_tool_log(self, message):
Expand Down Expand Up @@ -98,7 +99,7 @@ def __init__(
elf_path: Optional[str] = None,
compile_commands: Optional[str] = None,
tx_chunk_size: int = 0,
tx_chunk_delay: float = 0.005,
tx_chunk_delay: float = 0.002,
max_retries: int = 10,
):
self.verbose = verbose
Expand All @@ -107,8 +108,8 @@ def __init__(
self._device_state = DeviceState()
self._device_state.elf_path = elf_path
self._device_state.compile_commands_path = compile_commands
self._device_state.tx_chunk_size = tx_chunk_size
self._device_state.tx_chunk_delay = tx_chunk_delay
self._device_state.serial_tx_fragment_size = tx_chunk_size
self._device_state.serial_tx_fragment_delay = tx_chunk_delay
self._device_state.transfer_max_retries = max_retries
self._fpb = FPBInject(self._device_state)

Expand Down Expand Up @@ -526,7 +527,11 @@ def file_list(self, path: str = "/") -> None:
raise FPBCLIError("No device connected.")
from core.file_transfer import FileTransfer

ft = FileTransfer(self._fpb, chunk_size=self._device_state.chunk_size)
ft = FileTransfer(
self._fpb,
upload_chunk_size=self._device_state.upload_chunk_size,
download_chunk_size=self._device_state.download_chunk_size,
)
success, entries = ft.flist(path)
if not success:
raise FPBCLIError(f"Failed to list directory: {path}")
Expand All @@ -541,7 +546,11 @@ def file_stat(self, path: str) -> None:
raise FPBCLIError("No device connected.")
from core.file_transfer import FileTransfer

ft = FileTransfer(self._fpb, chunk_size=self._device_state.chunk_size)
ft = FileTransfer(
self._fpb,
upload_chunk_size=self._device_state.upload_chunk_size,
download_chunk_size=self._device_state.download_chunk_size,
)
success, stat = ft.fstat(path)
if not success:
raise FPBCLIError(f"Failed to stat: {stat.get('error', 'unknown')}")
Expand All @@ -559,7 +568,8 @@ def file_download(self, remote_path: str, local_path: str) -> None:

ft = FileTransfer(
self._fpb,
chunk_size=self._device_state.chunk_size,
upload_chunk_size=self._device_state.upload_chunk_size,
download_chunk_size=self._device_state.download_chunk_size,
max_retries=self._device_state.transfer_max_retries,
)
success, data, msg = ft.download(remote_path)
Expand Down
Loading
Loading