feat: use api_compat for torrent .resume files (#7932)

* test: add benc2cpp.py, a benc beautifier for hardcoded cpp test cases

* test: add .resume file unit test

* refactor: use api_compat::convert_incoming_data() and convert_outgoing_data() on .resume files

* chore: mark TR_KEY_peers2_6_kebab as APICOMPAT

* chore: mark TR_KEY_speed_Bps_kebab as APICOMPAT

* chore: mark TR_KEY_use_speed_limit_kebab as APICOMPAT

* chore: mark as APICOMPAT: TR_KEY_use_global_speed_limit_kebab

* chore: mark as APICOMPAT: TR_KEY_ratio_mode_kebab

* chore: mark as APICOMPAT: TR_KEY_idle_limit_kebab

* chore: mark as APICOMPAT: TR_KEY_idle_mode_kebab

* chore: mark as APICOMPAT: TR_KEY_max_peers_kebab

* chore: mark as APICOMPAT: TR_KEY_added_date_kebab

* chore: mark as APICOMPAT: TR_KEY_seeding_time_seconds_kebab

* chore: mark as APICOMPAT: TR_KEY_downloading_time_seconds_kebab

* chore: mark as APICOMPAT: TR_KEY_bandwidth_priority

* chore: mark as APICOMPAT: TR_KEY_done_date_kebab

* chore: mark as APICOMPAT: TR_KEY_activity_date_kebab

* chore: remove remaining _kebab cases from resume.cc

* chore: clang-format
Charles Kerr authored on 2025-12-15 09:43:40 -06:00, committed by GitHub
parent 725655653a
commit 109bc70511
5 changed files with 642 additions and 56 deletions
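At a glance, the change routes every .resume read through api_compat::convert_incoming_data() and every write through convert_outgoing_data(), so the legacy kebab-case (tr4) spellings are normalized to the current snake_case keys in one place instead of at every lookup. A minimal sketch of the round-trip, assuming tr_variant_serde's parse_file()/to_file() helpers and hypothetical resume_path/map variables (the real code is in resume.cc's load_from_file() and save(), shown below):

    // sketch only: mirrors the two calls this commit adds to resume.cc
    auto serde = tr_variant_serde::benc();
    if (auto otop = serde.parse_file(resume_path); otop) // resume_path is hypothetical
    {
        // normalize legacy kebab-case keys to current snake_case keys, once
        otop = libtransmission::api_compat::convert_incoming_data(*otop);
        // ... the per-field loaders can now probe a single key apiece
    }

    // saving: map is the tr_variant::Map built by the save_*() helpers
    auto const out = libtransmission::api_compat::convert_outgoing_data(std::move(map));
    serde.to_file(out, resume_path);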


@@ -246,8 +246,8 @@ auto constexpr RpcKeys = std::array<ApiKey, 212U>{ {
 } };
 
 auto constexpr SessionKeys = std::array<ApiKey, 139U>{ {
-    { TR_KEY_activity_date, TR_KEY_activity_date_kebab }, // TODO(ckerr) legacy duplicate
-    { TR_KEY_added_date, TR_KEY_added_date_kebab }, // TODO(ckerr) legacy duplicate
+    { TR_KEY_activity_date, TR_KEY_activity_date_kebab_APICOMPAT },
+    { TR_KEY_added_date, TR_KEY_added_date_kebab_APICOMPAT },
     { TR_KEY_alt_speed_down, TR_KEY_alt_speed_down_kebab },
     { TR_KEY_alt_speed_enabled, TR_KEY_alt_speed_enabled_kebab },
     { TR_KEY_alt_speed_time_begin, TR_KEY_alt_speed_time_begin_kebab },
@@ -259,7 +259,7 @@ auto constexpr SessionKeys = std::array<ApiKey, 139U>{ {
     { TR_KEY_announce_ip_enabled, TR_KEY_announce_ip_enabled_kebab },
     { TR_KEY_anti_brute_force_enabled, TR_KEY_anti_brute_force_enabled_kebab },
     { TR_KEY_anti_brute_force_threshold, TR_KEY_anti_brute_force_threshold_kebab },
-    { TR_KEY_bandwidth_priority, TR_KEY_bandwidth_priority_kebab }, // TODO(ckerr) legacy duplicate
+    { TR_KEY_bandwidth_priority, TR_KEY_bandwidth_priority_kebab_APICOMPAT },
     { TR_KEY_bind_address_ipv4, TR_KEY_bind_address_ipv4_kebab },
     { TR_KEY_bind_address_ipv6, TR_KEY_bind_address_ipv6_kebab },
     { TR_KEY_blocklist_date, TR_KEY_blocklist_date_kebab },
@@ -272,18 +272,18 @@ auto constexpr SessionKeys = std::array<ApiKey, 139U>{ {
     { TR_KEY_details_window_height, TR_KEY_details_window_height_kebab },
     { TR_KEY_details_window_width, TR_KEY_details_window_width_kebab },
     { TR_KEY_dht_enabled, TR_KEY_dht_enabled_kebab },
-    { TR_KEY_done_date, TR_KEY_done_date_kebab }, // TODO(ckerr) legacy duplicate
+    { TR_KEY_done_date, TR_KEY_done_date_kebab_APICOMPAT },
     { TR_KEY_download_dir, TR_KEY_download_dir_kebab }, // TODO(ckerr) legacy duplicate
     { TR_KEY_download_queue_enabled, TR_KEY_download_queue_enabled_kebab },
     { TR_KEY_download_queue_size, TR_KEY_download_queue_size_kebab },
     { TR_KEY_downloaded_bytes, TR_KEY_downloaded_bytes_kebab_APICOMPAT },
-    { TR_KEY_downloading_time_seconds, TR_KEY_downloading_time_seconds_kebab },
+    { TR_KEY_downloading_time_seconds, TR_KEY_downloading_time_seconds_kebab_APICOMPAT },
     { TR_KEY_files_added, TR_KEY_files_added_kebab_APICOMPAT },
     { TR_KEY_filter_mode, TR_KEY_filter_mode_kebab },
     { TR_KEY_filter_text, TR_KEY_filter_text_kebab },
     { TR_KEY_filter_trackers, TR_KEY_filter_trackers_kebab },
-    { TR_KEY_idle_limit, TR_KEY_idle_limit_kebab },
-    { TR_KEY_idle_mode, TR_KEY_idle_mode_kebab },
+    { TR_KEY_idle_limit, TR_KEY_idle_limit_kebab_APICOMPAT },
+    { TR_KEY_idle_mode, TR_KEY_idle_mode_kebab_APICOMPAT },
     { TR_KEY_idle_seeding_limit, TR_KEY_idle_seeding_limit_kebab },
     { TR_KEY_idle_seeding_limit_enabled, TR_KEY_idle_seeding_limit_enabled_kebab },
     { TR_KEY_incomplete_dir, TR_KEY_incomplete_dir_kebab },
@@ -296,7 +296,7 @@ auto constexpr SessionKeys = std::array<ApiKey, 139U>{ {
     { TR_KEY_main_window_width, TR_KEY_main_window_width_kebab },
     { TR_KEY_main_window_x, TR_KEY_main_window_x_kebab },
     { TR_KEY_main_window_y, TR_KEY_main_window_y_kebab },
-    { TR_KEY_max_peers, TR_KEY_max_peers_kebab },
+    { TR_KEY_max_peers, TR_KEY_max_peers_kebab_APICOMPAT },
     { TR_KEY_message_level, TR_KEY_message_level_kebab },
     { TR_KEY_open_dialog_dir, TR_KEY_open_dialog_dir_kebab },
     { TR_KEY_peer_congestion_algorithm, TR_KEY_peer_congestion_algorithm_kebab },
@@ -307,7 +307,7 @@ auto constexpr SessionKeys = std::array<ApiKey, 139U>{ {
     { TR_KEY_peer_port_random_low, TR_KEY_peer_port_random_low_kebab },
     { TR_KEY_peer_port_random_on_start, TR_KEY_peer_port_random_on_start_kebab },
     { TR_KEY_peer_socket_tos, TR_KEY_peer_socket_tos_kebab },
-    { TR_KEY_peers2_6, TR_KEY_peers2_6_kebab },
+    { TR_KEY_peers2_6, TR_KEY_peers2_6_kebab_APICOMPAT },
     { TR_KEY_pex_enabled, TR_KEY_pex_enabled_kebab },
     { TR_KEY_port_forwarding_enabled, TR_KEY_port_forwarding_enabled_kebab },
     { TR_KEY_prompt_before_exit, TR_KEY_prompt_before_exit_kebab },
@@ -315,7 +315,7 @@ auto constexpr SessionKeys = std::array<ApiKey, 139U>{ {
     { TR_KEY_queue_stalled_minutes, TR_KEY_queue_stalled_minutes_kebab },
     { TR_KEY_ratio_limit, TR_KEY_ratio_limit_kebab },
     { TR_KEY_ratio_limit_enabled, TR_KEY_ratio_limit_enabled_kebab },
-    { TR_KEY_ratio_mode, TR_KEY_ratio_mode_kebab },
+    { TR_KEY_ratio_mode, TR_KEY_ratio_mode_kebab_APICOMPAT },
     { TR_KEY_read_clipboard, TR_KEY_read_clipboard_kebab },
     { TR_KEY_remote_session_enabled, TR_KEY_remote_session_enabled_kebab },
     { TR_KEY_remote_session_host, TR_KEY_remote_session_host_kebab },
@@ -347,7 +347,7 @@ auto constexpr SessionKeys = std::array<ApiKey, 139U>{ {
     { TR_KEY_seconds_active, TR_KEY_seconds_active_kebab_APICOMPAT },
     { TR_KEY_seed_queue_enabled, TR_KEY_seed_queue_enabled_kebab },
     { TR_KEY_seed_queue_size, TR_KEY_seed_queue_size_kebab },
-    { TR_KEY_seeding_time_seconds, TR_KEY_seeding_time_seconds_kebab },
+    { TR_KEY_seeding_time_seconds, TR_KEY_seeding_time_seconds_kebab_APICOMPAT },
     { TR_KEY_session_count, TR_KEY_session_count_kebab_APICOMPAT },
     { TR_KEY_show_backup_trackers, TR_KEY_show_backup_trackers_kebab },
     { TR_KEY_show_extra_peer_details, TR_KEY_show_extra_peer_details_kebab },
@@ -360,7 +360,7 @@ auto constexpr SessionKeys = std::array<ApiKey, 139U>{ {
     { TR_KEY_sleep_per_seconds_during_verify, TR_KEY_sleep_per_seconds_during_verify_kebab },
     { TR_KEY_sort_mode, TR_KEY_sort_mode_kebab },
     { TR_KEY_sort_reversed, TR_KEY_sort_reversed_kebab },
-    { TR_KEY_speed_Bps, TR_KEY_speed_Bps_kebab },
+    { TR_KEY_speed_Bps, TR_KEY_speed_Bps_kebab_APICOMPAT },
     { TR_KEY_speed_limit_down, TR_KEY_speed_limit_down_kebab },
     { TR_KEY_speed_limit_down_enabled, TR_KEY_speed_limit_down_enabled_kebab },
     { TR_KEY_speed_limit_up, TR_KEY_speed_limit_up_kebab },
@@ -379,8 +379,8 @@ auto constexpr SessionKeys = std::array<ApiKey, 139U>{ {
     { TR_KEY_trash_original_torrent_files, TR_KEY_trash_original_torrent_files_kebab },
     { TR_KEY_upload_slots_per_torrent, TR_KEY_upload_slots_per_torrent_kebab },
     { TR_KEY_uploaded_bytes, TR_KEY_uploaded_bytes_kebab_APICOMPAT },
-    { TR_KEY_use_global_speed_limit, TR_KEY_use_global_speed_limit_kebab },
-    { TR_KEY_use_speed_limit, TR_KEY_use_speed_limit_kebab },
+    { TR_KEY_use_global_speed_limit, TR_KEY_use_global_speed_limit_kebab_APICOMPAT },
+    { TR_KEY_use_speed_limit, TR_KEY_use_speed_limit_kebab_APICOMPAT },
     { TR_KEY_utp_enabled, TR_KEY_utp_enabled_kebab },
     { TR_KEY_watch_dir, TR_KEY_watch_dir_kebab },
     { TR_KEY_watch_dir_enabled, TR_KEY_watch_dir_enabled_kebab },
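Each ApiKey row pairs the current key with its legacy kebab-case alias; the _APICOMPAT suffix flags aliases kept purely for compatibility. A hedged sketch of how such a pair table can drive key normalization, with std::map standing in for the real variant map type (an assumption; convert_incoming_data()'s actual implementation is not part of this diff):

    #include <map>
    #include <utility>

    // hypothetical helper: re-file values stored under legacy spellings,
    // assuming ApiKey is an aggregate of { current, legacy } quarks
    void rename_legacy_keys(std::map<tr_quark, tr_variant>& map)
    {
        for (auto const& [current, legacy] : SessionKeys)
        {
            if (auto it = map.find(legacy); it != std::end(map))
            {
                map.insert_or_assign(current, std::move(it->second));
                map.erase(it);
            }
        }
    }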


@@ -35,11 +35,11 @@ enum // NOLINT(performance-enum-size)
     TR_KEY_NONE, /* represented as an empty string */
     TR_KEY_active_torrent_count_camel, /* rpc (deprecated) */
     TR_KEY_active_torrent_count, /* rpc */
-    TR_KEY_activity_date_kebab, /* resume file (legacy) */
+    TR_KEY_activity_date_kebab_APICOMPAT,
     TR_KEY_activity_date_camel, /* rpc (deprecated) */
     TR_KEY_activity_date, /* rpc, resume file */
     TR_KEY_added, /* pex */
-    TR_KEY_added_date_kebab, /* resume file (legacy) */
+    TR_KEY_added_date_kebab_APICOMPAT, /* resume file (legacy) */
     TR_KEY_added_f, /* pex */
     TR_KEY_added6, /* pex */
     TR_KEY_added6_f, /* pex */
@@ -74,7 +74,7 @@ enum // NOLINT(performance-enum-size)
     TR_KEY_anti_brute_force_threshold, /* rpc, settings */
     TR_KEY_arguments, /* rpc */
     TR_KEY_availability, // rpc
-    TR_KEY_bandwidth_priority_kebab,
+    TR_KEY_bandwidth_priority_kebab_APICOMPAT,
     TR_KEY_bandwidth_priority_camel,
     TR_KEY_bandwidth_priority,
     TR_KEY_begin_piece,
@@ -143,7 +143,7 @@ enum // NOLINT(performance-enum-size)
     TR_KEY_dht_enabled_kebab,
     TR_KEY_dht_enabled,
     TR_KEY_dnd,
-    TR_KEY_done_date_kebab,
+    TR_KEY_done_date_kebab_APICOMPAT,
     TR_KEY_done_date_camel,
     TR_KEY_done_date,
     TR_KEY_download_dir_kebab,
@@ -170,7 +170,7 @@ enum // NOLINT(performance-enum-size)
     TR_KEY_downloaded_bytes,
     TR_KEY_downloaded_ever,
     TR_KEY_downloader_count,
-    TR_KEY_downloading_time_seconds_kebab,
+    TR_KEY_downloading_time_seconds_kebab_APICOMPAT,
     TR_KEY_downloading_time_seconds,
     TR_KEY_dropped,
     TR_KEY_dropped6,
@@ -246,8 +246,8 @@ enum // NOLINT(performance-enum-size)
     TR_KEY_host,
     TR_KEY_id,
     TR_KEY_id_timestamp,
-    TR_KEY_idle_limit_kebab,
-    TR_KEY_idle_mode_kebab,
+    TR_KEY_idle_limit_kebab_APICOMPAT,
+    TR_KEY_idle_mode_kebab_APICOMPAT,
     TR_KEY_idle_seeding_limit_kebab,
     TR_KEY_idle_seeding_limit_enabled_kebab,
     TR_KEY_idle_limit,
@@ -333,7 +333,7 @@ enum // NOLINT(performance-enum-size)
     TR_KEY_main_window_y,
     TR_KEY_manual_announce_time_camel,
     TR_KEY_manual_announce_time,
-    TR_KEY_max_peers_kebab,
+    TR_KEY_max_peers_kebab_APICOMPAT,
     TR_KEY_max_connected_peers_camel,
     TR_KEY_max_connected_peers,
     TR_KEY_max_peers,
@@ -392,7 +392,7 @@ enum // NOLINT(performance-enum-size)
     TR_KEY_peer_socket_tos,
     TR_KEY_peers,
     TR_KEY_peers2,
-    TR_KEY_peers2_6_kebab,
+    TR_KEY_peers2_6_kebab_APICOMPAT,
     TR_KEY_peers2_6,
     TR_KEY_peers_connected_camel,
     TR_KEY_peers_from_camel,
@@ -464,7 +464,7 @@ enum // NOLINT(performance-enum-size)
     TR_KEY_rate_upload,
     TR_KEY_ratio_limit_kebab,
     TR_KEY_ratio_limit_enabled_kebab,
-    TR_KEY_ratio_mode_kebab,
+    TR_KEY_ratio_mode_kebab_APICOMPAT,
     TR_KEY_ratio_limit,
     TR_KEY_ratio_limit_enabled,
     TR_KEY_ratio_mode,
@@ -564,7 +564,7 @@ enum // NOLINT(performance-enum-size)
     TR_KEY_seed_ratio_mode,
     TR_KEY_seeder_count_camel,
     TR_KEY_seeder_count,
-    TR_KEY_seeding_time_seconds_kebab,
+    TR_KEY_seeding_time_seconds_kebab_APICOMPAT,
     TR_KEY_seeding_time_seconds,
     TR_KEY_sequential_download,
     TR_KEY_sequential_download_from_piece,
@@ -613,7 +613,7 @@ enum // NOLINT(performance-enum-size)
     TR_KEY_sort_reversed,
     TR_KEY_source,
     TR_KEY_speed,
-    TR_KEY_speed_Bps_kebab,
+    TR_KEY_speed_Bps_kebab_APICOMPAT,
     TR_KEY_speed_bytes_kebab,
     TR_KEY_speed_limit_down_kebab,
     TR_KEY_speed_limit_down_enabled_kebab,
@@ -722,8 +722,8 @@ enum // NOLINT(performance-enum-size)
     TR_KEY_uploaded_bytes,
     TR_KEY_uploaded_ever,
     TR_KEY_url_list,
-    TR_KEY_use_global_speed_limit_kebab,
-    TR_KEY_use_speed_limit_kebab,
+    TR_KEY_use_global_speed_limit_kebab_APICOMPAT,
+    TR_KEY_use_speed_limit_kebab_APICOMPAT,
     TR_KEY_use_global_speed_limit,
     TR_KEY_use_speed_limit,
     TR_KEY_ut_holepunch,
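The quark rename is purely a source-level marker: the interned strings are unchanged, so the _APICOMPAT quarks still resolve to the kebab-case spellings seen in the benc fixtures below. An illustration, assuming the usual tr_quark_get_string_view() accessor and that the string table is untouched by the rename:

    #include "libtransmission/quark.h"

    auto const legacy = tr_quark_get_string_view(TR_KEY_activity_date_kebab_APICOMPAT); // "activity-date"
    auto const current = tr_quark_get_string_view(TR_KEY_activity_date);                // "activity_date"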


@@ -16,6 +16,7 @@
 #include "libtransmission/transmission.h"
 
+#include "libtransmission/api-compat.h"
 #include "libtransmission/bitfield.h"
 #include "libtransmission/error.h"
 #include "libtransmission/file.h"
@@ -74,7 +75,7 @@ auto load_peers(tr_variant::Map const& map, tr_torrent* tor)
         ret = tr_resume::Peers;
     }
 
-    if (auto const* l = map.find_if<tr_variant::Vector>({ TR_KEY_peers2_6, TR_KEY_peers2_6_kebab }); l != nullptr)
+    if (auto const* l = map.find_if<tr_variant::Vector>(TR_KEY_peers2_6))
     {
         auto const num_added = add_peers(tor, *l);
         tr_logAddTraceTor(tor, fmt::format("Loaded {} IPv6 peers from resume file", num_added));
@@ -266,7 +267,7 @@ void save_idle_limits(tr_variant::Map& map, tr_torrent const* tor)
 
 void load_single_speed_limit(tr_variant::Map const& map, tr_direction dir, tr_torrent* tor)
 {
-    if (auto const i = map.value_if<int64_t>({ TR_KEY_speed_Bps, TR_KEY_speed_Bps_kebab }); i)
+    if (auto const i = map.value_if<int64_t>(TR_KEY_speed_Bps))
     {
         tor->set_speed_limit(dir, Speed{ *i, Speed::Units::Byps });
     }
@@ -275,12 +276,12 @@ void load_single_speed_limit(tr_variant::Map const& map, tr_direction dir, tr_torrent* tor)
         tor->set_speed_limit(dir, Speed{ *i2, Speed::Units::KByps });
     }
 
-    if (auto const b = map.value_if<bool>({ TR_KEY_use_speed_limit, TR_KEY_use_speed_limit_kebab }); b)
+    if (auto const b = map.value_if<bool>(TR_KEY_use_speed_limit))
     {
         tor->use_speed_limit(dir, *b);
     }
 
-    if (auto const b = map.value_if<bool>({ TR_KEY_use_global_speed_limit, TR_KEY_use_global_speed_limit_kebab }); b)
+    if (auto const b = map.value_if<bool>(TR_KEY_use_global_speed_limit))
     {
         tr_torrentUseSessionLimits(tor, *b);
     }
@@ -290,15 +291,13 @@ auto load_speed_limits(tr_variant::Map const& map, tr_torrent* tor)
 {
     auto ret = tr_resume::fields_t{};
 
-    if (auto const* child = map.find_if<tr_variant::Map>({ TR_KEY_speed_limit_up, TR_KEY_speed_limit_up_kebab });
-        child != nullptr)
+    if (auto const* child = map.find_if<tr_variant::Map>(TR_KEY_speed_limit_up))
     {
         load_single_speed_limit(*child, TR_UP, tor);
         ret = tr_resume::Speedlimit;
     }
 
-    if (auto const* child = map.find_if<tr_variant::Map>({ TR_KEY_speed_limit_down, TR_KEY_speed_limit_down_kebab });
-        child != nullptr)
+    if (auto const* child = map.find_if<tr_variant::Map>(TR_KEY_speed_limit_down))
     {
         load_single_speed_limit(*child, TR_DOWN, tor);
         ret = tr_resume::Speedlimit;
@@ -309,18 +308,18 @@ auto load_speed_limits(tr_variant::Map const& map, tr_torrent* tor)
 
 tr_resume::fields_t load_ratio_limits(tr_variant::Map const& map, tr_torrent* tor)
 {
-    auto const* const d = map.find_if<tr_variant::Map>({ TR_KEY_ratio_limit, TR_KEY_ratio_limit_kebab });
+    auto const* const d = map.find_if<tr_variant::Map>(TR_KEY_ratio_limit);
     if (d == nullptr)
     {
         return {};
     }
 
-    if (auto const dratio = d->value_if<double>({ TR_KEY_ratio_limit, TR_KEY_ratio_limit_kebab }); dratio)
+    if (auto const dratio = d->value_if<double>(TR_KEY_ratio_limit))
     {
         tor->set_seed_ratio(*dratio);
     }
 
-    if (auto const i = d->value_if<int64_t>({ TR_KEY_ratio_mode, TR_KEY_ratio_mode_kebab }); i)
+    if (auto const i = d->value_if<int64_t>(TR_KEY_ratio_mode))
    {
         tor->set_seed_ratio_mode(static_cast<tr_ratiolimit>(*i));
     }
@@ -330,18 +329,18 @@ tr_resume::fields_t load_ratio_limits(tr_variant::Map const& map, tr_torrent* tor)
 
 tr_resume::fields_t load_idle_limits(tr_variant::Map const& map, tr_torrent* tor)
 {
-    auto const* const d = map.find_if<tr_variant::Map>({ TR_KEY_idle_limit, TR_KEY_idle_limit_kebab });
+    auto const* const d = map.find_if<tr_variant::Map>(TR_KEY_idle_limit);
     if (d == nullptr)
     {
         return {};
     }
 
-    if (auto const imin = d->value_if<int64_t>({ TR_KEY_idle_limit, TR_KEY_idle_limit_kebab }); imin)
+    if (auto const imin = d->value_if<int64_t>(TR_KEY_idle_limit))
     {
         tor->set_idle_limit_minutes(*imin);
     }
 
-    if (auto const i = d->value_if<int64_t>({ TR_KEY_idle_mode, TR_KEY_idle_mode_kebab }); i)
+    if (auto const i = d->value_if<int64_t>(TR_KEY_idle_mode))
     {
         tor->set_idle_limit_mode(static_cast<tr_idlelimit>(*i));
     }
@@ -636,6 +635,7 @@ tr_resume::fields_t load_from_file(tr_torrent* tor, tr_torrent::ResumeHelper& he
         return {};
     }
 
+    otop = libtransmission::api_compat::convert_incoming_data(*otop);
     auto const* const p_map = otop->get_if<tr_variant::Map>();
     if (p_map == nullptr)
     {
@@ -667,8 +667,7 @@ tr_resume::fields_t load_from_file(tr_torrent* tor, tr_torrent::ResumeHelper& he
 
     if ((fields_to_load & (tr_resume::Progress | tr_resume::IncompleteDir)) != 0)
     {
-        if (auto sv = map.value_if<std::string_view>({ TR_KEY_incomplete_dir, TR_KEY_incomplete_dir_kebab });
-            sv && !std::empty(*sv))
+        if (auto sv = map.value_if<std::string_view>(TR_KEY_incomplete_dir); sv && !std::empty(*sv))
         {
             helper.load_incomplete_dir(*sv);
             fields_loaded |= tr_resume::IncompleteDir;
@@ -695,7 +694,7 @@ tr_resume::fields_t load_from_file(tr_torrent* tor, tr_torrent::ResumeHelper& he
 
     if ((fields_to_load & tr_resume::MaxPeers) != 0)
     {
-        if (auto i = map.value_if<int64_t>({ TR_KEY_max_peers, TR_KEY_max_peers_kebab }); i)
+        if (auto const i = map.value_if<int64_t>(TR_KEY_max_peers))
         {
             tor->set_peer_limit(static_cast<uint16_t>(*i));
             fields_loaded |= tr_resume::MaxPeers;
@@ -713,7 +712,7 @@ tr_resume::fields_t load_from_file(tr_torrent* tor, tr_torrent::ResumeHelper& he
 
     if ((fields_to_load & tr_resume::AddedDate) != 0)
     {
-        if (auto i = map.value_if<int64_t>({ TR_KEY_added_date, TR_KEY_added_date_kebab }); i)
+        if (auto const i = map.value_if<int64_t>(TR_KEY_added_date))
         {
             helper.load_date_added(static_cast<time_t>(*i));
             fields_loaded |= tr_resume::AddedDate;
@@ -722,7 +721,7 @@ tr_resume::fields_t load_from_file(tr_torrent* tor, tr_torrent::ResumeHelper& he
 
     if ((fields_to_load & tr_resume::DoneDate) != 0)
     {
-        if (auto i = map.value_if<int64_t>({ TR_KEY_done_date, TR_KEY_done_date_kebab }); i)
+        if (auto const i = map.value_if<int64_t>(TR_KEY_done_date))
         {
             helper.load_date_done(static_cast<time_t>(*i));
             fields_loaded |= tr_resume::DoneDate;
@@ -731,7 +730,7 @@ tr_resume::fields_t load_from_file(tr_torrent* tor, tr_torrent::ResumeHelper& he
 
     if ((fields_to_load & tr_resume::ActivityDate) != 0)
     {
-        if (auto i = map.value_if<int64_t>({ TR_KEY_activity_date, TR_KEY_activity_date_kebab }); i)
+        if (auto const i = map.value_if<int64_t>(TR_KEY_activity_date))
         {
             tor->set_date_active(*i);
             fields_loaded |= tr_resume::ActivityDate;
@@ -740,7 +739,7 @@ tr_resume::fields_t load_from_file(tr_torrent* tor, tr_torrent::ResumeHelper& he
 
     if ((fields_to_load & tr_resume::TimeSeeding) != 0)
     {
-        if (auto i = map.value_if<int64_t>({ TR_KEY_seeding_time_seconds, TR_KEY_seeding_time_seconds_kebab }); i)
+        if (auto const i = map.value_if<int64_t>(TR_KEY_seeding_time_seconds))
         {
             helper.load_seconds_seeding_before_current_start(*i);
             fields_loaded |= tr_resume::TimeSeeding;
@@ -749,7 +748,7 @@ tr_resume::fields_t load_from_file(tr_torrent* tor, tr_torrent::ResumeHelper& he
 
     if ((fields_to_load & tr_resume::TimeDownloading) != 0)
     {
-        if (auto i = map.value_if<int64_t>({ TR_KEY_downloading_time_seconds, TR_KEY_downloading_time_seconds_kebab }); i)
+        if (auto const i = map.value_if<int64_t>(TR_KEY_downloading_time_seconds))
         {
             helper.load_seconds_downloading_before_current_start(*i);
             fields_loaded |= tr_resume::TimeDownloading;
@@ -758,8 +757,7 @@ tr_resume::fields_t load_from_file(tr_torrent* tor, tr_torrent::ResumeHelper& he
 
     if ((fields_to_load & tr_resume::BandwidthPriority) != 0)
     {
-        if (auto i = map.value_if<int64_t>({ TR_KEY_bandwidth_priority, TR_KEY_bandwidth_priority_kebab });
-            i && tr_isPriority(static_cast<tr_priority_t>(*i)))
+        if (auto const i = map.value_if<int64_t>(TR_KEY_bandwidth_priority); i && tr_isPriority(static_cast<tr_priority_t>(*i)))
         {
             tr_torrentSetPriority(tor, static_cast<tr_priority_t>(*i));
             fields_loaded |= tr_resume::BandwidthPriority;
@@ -985,8 +983,9 @@ void save(tr_torrent* const tor, tr_torrent::ResumeHelper const& helper)
     save_labels(map, tor);
     save_group(map, tor);
 
+    auto const out = libtransmission::api_compat::convert_outgoing_data(std::move(map));
     auto serde = tr_variant_serde::benc();
-    if (!serde.to_file(std::move(map), tor->resume_file()))
+    if (!serde.to_file(out, tor->resume_file()))
     {
         tor->error().set_local_error(fmt::format("Unable to save resume file: {:s}", serde.error_.message()));
     }
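The pattern repeated across these hunks: because convert_incoming_data() canonicalizes the map up front, every loader drops the two-key fallback form of value_if()/find_if(). Condensed before/after, lifted from the diff:

    // before: probe both spellings on every lookup
    if (auto i = map.value_if<int64_t>({ TR_KEY_max_peers, TR_KEY_max_peers_kebab }); i)

    // after: keys were normalized once at load time, so one probe suffices
    if (auto const i = map.value_if<int64_t>(TR_KEY_max_peers))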

tests/assets/benc2cpp.py (new file, 263 lines)

@@ -0,0 +1,263 @@
#!/usr/bin/env python3
#
# Created by GitHub Copilot (GPT-5.2 (Preview)).
#
# License: Same terms as Transmission itself (see COPYING). Transmission
# permits redistribution/modification under GNU GPLv2, GPLv3, or any future
# license endorsed by Mnemosyne LLC.
#
# Purpose:
# Convert a bencoded (benc) file into a C++ concatenated string-literal
# fragment that preserves the exact original bytes. Output is whitespace-only
# formatted for readability (4-space indentation), similar in spirit to
# pretty-printed JSON.
#
# Usage:
# tests/assets/benc2cpp.py path/to/file.benc > out.cppfrag

from __future__ import annotations

import sys
from pathlib import Path


def bytes_to_cpp_string_literal(data: bytes) -> str:
    r"""Return a single C++ string literal token for arbitrary bytes.

    Uses normal (non-raw) string literals and emits \xNN for bytes that are not
    safe/pleasant as-is.
    """
    out = '"'
    prev_was_hex_escape = False
    for b in data:
        ch = chr(b)
        # C/C++ rule: \x escapes consume *all following hex digits*.
        # If we emit "\xNN" and then a literal '0'..'9'/'a'..'f'/'A'..'F',
        # it becomes a single (larger) hex escape and may fail to compile.
        if (
            prev_was_hex_escape
            and (
                (ord('0') <= b <= ord('9'))
                or (ord('a') <= b <= ord('f'))
                or (ord('A') <= b <= ord('F'))
            )
        ):
            out += f"\\x{b:02x}"
            prev_was_hex_escape = True
            continue
        if ch == "\\":
            out += "\\\\"  # emit the two-character C escape for a backslash
            prev_was_hex_escape = False
        elif ch == '"':
            out += "\\\""  # emit the two-character C escape for a double quote
            prev_was_hex_escape = False
        elif 0x20 <= b <= 0x7E:
            out += ch
            prev_was_hex_escape = False
        else:
            out += f"\\x{b:02x}"
            prev_was_hex_escape = True
    out += '"'
    return out


def bencode_tokenize(data: bytes) -> list[bytes]:
    r"""Tokenize bencode into syntactic units without changing bytes.

    Tokens are:
    - b"d", b"l", b"e"
    - b"i...e" (entire integer token)
    - b"<len>:<payload>" (entire string token, including length and colon)

    This is a tokenizer only. It assumes the input is valid bencode.
    """
    tokens: list[bytes] = []
    i = 0
    n = len(data)

    def need(cond: bool, msg: str) -> None:
        if not cond:
            raise ValueError(f"Invalid bencode at offset {i}: {msg}")

    while i < n:
        b = data[i]
        if b in (ord('d'), ord('l'), ord('e')):
            tokens.append(bytes([b]))
            i += 1
            continue
        if b == ord('i'):
            j = data.find(b'e', i + 1)
            need(j != -1, "unterminated integer")
            tokens.append(data[i:j + 1])
            i = j + 1
            continue
        if ord('0') <= b <= ord('9'):
            j = i
            while j < n and ord('0') <= data[j] <= ord('9'):
                j += 1
            need(j < n and data[j] == ord(':'), "string length missing colon")
            strlen = int(data[i:j].decode('ascii'))
            start = j + 1
            end = start + strlen
            need(end <= n, "string payload truncated")
            tokens.append(data[i:end])
            i = end
            continue
        msg = f"Invalid bencode at offset {i}: unexpected byte 0x{b:02x}"
        raise ValueError(msg)

    return tokens


def render_bencode_tokens_pretty(
    tokens: list[bytes],
    *,
    base_indent: int = 4,
    indent_step: int = 4,
) -> list[str]:
    """Render bencode tokens into indented C++ string literal lines.

    Whitespace-only pretty-printing rules:
    - One token per line by default.
    - For dictionaries, if a key's value is a scalar (string or integer),
      render the key and value on the same line separated by a space.

    This changes only whitespace between C string fragments; the concatenated
    bytes are identical to the input.
    """
    lines: list[str] = []
    # Stack entries are either:
    #   ('list', None)
    #   ('dict', expecting_key: bool)
    stack: list[tuple[str, bool | None]] = []
    pending_dict_key: bytes | None = None

    def depth() -> int:
        return len(stack)

    def indent() -> str:
        return ' ' * (base_indent + depth() * indent_step)

    def is_scalar_token(t: bytes) -> bool:
        return t.startswith(b'i') or (t[:1].isdigit())

    i = 0
    while i < len(tokens):
        tok = tokens[i]

        if tok == b'e':
            if pending_dict_key is not None:
                key_lit = bytes_to_cpp_string_literal(pending_dict_key)
                lines.append(indent() + key_lit)
                pending_dict_key = None
            if stack:
                stack.pop()
            lines.append(indent() + bytes_to_cpp_string_literal(tok))
            # If this closed a value container in a dict,
            # the parent dict is now ready for next key.
            if stack and stack[-1][0] == 'dict' and stack[-1][1] is False:
                stack[-1] = ('dict', True)
            i += 1
            continue

        # Dict key collection
        if stack and stack[-1][0] == 'dict' and stack[-1][1] is True:
            pending_dict_key = tok
            stack[-1] = ('dict', False)
            i += 1
            continue

        # Dict value emission
        is_dict_value = (
            stack
            and stack[-1][0] == 'dict'
            and stack[-1][1] is False
            and pending_dict_key is not None
        )
        if is_dict_value:
            if is_scalar_token(tok):
                lines.append(
                    indent()
                    + bytes_to_cpp_string_literal(pending_dict_key)
                    + ' '
                    + bytes_to_cpp_string_literal(tok)
                )
                pending_dict_key = None
                stack[-1] = ('dict', True)
                i += 1
                continue
            # Non-scalar (container) value: key on its own line, then container
            # token.
            key_lit = bytes_to_cpp_string_literal(pending_dict_key)
            lines.append(indent() + key_lit)
            pending_dict_key = None
            lines.append(indent() + bytes_to_cpp_string_literal(tok))
            if tok == b'd':
                stack.append(('dict', True))
            elif tok == b'l':
                stack.append(('list', None))
            else:
                stack[-1] = ('dict', True)
            i += 1
            continue

        # Default emission
        lines.append(indent() + bytes_to_cpp_string_literal(tok))
        if tok == b'd':
            stack.append(('dict', True))
        elif tok == b'l':
            stack.append(('list', None))
        i += 1

    if pending_dict_key is not None:
        lines.append(indent() + bytes_to_cpp_string_literal(pending_dict_key))

    return lines


def main(argv: list[str]) -> int:
    if len(argv) != 2:
        sys.stderr.write(f"Usage: {Path(argv[0]).name} path/to/file.benc\n")
        return 2

    in_path = Path(argv[1])
    data = in_path.read_bytes()
    tokens = bencode_tokenize(data)
    pretty_lines = render_bencode_tokens_pretty(tokens)

    sys.stdout.write("// clang-format off\n")
    sys.stdout.write("constexpr std::string_view Benc =\n")
    if not pretty_lines:
        sys.stdout.write("    \"\";\n")
    else:
        for line in pretty_lines[:-1]:
            sys.stdout.write(line)
            sys.stdout.write("\n")
        sys.stdout.write(pretty_lines[-1])
        sys.stdout.write(";\n")
    sys.stdout.write("// clang-format on\n")
    return 0


if __name__ == "__main__":
    raise SystemExit(main(sys.argv))
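For a feel for the output, here is the fragment the script above would produce for a toy dictionary (worked by hand from the code; toy.benc is a hypothetical file containing the bytes d3:bari1e3:fool4:spamee):

    $ tests/assets/benc2cpp.py toy.benc
    // clang-format off
    constexpr std::string_view Benc =
        "d"
            "3:bar" "i1e"
            "3:foo"
            "l"
                "4:spam"
            "e"
        "e";
    // clang-format on

Scalar dict values share a line with their key; container values start on their own line, which is exactly the shape of the LegacyResumeBenc/ResumeBenc fixtures below.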


@@ -644,6 +644,306 @@ constexpr std::string_view UnrecognisedInfoLegacyResponse = R"json({
     "tag": 10
 })json";
 
+// clang-format off
+constexpr std::string_view LegacyResumeBenc =
+    "d"
+        "13:activity-date" "i1765724117e"
+        "10:added-date" "i1756689559e"
+        "18:bandwidth-priority" "i0e"
+        "7:corrupt" "i0e"
+        "11:destination" "30:/data/trackers/untracked/Books"
+        "3:dnd"
+        "l"
+            "i0e"
+            "i0e"
+            "i0e"
+            "i0e"
+            "i0e"
+            "i0e"
+            "i0e"
+        "e"
+        "9:done-date" "i1756689845e"
+        "10:downloaded" "i4830420542e"
+        "24:downloading-time-seconds" "i286e"
+        "5:files"
+        "l"
+            "102:Oz Series - Frank L Baum [PUBLIC DOMAIN] v1-7/v01 - Oz - Baum - The Wonderful Wizard of Oz (1990).epub"
+            "100:Oz Series - Frank L Baum [PUBLIC DOMAIN] v1-7/v02 - Oz - Baum - The Marvelous Land of Oz (1904).epub"
+            "86:Oz Series - Frank L Baum [PUBLIC DOMAIN] v1-7/v03 - Oz - Baum - Ozma of Oz (1907).epub"
+            "104:Oz Series - Frank L Baum [PUBLIC DOMAIN] v1-7/v04 - Oz - Baum - Dorothy and the Wizard in Oz (1908).epub"
+            "90:Oz Series - Frank L Baum [PUBLIC DOMAIN] v1-7/v05 - Oz - Baum - The Road to Oz (1909).epub"
+            "98:Oz Series - Frank L Baum [PUBLIC DOMAIN] v1-7/v06 - Oz - Baum - The Emerald City of Oz (1910).epub"
+            "100:Oz Series - Frank L Baum [PUBLIC DOMAIN] v1-7/v07 - Oz - Baum - The Patchwork Girl of Oz (1913).epub"
+        "e"
+        "5:group" "0:"
+        "10:idle-limit"
+        "d"
+            "10:idle-limit" "i30e"
+            "9:idle-mode" "i0e"
+        "e"
+        "6:labels"
+        "l"
+        "e"
+        "9:max-peers" "i20e"
+        "4:name" "45:Oz Series - Frank L Baum [PUBLIC DOMAIN] v1-7"
+        "6:paused" "i0e"
+        "6:peers2"
+        "l"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\x80\x3b\xac\x8f\x3b\x1c"
+            "e"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\xe2\xa1\xe3\x25\x2c\xfa"
+            "e"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\xf2\x50\x82\xab\xed\x08"
+            "e"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\xeb\xb2\x8c\xa1\x1e\xc6"
+            "e"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\xe8\x92\x9e\x87\xd1\xb4"
+            "e"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\x0a\xca\x51\xdd\x61\x52"
+            "e"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\x3d\xf0\x9c\x23\x55\x20"
+            "e"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\x4d\x9f\x2e\xd9\x40\x9e"
+            "e"
+            "d"
+                "5:flags" "i12e"
+                "14:socket_address" "6:\x83\xa6\xd7\x7f\xa3\x4c"
+            "e"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\xcd\x4a\xdf\x95\xc8\xaa"
+            "e"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\x78\x5b\x6c\x9b\xa8\x38"
+            "e"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\x60\xc6\xe0\x11\xc5\x76"
+            "e"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\xd4\xfa\x37\x77\x0f\xe4"
+            "e"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\xfb\x28\x6c\x4d\xc3\x02"
+            "e"
+        "e"
+        "8:priority"
+        "l"
+            "i0e"
+            "i0e"
+            "i0e"
+            "i0e"
+            "i0e"
+            "i0e"
+            "i0e"
+        "e"
+        "8:progress"
+        "d"
+            "6:blocks" "3:all"
+            "6:mtimes"
+            "l"
+                "i1756689844e"
+                "i1756689800e"
+                "i1756689836e"
+                "i1756689812e"
+                "i1756689839e"
+                "i1756689844e"
+                "i1756689804e"
+            "e"
+            "6:pieces" "3:all"
+        "e"
+        "11:ratio-limit"
+        "d"
+            "11:ratio-limit" "8:2.000000"
+            "10:ratio-mode" "i0e"
+        "e"
+        "20:seeding-time-seconds" "i7373039e"
+        "19:sequential_download" "i0e"
+        "30:sequential_download_from_piece" "i0e"
+        "16:speed-limit-down"
+        "d"
+            "9:speed-Bps" "i2000000e"
+            "22:use-global-speed-limit" "i1e"
+            "15:use-speed-limit" "i0e"
+        "e"
+        "14:speed-limit-up"
+        "d"
+            "9:speed-Bps" "i5000000e"
+            "22:use-global-speed-limit" "i1e"
+            "15:use-speed-limit" "i0e"
+        "e"
+        "8:uploaded" "i98667375637e"
+    "e";
+
+constexpr std::string_view ResumeBenc =
+    "d"
+        "13:activity_date" "i1765724117e"
+        "10:added_date" "i1756689559e"
+        "18:bandwidth_priority" "i0e"
+        "7:corrupt" "i0e"
+        "11:destination" "30:/data/trackers/untracked/Books"
+        "3:dnd"
+        "l"
+            "i0e"
+            "i0e"
+            "i0e"
+            "i0e"
+            "i0e"
+            "i0e"
+            "i0e"
+        "e"
+        "9:done_date" "i1756689845e"
+        "10:downloaded" "i4830420542e"
+        "24:downloading_time_seconds" "i286e"
+        "5:files"
+        "l"
+            "102:Oz Series - Frank L Baum [PUBLIC DOMAIN] v1-7/v01 - Oz - Baum - The Wonderful Wizard of Oz (1990).epub"
+            "100:Oz Series - Frank L Baum [PUBLIC DOMAIN] v1-7/v02 - Oz - Baum - The Marvelous Land of Oz (1904).epub"
+            "86:Oz Series - Frank L Baum [PUBLIC DOMAIN] v1-7/v03 - Oz - Baum - Ozma of Oz (1907).epub"
+            "104:Oz Series - Frank L Baum [PUBLIC DOMAIN] v1-7/v04 - Oz - Baum - Dorothy and the Wizard in Oz (1908).epub"
+            "90:Oz Series - Frank L Baum [PUBLIC DOMAIN] v1-7/v05 - Oz - Baum - The Road to Oz (1909).epub"
+            "98:Oz Series - Frank L Baum [PUBLIC DOMAIN] v1-7/v06 - Oz - Baum - The Emerald City of Oz (1910).epub"
+            "100:Oz Series - Frank L Baum [PUBLIC DOMAIN] v1-7/v07 - Oz - Baum - The Patchwork Girl of Oz (1913).epub"
+        "e"
+        "5:group" "0:"
+        "10:idle_limit"
+        "d"
+            "10:idle_limit" "i30e"
+            "9:idle_mode" "i0e"
+        "e"
+        "6:labels"
+        "l"
+        "e"
+        "9:max_peers" "i20e"
+        "4:name" "45:Oz Series - Frank L Baum [PUBLIC DOMAIN] v1-7"
+        "6:paused" "i0e"
+        "6:peers2"
+        "l"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\x80\x3b\xac\x8f\x3b\x1c"
+            "e"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\xe2\xa1\xe3\x25\x2c\xfa"
+            "e"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\xf2\x50\x82\xab\xed\x08"
+            "e"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\xeb\xb2\x8c\xa1\x1e\xc6"
+            "e"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\xe8\x92\x9e\x87\xd1\xb4"
+            "e"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\x0a\xca\x51\xdd\x61\x52"
+            "e"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\x3d\xf0\x9c\x23\x55\x20"
+            "e"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\x4d\x9f\x2e\xd9\x40\x9e"
+            "e"
+            "d"
+                "5:flags" "i12e"
+                "14:socket_address" "6:\x83\xa6\xd7\x7f\xa3\x4c"
+            "e"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\xcd\x4a\xdf\x95\xc8\xaa"
+            "e"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\x78\x5b\x6c\x9b\xa8\x38"
+            "e"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\x60\xc6\xe0\x11\xc5\x76"
+            "e"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\xd4\xfa\x37\x77\x0f\xe4"
+            "e"
+            "d"
+                "5:flags" "i0e"
+                "14:socket_address" "6:\xfb\x28\x6c\x4d\xc3\x02"
+            "e"
+        "e"
+        "8:priority"
+        "l"
+            "i0e"
+            "i0e"
+            "i0e"
+            "i0e"
+            "i0e"
+            "i0e"
+            "i0e"
+        "e"
+        "8:progress"
+        "d"
+            "6:blocks" "3:all"
+            "6:mtimes"
+            "l"
+                "i1756689844e"
+                "i1756689800e"
+                "i1756689836e"
+                "i1756689812e"
+                "i1756689839e"
+                "i1756689844e"
+                "i1756689804e"
+            "e"
+            "6:pieces" "3:all"
+        "e"
+        "11:ratio_limit"
+        "d"
+            "11:ratio_limit" "8:2.000000"
+            "10:ratio_mode" "i0e"
+        "e"
+        "20:seeding_time_seconds" "i7373039e"
+        "19:sequential_download" "i0e"
+        "30:sequential_download_from_piece" "i0e"
+        "16:speed_limit_down"
+        "d"
+            "9:speed_Bps" "i2000000e"
+            "22:use_global_speed_limit" "i1e"
+            "15:use_speed_limit" "i0e"
+        "e"
+        "14:speed_limit_up"
+        "d"
+            "9:speed_Bps" "i5000000e"
+            "22:use_global_speed_limit" "i1e"
+            "15:use_speed_limit" "i0e"
+        "e"
+        "8:uploaded" "i98667375637e"
+    "e";
+// clang-format on
+
 } // namespace
 
 TEST(ApiCompatTest, canConvertRpc)
@@ -708,12 +1008,11 @@
     }
 }
 
-TEST(ApiCompatTest, canConvertDataFiles)
+TEST(ApiCompatTest, canConvertJsonDataFiles)
 {
     using Style = libtransmission::api_compat::Style;
     using TestCase = std::tuple<std::string_view, std::string_view, Style, std::string_view>;
 
-    // clang-format off
     static auto constexpr TestCases = std::array<TestCase, 8U>{ {
         { "settings tr5 -> tr5", CurrentSettingsJson, Style::Tr5, CurrentSettingsJson },
         { "settings tr5 -> tr4", CurrentSettingsJson, Style::Tr4, LegacySettingsJson },
@@ -725,14 +1024,39 @@
         { "stats tr4 -> tr5", LegacyStatsJson, Style::Tr5, CurrentStatsJson },
         { "stats tr4 -> tr4", LegacyStatsJson, Style::Tr4, LegacyStatsJson },
     } };
-    // clang-format on
 
     for (auto const& [name, src, tgt_style, expected] : TestCases)
     {
         auto serde = tr_variant_serde::json();
         serde.inplace();
         auto parsed = serde.parse(src);
         ASSERT_TRUE(parsed.has_value());
         auto converted = libtransmission::api_compat::convert(*parsed, tgt_style);
         EXPECT_EQ(expected, serde.to_string(converted)) << name;
     }
 }
+
+TEST(ApiCompatTest, canConvertBencDataFiles)
+{
+    using Style = libtransmission::api_compat::Style;
+    using TestCase = std::tuple<std::string_view, std::string_view, Style, std::string_view>;
+
+    static auto constexpr TestCases = std::array<TestCase, 4U>{ {
+        { "resume tr5 -> tr5", ResumeBenc, Style::Tr5, ResumeBenc },
+        { "resume tr5 -> tr4", ResumeBenc, Style::Tr4, LegacyResumeBenc },
+        { "resume tr4 -> tr5", LegacyResumeBenc, Style::Tr5, ResumeBenc },
+        { "resume tr4 -> tr4", LegacyResumeBenc, Style::Tr4, LegacyResumeBenc },
+    } };
+
+    for (auto const& [name, src, tgt_style, expected] : TestCases)
+    {
+        auto serde = tr_variant_serde::benc();
+        serde.inplace();
+        auto parsed = serde.parse(src);
+        ASSERT_TRUE(parsed.has_value()) << name;
+        auto converted = libtransmission::api_compat::convert(*parsed, tgt_style);
+        EXPECT_EQ(expected, serde.to_string(converted)) << name;
+    }
+}