rust: Remove Rust support from tree

Closes #40469

Signed-off-by: David Goulet <dgoulet@torproject.org>
This commit is contained in:
David Goulet 2021-09-14 11:03:00 -04:00
parent c3e50f0fde
commit ae9042abbf
69 changed files with 40 additions and 5984 deletions

8
.gitignore vendored
View File

@ -52,7 +52,6 @@ core.*
/autom4te.cache
/build-stamp
/compile
/config.rust
/configure
/Doxyfile
/orconfig.h
@ -63,7 +62,6 @@ core.*
/config.guess
/config.sub
/conftest*
/link_rust.sh
/micro-revision.*
/patch-stamp
/stamp-h
@ -161,12 +159,6 @@ core.*
/src/app/tor-cov
/src/app/tor-cov.exe
# /src/rust
/src/rust/.cargo/config
/src/rust/.cargo/registry
/src/rust/target
/src/rust/registry
# /src/test
/src/test/Makefile
/src/test/Makefile.in

3
.gitmodules vendored
View File

@ -1,3 +0,0 @@
[submodule "src/ext/rust"]
path = src/ext/rust
url = https://git.torproject.org/tor-rust-dependencies

View File

@ -2,13 +2,6 @@ language: c
cache:
ccache: true
## cargo: true
directories:
- $HOME/.cargo
## caching CARGO_TARGET_DIR actually slows down the build over time,
## because old build products are never deleted.
## where we point CARGO_TARGET_DIR in all our cargo invocations
#- $TRAVIS_BUILD_DIR/src/rust/target
compiler:
- gcc
@ -29,8 +22,6 @@ env:
- HARDENING_OPTIONS="--enable-all-bugs-are-fatal --enable-expensive-hardening"
## We turn off asciidoc by default, because it's slow
- ASCIIDOC_OPTIONS="--disable-asciidoc"
## Our default rust version is the minimum supported version
- RUST_VERSION="1.31.0"
## Turn off tor's sandbox in chutney, until we fix sandbox errors that are
## triggered by Ubuntu Xenial and Bionic. See #32722.
- CHUTNEY_TOR_SANDBOX="0"
@ -68,10 +59,6 @@ matrix:
## We check disable module dirauth
- env: MODULES_OPTIONS="--disable-module-dirauth" HARDENING_OPTIONS="--enable-expensive-hardening"
## We run rust on Linux, because it's faster than rust on macOS
## We check rust offline
- env: RUST_OPTIONS="--enable-rust" TOR_RUST_DEPENDENCIES=true
## We check NSS
## Use -std=gnu99 to turn off some newer features, and maybe turn on some
## extra gcc warnings?
@ -86,14 +73,6 @@ matrix:
## We run `make doxygen` without `make check`.
- env: SKIP_MAKE_CHECK="yes" DOXYGEN="yes"
## macOS builds are very slow, and we have a limited number of
## concurrent macOS jobs. We're not actively developing Rust, so it is
## the lowest priority.
## We run rust on macOS, because we have seen macOS rust failures before
#- env: RUST_VERSION="nightly" RUST_OPTIONS="--enable-rust --enable-cargo-online-mode"
# compiler: clang
# os: osx
## Allow the build to report success (with non-required sub-builds
## continuing to run) if all required sub-builds have succeeded.
fast_finish: true
@ -101,16 +80,6 @@ matrix:
## Careful! We use global envs, which makes it hard to allow failures by env:
## https://docs.travis-ci.com/user/customizing-the-build#matching-jobs-with-allow_failures
allow_failures:
## macOS rust and chutney are very slow, so we let the build finish before
## they are done. We'd like to fast finish, but still eventually show
## any failures in the build status. But Travis doesn't have that ability.
## Since this job is disabled, there's not much point having an exception
## for it
#- env: RUST_VERSION="nightly" RUST_OPTIONS="--enable-rust --enable-cargo-online-mode"
# compiler: clang
# os: osx
## Since we're actively developing IPv6, we want to require the IPv6
## chutney tests
#- env: CHUTNEY_MAKE="test-network-ipv6" CHUTNEY="yes" CHUTNEY_ALLOW_FAILURES="2" SKIP_MAKE_CHECK="yes"
@ -187,8 +156,6 @@ osx_image: xcode11.2
before_install:
## Set pipefail: we use pipes
- set -o pipefail || echo "pipefail failed"
## Create empty rust directories for non-Rust builds, so caching succeeds
- if [[ "$RUST_OPTIONS" == "" ]]; then mkdir -p $HOME/.cargo $TRAVIS_BUILD_DIR/src/rust/target; fi
install:
## If we're on OSX, configure ccache (ccache is automatically installed and configured on Linux)
@ -200,13 +167,6 @@ install:
- if [[ "$COVERAGE_OPTIONS" != "" ]]; then pip install --user cpp-coveralls; fi
## If we're on OSX, and using asciidoc, configure asciidoc
- if [[ "$ASCIIDOC_OPTIONS" == "" ]] && [[ "$TRAVIS_OS_NAME" == "osx" ]]; then export XML_CATALOG_FILES="/usr/local/etc/xml/catalog"; fi
## If we're using Rust, download rustup
- if [[ "$RUST_OPTIONS" != "" ]]; then curl -Ssf -o rustup.sh https://sh.rustup.rs; fi
## Install the stable channels of rustc and cargo and setup our toolchain environment
- if [[ "$RUST_OPTIONS" != "" ]]; then sh rustup.sh -y --default-toolchain $RUST_VERSION; fi
- if [[ "$RUST_OPTIONS" != "" ]]; then source $HOME/.cargo/env; fi
## If we're testing rust builds in offline-mode, then set up our vendored dependencies
- if [[ "$TOR_RUST_DEPENDENCIES" == "true" ]]; then export TOR_RUST_DEPENDENCIES=$PWD/src/ext/rust/crates; fi
## If we're running chutney, install it.
- if [[ "$CHUTNEY" != "" ]]; then git clone --depth 1 https://github.com/torproject/chutney.git ; export CHUTNEY_PATH="$(pwd)/chutney"; fi
## If we're running stem, install it.
@ -215,13 +175,6 @@ install:
## Finally, list installed package versions
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then dpkg-query --show; fi
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew list --versions; fi
## Get some info about rustup, rustc and cargo
- if [[ "$RUST_OPTIONS" != "" ]]; then which rustup; fi
- if [[ "$RUST_OPTIONS" != "" ]]; then which rustc; fi
- if [[ "$RUST_OPTIONS" != "" ]]; then which cargo; fi
- if [[ "$RUST_OPTIONS" != "" ]]; then rustup --version; fi
- if [[ "$RUST_OPTIONS" != "" ]]; then rustc --version; fi
- if [[ "$RUST_OPTIONS" != "" ]]; then cargo --version; fi
## Get python version
- python --version
## If we're running chutney, show the chutney commit
@ -240,7 +193,7 @@ script:
# Skip test_rebind and test_include on macOS
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then export TOR_SKIP_TEST_REBIND=true; export TOR_SKIP_TEST_INCLUDE=true; fi
- ./autogen.sh
- CONFIGURE_FLAGS="$ASCIIDOC_OPTIONS $COVERAGE_OPTIONS $HARDENING_OPTIONS $MODULES_OPTIONS $NSS_OPTIONS $OPENSSL_OPTIONS $RUST_OPTIONS --enable-fatal-warnings --disable-silent-rules"
- CONFIGURE_FLAGS="$ASCIIDOC_OPTIONS $COVERAGE_OPTIONS $HARDENING_OPTIONS $MODULES_OPTIONS $NSS_OPTIONS $OPENSSL_OPTIONS --enable-fatal-warnings --disable-silent-rules"
- echo "Configure flags are $CONFIGURE_FLAGS CC=\"$CC $C_DIALECT_OPTIONS\""
- ./configure $CONFIGURE_FLAGS CC="$CC $C_DIALECT_OPTIONS";
## We run `make check` because that's what https://jenkins.torproject.org does.
@ -270,9 +223,6 @@ after_failure:
before_cache:
## Delete all gcov files.
- if [[ "$COVERAGE_OPTIONS" != "" ]]; then make reset-gcov; fi
## Delete the cargo registry before caching .cargo, because it's cheaper to
## download the registry and throw it away, rather than caching it
- rm -rf $HOME/.cargo/registry
notifications:
irc:

View File

@ -36,12 +36,6 @@ else
TESTING_TOR_BINARY=$(top_builddir)/src/app/tor$(EXEEXT)
endif
if USE_RUST
rust_ldadd=$(top_builddir)/$(TOR_RUST_LIB_PATH)
else
rust_ldadd=
endif
# "Common" libraries used to link tor's utility code.
TOR_UTIL_LIBS = \
src/lib/libtor-geoip.a \
@ -601,51 +595,6 @@ check-typos:
echo "You can install the latest version of misspell here: https://github.com/client9/misspell#install"; \
fi
.PHONY: rustfmt
rustfmt:
if USE_RUST
@if test -x "`which cargo-fmt 2>&1;true`"; then \
echo "Formatting Rust code ..."; \
(cd "$(top_srcdir)/src/rust" && cargo fmt --all --); \
else \
echo "Tor uses rustfmt (via cargo-fmt) to format Rust code."; \
echo "However, it seems that you don't have rustfmt installed."; \
printf "You can install rustfmt by following the directions here:"; \
echo " https://github.com/rust-lang-nursery/rustfmt"; \
fi
endif
.PHONY: check-rustfmt
check-rustfmt:
if USE_RUST
@if test -x "`which cargo-fmt 2>&1;true`"; then \
printf "Running rustfmt..."; \
(cd "$(top_srcdir)/src/rust" && cargo fmt --all -- --check && echo "done.") || \
(echo "**************** check-rustfmt failed. ****************"; \
echo " Run \`make rustfmt\` to apply the above changes."; \
exit 1); \
else \
echo "Tor uses rustfmt (via cargo-fmt) to format Rust code."; \
echo "However, it seems that you don't have rustfmt installed."; \
printf "You can install rustfmt by following the directions here:"; \
echo " https://github.com/rust-lang-nursery/rustfmt"; \
fi
endif
.PHONY: clippy
clippy:
if USE_RUST
@if test -x "`which cargo-clippy 2>&1;true`"; then \
echo "Running cargo clippy ..."; \
echo "Prepare yourself for the onslaught of suggestions ..."; \
(cd "$(top_srcdir)/src/rust" && cargo clippy); \
else \
echo "Tor can use clippy to lint Rust code."; \
echo "However, it seems that you don't have clippy installed."; \
echo "You can install the latest version of clippy by following the directions here: https://github.com/rust-lang-nursery/rust-clippy"; \
fi
endif
.PHONY: check-changes
check-changes:
if USEPYTHON
@ -686,7 +635,7 @@ update-copyright:
$(PERL) $(top_srcdir)/scripts/maint/updateCopyright.pl $(OWNED_TOR_C_FILES)
.PHONY: autostyle
autostyle: update-versions rustfmt autostyle-ifdefs rectify-includes
autostyle: update-versions autostyle-ifdefs rectify-includes
mostlyclean-local:
rm -f $(top_builddir)/src/*/*.gc{da,no} $(top_builddir)/src/*/*/*.gc{da,no}
@ -694,14 +643,6 @@ mostlyclean-local:
rm -rf $(top_builddir)/doc/doxygen
rm -rf $(TEST_NETWORK_ALL_LOG_DIR)
clean-local:
rm -rf $(top_builddir)/src/rust/target
rm -rf $(top_builddir)/src/rust/.cargo/registry
if USE_RUST
distclean-local: distclean-rust
endif
# This relies on some internal details of how automake implements
# distcheck. We check two directories because automake-1.15 changed
# from $(distdir)/_build to $(distdir)/_build/sub.

4
changes/ticket40469 Normal file
View File

@ -0,0 +1,4 @@
o Code simplification and refactoring (rust):
- Remove Rust support and its associated code. It is unsupported and Rust
focus should be shifted to arti. Closes 40469.

24
config.rust Normal file
View File

@ -0,0 +1,24 @@
# Used by our cargo build.rs script to get variables from autoconf.
#
# The "configure" script will generate "config.rust" from "config.rust.in",
# and then build.rs will read "config.rust".
BUILDDIR=/home/dgoulet/Documents/git/tor
TOR_LDFLAGS_zlib=
TOR_LDFLAGS_nss=@TOR_LDFLAGS_nss@
TOR_LDFLAGS_openssl=
TOR_LDFLAGS_libevent=
TOR_ZLIB_LIBS=-lz
TOR_LIB_MATH=-lm
TOR_LIBEVENT_LIBS=-levent
TOR_OPENSSL_LIBS=-lssl -lcrypto
TOR_LIB_WS32=
TOR_LIB_GDI=
TOR_LIB_USERENV=
CURVE25519_LIBS=
TOR_SYSTEMD_LIBS=-lsystemd
TOR_LZMA_LIBS=-llzma
TOR_ZSTD_LIBS=-lzstd
LIBS=-lseccomp -lcap
LDFLAGS= -pie -z relro -z now -rdynamic
NSS_LIBS=

View File

@ -1,24 +0,0 @@
# Used by our cargo build.rs script to get variables from autoconf.
#
# The "configure" script will generate "config.rust" from "config.rust.in",
# and then build.rs will read "config.rust".
BUILDDIR=@BUILDDIR@
TOR_LDFLAGS_zlib=@TOR_LDFLAGS_zlib@
TOR_LDFLAGS_nss=@TOR_LDFLAGS_nss@
TOR_LDFLAGS_openssl=@TOR_LDFLAGS_openssl@
TOR_LDFLAGS_libevent=@TOR_LDFLAGS_libevent@
TOR_ZLIB_LIBS=@TOR_ZLIB_LIBS@
TOR_LIB_MATH=@TOR_LIB_MATH@
TOR_LIBEVENT_LIBS=@TOR_LIBEVENT_LIBS@
TOR_OPENSSL_LIBS=@TOR_OPENSSL_LIBS@
TOR_LIB_WS32=@TOR_LIB_WS32@
TOR_LIB_GDI=@TOR_LIB_GDI@
TOR_LIB_USERENV=@TOR_LIB_USERENV@
CURVE25519_LIBS=@CURVE25519_LIBS@
TOR_SYSTEMD_LIBS=@TOR_SYSTEMD_LIBS@
TOR_LZMA_LIBS=@TOR_LZMA_LIBS@
TOR_ZSTD_LIBS=@TOR_ZSTD_LIBS@
LIBS=@LIBS@
LDFLAGS=@LDFLAGS@
NSS_LIBS=@NSS_LIBS@

View File

@ -75,10 +75,6 @@ AC_ARG_ENABLE(oss-fuzz,
AS_HELP_STRING(--enable-oss-fuzz, [build extra fuzzers based on 'oss-fuzz' environment]))
AC_ARG_ENABLE(memory-sentinels,
AS_HELP_STRING(--disable-memory-sentinels, [disable code that tries to prevent some kinds of memory access bugs. For fuzzing only.]))
AC_ARG_ENABLE(rust,
AS_HELP_STRING(--enable-rust, [enable rust integration]))
AC_ARG_ENABLE(cargo-online-mode,
AS_HELP_STRING(--enable-cargo-online-mode, [Allow cargo to make network requests to fetch crates. For builds with rust only.]))
AC_ARG_ENABLE(restart-debugging,
AS_HELP_STRING(--enable-restart-debugging, [Build Tor with support for debugging in-process restart. Developers only.]))
AC_ARG_ENABLE(zstd-advanced-apis,
@ -115,7 +111,6 @@ AM_CONDITIONAL(COVERAGE_ENABLED, test "x$enable_coverage" = "xyes")
AM_CONDITIONAL(DISABLE_ASSERTS_IN_UNIT_TESTS, test "x$enable_asserts_in_tests" = "xno")
AM_CONDITIONAL(LIBFUZZER_ENABLED, test "x$enable_libfuzzer" = "xyes")
AM_CONDITIONAL(OSS_FUZZ_ENABLED, test "x$enable_oss_fuzz" = "xyes")
AM_CONDITIONAL(USE_RUST, test "x$enable_rust" = "xyes")
AM_CONDITIONAL(USE_NSS, test "x$enable_nss" = "xyes")
AM_CONDITIONAL(USE_OPENSSL, test "x$enable_nss" != "xyes")
@ -483,13 +478,6 @@ fi
AM_CONDITIONAL(USEPYTHON, [test "x$PYTHON" != "x"])
dnl List all external rust crates we depend on here. Include the version
rust_crates=" \
digest-0.7.2 \
libc-0.2.39 \
"
AC_SUBST(rust_crates)
ifdef([AC_C_FLEXIBLE_ARRAY_MEMBER], [
AC_C_FLEXIBLE_ARRAY_MEMBER
], [
@ -642,105 +630,6 @@ fi
AC_C_BIGENDIAN
AC_ARG_VAR([TOR_RUST_TARGET], [Rust target, must be specified when cross-compiling (HOST != BUILD). example: i686-pc-windows-gnu])
if test "x$enable_rust" = "xyes"; then
AC_ARG_VAR([RUSTC], [path to the rustc binary])
AC_CHECK_PROG([RUSTC], [rustc], [rustc],[no])
if test "x$RUSTC" = "xno"; then
AC_MSG_ERROR([rustc unavailable but rust integration requested.])
fi
AC_ARG_VAR([CARGO], [path to the cargo binary])
AC_CHECK_PROG([CARGO], [cargo], [cargo],[no])
if test "x$CARGO" = "xno"; then
AC_MSG_ERROR([cargo unavailable but rust integration requested.])
fi
AC_DEFINE([HAVE_RUST], 1, [have Rust])
if test "x$enable_fatal_warnings" = "xyes"; then
RUST_WARN=
else
RUST_WARN=#
fi
if test "x$enable_cargo_online_mode" = "xyes"; then
CARGO_ONLINE=
RUST_DL=#
else
CARGO_ONLINE=--frozen
RUST_DL=
dnl When we're not allowed to touch the network, we need crate dependencies
dnl locally available.
AC_MSG_CHECKING([rust crate dependencies])
AC_ARG_VAR([TOR_RUST_DEPENDENCIES], [path to directory with local crate mirror])
if test "x$TOR_RUST_DEPENDENCIES" = "x"; then
TOR_RUST_DEPENDENCIES="${srcdir}/src/ext/rust/crates"
fi
dnl Check whether the path exists before we try to cd into it.
if test ! -d "$TOR_RUST_DEPENDENCIES"; then
AC_MSG_ERROR([Rust dependency directory $TOR_RUST_DEPENDENCIES does not exist. Specify a dependency directory using the TOR_RUST_DEPENDENCIES variable or allow cargo to fetch crates using --enable-cargo-online-mode.])
ERRORED=1
fi
dnl Make the path absolute, since we'll be using it from within a
dnl subdirectory.
TOR_RUST_DEPENDENCIES=$(cd "$TOR_RUST_DEPENDENCIES" ; pwd)
for dep in $rust_crates; do
if test ! -d "$TOR_RUST_DEPENDENCIES"/"$dep"; then
AC_MSG_ERROR([Failure to find rust dependency $TOR_RUST_DEPENDENCIES/$dep. Specify a dependency directory using the TOR_RUST_DEPENDENCIES variable or allow cargo to fetch crates using --enable-cargo-online-mode.])
ERRORED=1
fi
done
if test "x$ERRORED" = "x"; then
AC_MSG_RESULT([yes])
fi
fi
dnl For now both MSVC and MinGW rust libraries will output static libs with
dnl the MSVC naming convention.
if test "$bwin32" = "true"; then
tor_rust_static_name=tor_rust.lib
else
tor_rust_static_name=libtor_rust.a
fi
AC_CANONICAL_BUILD
if test -n "$TOR_RUST_TARGET"; then
if test "$host" = "$build"; then
AC_MSG_ERROR([HOST = BUILD is invalid if TOR_RUST_TARGET is specified, see configure --help for more information.])
fi
RUST_TARGET_PROP="target = '$TOR_RUST_TARGET'"
TOR_RUST_LIB_PATH="src/rust/target/$TOR_RUST_TARGET/release/$tor_rust_static_name"
else
if test "$host" != "$build"; then
AC_MSG_ERROR([TOR_RUST_TARGET must be specified when cross-compiling with Rust enabled.])
fi
RUST_TARGET_PROP=
TOR_RUST_LIB_PATH="src/rust/target/release/$tor_rust_static_name"
fi
AC_SUBST(RUST_TARGET_PROP)
AC_SUBST(TOR_RUST_LIB_PATH)
AC_SUBST(CARGO_ONLINE)
AC_SUBST(RUST_WARN)
AC_SUBST(RUST_DL)
dnl Let's check the rustc version, too
AC_MSG_CHECKING([rust version])
RUSTC_VERSION=`$RUSTC --version`
RUSTC_VERSION_MAJOR=`$RUSTC --version | cut -d ' ' -f 2 | cut -d '.' -f 1`
RUSTC_VERSION_MINOR=`$RUSTC --version | cut -d ' ' -f 2 | cut -d '.' -f 2`
if test "x$RUSTC_VERSION_MAJOR" = "x" -o "x$RUSTC_VERSION_MINOR" = "x"; then
AC_MSG_ERROR([rustc version couldn't be identified])
fi
if test "$RUSTC_VERSION_MAJOR" -lt 2 -a "$RUSTC_VERSION_MINOR" -lt 31; then
AC_MSG_ERROR([rustc must be at least version 1.31.0])
fi
AC_MSG_RESULT([$RUSTC_VERSION])
fi
AC_SEARCH_LIBS(socket, [socket network])
AC_SEARCH_LIBS(gethostbyname, [nsl])
AC_SEARCH_LIBS(dlopen, [dl])
@ -916,8 +805,6 @@ if test "$bwin32" = "true"; then
TOR_LIB_WS32=-lws2_32
TOR_LIB_IPHLPAPI=-liphlpapi
TOR_LIB_SHLWAPI=-lshlwapi
# Some of the cargo-cults recommend -lwsock32 as well, but I don't
# think it's actually necessary.
TOR_LIB_GDI=-lgdi32
TOR_LIB_USERENV=-luserenv
TOR_LIB_BCRYPT=-lbcrypt
@ -1461,33 +1348,6 @@ if test "$fragile_hardening" = "yes"; then
TOR_CHECK_CFLAGS([-fno-omit-frame-pointer])
fi
dnl Find the correct libraries to add in order to use the sanitizers.
dnl
dnl When building Rust, Cargo will run the linker with the -nodefaultlibs
dnl option, which will prevent the compiler from linking the sanitizer
dnl libraries it needs. We need to specify them manually.
dnl
dnl What's more, we need to specify them in a linker script rather than
dnl from build.rs: these options aren't allowed in the cargo:rustc-flags
dnl variable.
RUST_LINKER_OPTIONS=""
if test "x$have_clang" = "xyes"; then
if test "x$CFLAGS_ASAN" != "x"; then
RUST_LINKER_OPTIONS="$RUST_LINKER_OPTIONS -Clink-arg=$CFLAGS_ASAN -Cdefault-linker-libraries"
fi
if test "x$CFLAGS_UBSAN" != "x"; then
RUST_LINKER_OPTIONS="$RUST_LINKER_OPTIONS -Clink-arg=$CFLAGS_UBSAN -Cdefault-linker-libraries"
fi
else
if test "x$CFLAGS_ASAN" != "x"; then
RUST_LINKER_OPTIONS="$RUST_LINKER_OPTIONS -Clink-arg=-fsanitize=address -Cdefault-linker-libraries"
fi
if test "x$CFLAGS_UBSAN" != "x"; then
RUST_LINKER_OPTIONS="$RUST_LINKER_OPTIONS -Clink-arg=-fsanitize=undefined -Cdefault-linker-libraries"
fi
fi
AC_SUBST(RUST_LINKER_OPTIONS)
CFLAGS_BUGTRAP="$CFLAGS_FTRAPV $CFLAGS_ASAN $CFLAGS_UBSAN"
CFLAGS_CONSTTIME="$CFLAGS_FWRAPV"
@ -2698,11 +2558,9 @@ CPPFLAGS="$CPPFLAGS $TOR_CPPFLAGS_libevent $TOR_CPPFLAGS_openssl $TOR_CPPFLAGS_z
AC_CONFIG_FILES([
Doxyfile
Makefile
config.rust
contrib/operator-tools/tor.logrotate
src/config/torrc.sample
src/config/torrc.minimal
src/rust/.cargo/config
scripts/maint/checkOptionDocs.pl
warning_flags
])
@ -2778,12 +2636,6 @@ AS_ECHO
test "x$enable_fatal_warnings" = "xyes" && value=1 || value=0
PPRINT_PROP_BOOL([Warnings are fatal (--enable-fatal-warnings)], $value)
test "x$enable_rust" = "xyes" && value=1 || value=0
PPRINT_PROP_BOOL([Rust support (--enable-rust)], $value)
test "x$enable_cargo_online_mode" = "xyes" && value=1 || value=0
PPRINT_PROP_BOOL([Cargo Online Fetch (--enable-cargo-online-mode)], $value)
test "x$enable_android" = "xyes" && value=1 || value=0
PPRINT_PROP_BOOL([Android support (--enable-android)], $value)

View File

@ -381,11 +381,10 @@ use case.
#### 2.2.2. Detecting and Negotiating Machine Support
When a new machine specification is added to Tor (or removed from Tor), you
should bump the Padding subprotocol version in `src/core/or/protover.c` and
`src/rust/protover/protover.rs`, add a field to `protover_summary_flags_t` in
`or.h`, and set this field in `memoize_protover_summary()` in versions.c. This
new field must then be checked in `circpad_node_supports_padding()` in
`circuitpadding.c`.
should bump the Padding subprotocol version in `src/core/or/protover.c`, add a
field to `protover_summary_flags_t` in `or.h`, and set this field in
`memoize_protover_summary()` in versions.c. This new field must then be
checked in `circpad_node_supports_padding()` in `circuitpadding.c`.
Note that this protocol version update and associated support check is not
necessary if your experiments will *only* be using your own relays that

View File

@ -1,553 +0,0 @@
# Rust Coding Standards
You MUST follow the standards laid out in `doc/HACKING/CodingStandards.md`,
where applicable.
## Module/Crate Declarations
Each Tor C module which is being rewritten MUST be in its own crate.
See the structure of `src/rust` for examples.
In your crate, you MUST use `lib.rs` ONLY for pulling in external
crates (e.g. `extern crate libc;`) and exporting public objects from
other Rust modules (e.g. `pub use mymodule::foo;`). For example, if
you create a crate in `src/rust/yourcrate`, your Rust code should
live in `src/rust/yourcrate/yourcode.rs` and the public interface
to it should be exported in `src/rust/yourcrate/lib.rs`.
If your code is to be called from Tor C code, you MUST define a safe
`ffi.rs`. See the "Safety" section further down for more details.
For example, in a hypothetical `tor_addition` Rust module:
In `src/rust/tor_addition/addition.rs`:
```rust
pub fn get_sum(a: i32, b: i32) -> i32 {
a + b
}
```
In `src/rust/tor_addition/lib.rs`:
```rust
pub use addition::*;
```
In `src/rust/tor_addition/ffi.rs`:
```rust
#[no_mangle]
pub extern "C" fn tor_get_sum(a: c_int, b: c_int) -> c_int {
get_sum(a, b)
}
```
If your Rust code must call out to parts of Tor's C code, you must
declare the functions you are calling in the `external` crate, located
at `src/rust/external`.
<!-- XXX get better examples of how to declare these externs, when/how they -->
<!-- XXX are unsafe, what they are expected to do —isis -->
Modules should strive to be below 500 lines (tests excluded). Single
responsibility and limited dependencies should be a guiding standard.
If you have any external modules as dependencies (e.g. `extern crate
libc;`), you MUST declare them in your crate's `lib.rs` and NOT in any
other module.
## Dependencies and versions
In general, we use modules from only the Rust standard library
whenever possible. We will review including external crates on a
case-by-case basis.
If a crate only contains traits meant for compatibility between Rust
crates, such as [the digest crate](https://crates.io/crates/digest) or
[the failure crate](https://crates.io/crates/failure), it is very likely
permissible to add it as a dependency. However, a brief review should
be conducted as to the usefulness of implementing external traits
(i.e. how widespread is the usage, how many other crates either
implement the traits or have trait bounds based upon them), as well as
the stability of the traits (i.e. if the trait is going to change, we'll
potentially have to re-do all our implementations of it).
For large external libraries, especially which implement features which
would be labour-intensive to reproduce/maintain ourselves, such as
cryptographic or mathematical/statistics libraries, only crates which
have stabilised to 1.0.0 should be considered, however, again, we may
make exceptions on a case-by-case basis.
Currently, Tor requires that you use the latest stable Rust version. At
some point in the future, we will freeze on a given stable Rust version,
to ensure backward compatibility with stable distributions that ship it.
## Updating/Adding Dependencies
To add/remove/update dependencies, first add your dependencies,
exactly specifying their versions, into the appropriate *crate-level*
`Cargo.toml` in `src/rust/` (i.e. *not* `/src/rust/Cargo.toml`, but
instead the one for your crate). Also, investigate whether your
dependency has any optional dependencies which are unnecessary but are
enabled by default. If so, you'll likely be able to enable/disable
them via some feature, e.g.:
```toml
[dependencies]
foo = { version = "1.0.0", default-features = false }
```
Next, run `/scripts/maint/updateRustDependencies.sh`. Then, go into
`src/ext/rust` and commit the changes to the `tor-rust-dependencies`
repo.
## Documentation
You MUST include `#![deny(missing_docs)]` in your crate.
For function/method comments, you SHOULD include a one-sentence, "first person"
description of function behaviour (see requirements for documentation as
described in `src/HACKING/CodingStandards.md`), then an `# Inputs` section
for inputs or initialisation values, a `# Returns` section for return
values/types, a `# Warning` section containing warnings for unsafe behaviours or
panics that could happen. For publicly accessible
types/constants/objects/functions/methods, you SHOULD also include an
`# Examples` section with runnable doctests.
You MUST document your module with _module docstring_ comments,
i.e. `//!` at the beginning of each line.
## Style
You SHOULD consider breaking up large literal numbers with `_` when it makes it
more human readable to do so, e.g. `let x: u64 = 100_000_000_000`.
## Testing
All code MUST be unittested and integration tested.
Public functions/objects exported from a crate SHOULD include doctests
describing how the function/object is expected to be used.
Integration tests SHOULD go into a `tests/` directory inside your
crate. Unittests SHOULD go into their own module inside the module
they are testing, e.g. in `src/rust/tor_addition/addition.rs` you
should put:
```rust
#[cfg(test)]
mod test {
use super::*;
#[test]
fn addition_with_zero() {
let sum: i32 = get_sum(5i32, 0i32);
assert_eq!(sum, 5);
}
}
```
## Benchmarking
The external `test` crate can be used for most benchmarking. However, using
this crate requires nightly Rust. Since we may want to switch to a more
stable Rust compiler eventually, we shouldn't do things which will automatically
break builds for stable compilers. Therefore, you MUST feature-gate your
benchmarks in the following manner.
If you wish to benchmark some of your Rust code, you MUST put the
following in the `[features]` section of your crate's `Cargo.toml`:
```toml
[features]
bench = []
```
Next, in your crate's `lib.rs` you MUST put:
```rust
#[cfg(all(test, feature = "bench"))]
extern crate test;
```
This ensures that the external crate `test`, which contains utilities
for basic benchmarks, is only used when running benchmarks via `cargo
bench --features bench`.
Finally, to write your benchmark code, in
`src/rust/tor_addition/addition.rs` you SHOULD put:
```rust
#[cfg(all(test, feature = "bench"))]
mod bench {
use test::Bencher;
use super::*;
#[bench]
fn addition_small_integers(b: &mut Bencher) {
b.iter(|| get_sum(5i32, 0i32));
}
}
```
## Fuzzing
If you wish to fuzz parts of your code, please see the
[cargo fuzz](https://github.com/rust-fuzz/cargo-fuzz) crate, which uses
[libfuzzer-sys](https://github.com/rust-fuzz/libfuzzer-sys).
## Whitespace & Formatting
You MUST run `rustfmt` (https://github.com/rust-lang-nursery/rustfmt)
on your code before your code will be merged. You can install rustfmt
by doing `cargo install rustfmt-nightly` and then run it with `cargo
fmt`.
## Safety
You SHOULD read [the nomicon](https://doc.rust-lang.org/nomicon/) before writing
Rust FFI code. It is *highly advised* that you read and write normal Rust code
before attempting to write FFI or any other unsafe code.
Here are some additional bits of advice and rules:
0. Any behaviours which Rust considers to be undefined are forbidden
From https://doc.rust-lang.org/reference/behavior-considered-undefined.html:
> Behavior considered undefined
>
> The following is a list of behavior which is forbidden in all Rust code,
> including within unsafe blocks and unsafe functions. Type checking provides the
> guarantee that these issues are never caused by safe code.
>
> * Data races
> * Dereferencing a null/dangling raw pointer
> * Reads of [undef](https://llvm.org/docs/LangRef.html#undefined-values)
> (uninitialized) memory
> * Breaking the
> [pointer aliasing rules](https://llvm.org/docs/LangRef.html#pointer-aliasing-rules)
> with raw pointers (a subset of the rules used by C)
> * `&mut T` and `&T` follow LLVM's scoped noalias model, except if the `&T`
> contains an `UnsafeCell<U>`. Unsafe code must not violate these aliasing
> guarantees.
> * Mutating non-mutable data (that is, data reached through a shared
> reference or data owned by a `let` binding), unless that data is
> contained within an `UnsafeCell<U>`.
> * Invoking undefined behavior via compiler intrinsics:
> - Indexing outside of the bounds of an object with
> `std::ptr::offset` (`offset` intrinsic), with the exception of
> one byte past the end which is permitted.
> - Using `std::ptr::copy_nonoverlapping_memory` (`memcpy32`/`memcpy64`
> intrinsics) on overlapping buffers
> * Invalid values in primitive types, even in private fields/locals:
> - Dangling/null references or boxes
> - A value other than `false` (0) or `true` (1) in a `bool`
> - A discriminant in an `enum` not included in the type definition
> - A value in a `char` which is a surrogate or above `char::MAX`
> - Non-UTF-8 byte sequences in a `str`
> * Unwinding into Rust from foreign code or unwinding from Rust into foreign
> code. Rust's failure system is not compatible with exception handling in other
> languages. Unwinding must be caught and handled at FFI boundaries.
1. `unwrap()`
If you call `unwrap()`, anywhere, even in a test, you MUST include
an inline comment stating how the unwrap will either 1) never fail,
or 2) should fail (i.e. in a unittest).
You SHOULD NOT use `unwrap()` anywhere in which it is possible to handle the
potential error with the eel operator, `?` or another non panicking way.
For example, consider a function which parses a string into an integer:
```rust
fn parse_port_number(config_string: &str) -> u16 {
u16::from_str_radix(config_string, 10).unwrap()
}
```
There are numerous ways this can fail, and the `unwrap()` will cause the
whole program to byte the dust! Instead, either you SHOULD use `ok()`
(or another equivalent function which will return an `Option` or a `Result`)
and change the return type to be compatible:
```rust
fn parse_port_number(config_string: &str) -> Option<u16> {
u16::from_str_radix(config_string, 10).ok()
}
```
or you SHOULD use `or()` (or another similar method):
```rust
fn parse_port_number(config_string: &str) -> Option<u16> {
u16::from_str_radix(config_string, 10).or(Err("Couldn't parse port into a u16"))
}
```
Using methods like `or()` can be particularly handy when you must do
something afterwards with the data, for example, if we wanted to guarantee
that the port is high. Combining these methods with the eel operator (`?`)
makes this even easier:
```rust
fn parse_port_number(config_string: &str) -> Result<u16, &'static str> {
let port = u16::from_str_radix(config_string, 10).or(Err("Couldn't parse port into a u16"))?;
if port > 1024 {
return Ok(port);
} else {
return Err("Low ports not allowed");
}
}
```
2. `unsafe`
If you use `unsafe`, you MUST describe a contract in your
documentation which describes how and when the unsafe code may
fail, and what expectations are made w.r.t. the interfaces to
unsafe code. This is also REQUIRED for major pieces of FFI between
C and Rust.
When creating an FFI in Rust for C code to call, it is NOT REQUIRED
to declare the entire function `unsafe`. For example, rather than doing:
```rust
#[no_mangle]
pub unsafe extern "C" fn increment_and_combine_numbers(mut numbers: [u8; 4]) -> u32 {
for number in &mut numbers {
*number += 1;
}
std::mem::transmute::<[u8; 4], u32>(numbers)
}
```
You SHOULD instead do:
```rust
#[no_mangle]
pub extern "C" fn increment_and_combine_numbers(mut numbers: [u8; 4]) -> u32 {
for index in 0..numbers.len() {
numbers[index] += 1;
}
unsafe {
std::mem::transmute::<[u8; 4], u32>(numbers)
}
}
```
3. Pass only C-compatible primitive types and bytes over the boundary
Rust's C-compatible primitive types are integers and floats.
These types are declared in the [libc crate](https://doc.rust-lang.org/libc/x86_64-unknown-linux-gnu/libc/index.html#types).
Most Rust objects have different [representations](https://doc.rust-lang.org/libc/x86_64-unknown-linux-gnu/libc/index.html#types)
in C and Rust, so they can't be passed using FFI.
Tor currently uses the following Rust primitive types from libc for FFI:
* defined-size integers: `uint32_t`
* native-sized integers: `c_int`
* native-sized floats: `c_double`
* native-sized raw pointers: `* c_void`, `* c_char`, `** c_char`
TODO: C smartlist to Stringlist conversion using FFI
The only non-primitive type which may cross the FFI boundary is
bytes, e.g. `&[u8]`. This SHOULD be done on the Rust side by
passing a pointer (`*mut libc::c_char`). The length can be passed
explicitly (`libc::size_t`), or the string can be NUL-byte terminated
C string.
One might be tempted to do this via doing
`CString::new("blah").unwrap().into_raw()`. This has several problems:
a) If you do `CString::new("bl\x00ah")` then the unwrap() will fail
due to the additional NULL terminator, causing a dangling
pointer to be returned (as well as a potential use-after-free).
b) Returning the raw pointer will cause the CString to run its deallocator,
which causes any C code which tries to access the contents to dereference a
NULL pointer.
c) If we were to do `as_raw()` this would result in a potential double-free
since the Rust deallocator would run and possibly Tor's deallocator.
d) Calling `into_raw()` without later using the same pointer in Rust to call
`from_raw()` and then deallocate in Rust can result in a
[memory leak](https://doc.rust-lang.org/std/ffi/struct.CString.html#method.into_raw).
[It was determined](https://github.com/rust-lang/rust/pull/41074) that this
is safe to do if you use the same allocator in C and Rust and also specify
the memory alignment for CString (except that there is no way to specify
the alignment for CString). It is believed that the alignment is always 1,
which would mean it's safe to dealloc the resulting `*mut c_char` in Tor's
C code. However, the Rust developers are not willing to guarantee the
stability of, or a contract for, this behaviour, citing concerns that this
is potentially extremely and subtly unsafe.
4. Perform an allocation on the other side of the boundary
After crossing the boundary, the other side MUST perform an
allocation to copy the data and is therefore responsible for
freeing that memory later.
5. No touching other language's enums
Rust enums should never be touched from C (nor can they be safely
`#[repr(C)]`) nor vice versa:
> "The chosen size is the default enum size for the target platform's C
> ABI. Note that enum representation in C is implementation defined, so this is
> really a "best guess". In particular, this may be incorrect when the C code
> of interest is compiled with certain flags."
(from https://gankro.github.io/nomicon/other-reprs.html)
6. Type safety
Wherever possible and sensical, you SHOULD create new types in a
manner which prevents type confusion or misuse. For example,
rather than using an untyped mapping between strings and integers
like so:
```rust
use std::collections::HashMap;
pub fn get_elements_with_over_9000_points(map: &HashMap<String, usize>) -> Vec<String> {
...
}
```
It would be safer to define a new type, such that some other usage
of `HashMap<String, usize>` cannot be confused for this type:
```rust
pub struct DragonBallZPowers(pub HashMap<String, usize>);
impl DragonBallZPowers {
pub fn over_nine_thousand<'a>(&'a self) -> Vec<&'a String> {
let mut powerful_enough: Vec<&'a String> = Vec::with_capacity(5);
for (character, power) in &self.0 {
if *power > 9000 {
powerful_enough.push(character);
}
}
powerful_enough
}
}
```
Note the following code, which uses Rust's type aliasing, is valid
but it does NOT meet the desired type safety goals:
```rust
pub type Power = usize;
pub fn over_nine_thousand(power: &Power) -> bool {
if *power > 9000 {
return true;
}
false
}
// We can still do the following:
let his_power: usize = 9001;
over_nine_thousand(&his_power);
```
7. Unsafe mucking around with lifetimes
Because lifetimes are technically, in type theory terms, a kind, i.e. a
family of types, individual lifetimes can be treated as types. For example,
one can arbitrarily extend and shorten lifetime using `std::mem::transmute`:
```rust
struct R<'a>(&'a i32);
unsafe fn extend_lifetime<'b>(r: R<'b>) -> R<'static> {
std::mem::transmute::<R<'b>, R<'static>>(r)
}
unsafe fn shorten_invariant_lifetime<'b, 'c>(r: &'b mut R<'static>) -> &'b mut R<'c> {
std::mem::transmute::<&'b mut R<'static>, &'b mut R<'c>>(r)
}
```
Calling `extend_lifetime()` would cause an `R` passed into it to live forever
for the life of the program (the `'static` lifetime). Similarly,
`shorten_invariant_lifetime()` could be used to take something meant to live
forever, and cause it to disappear! This is incredibly unsafe. If you're
going to be mucking around with lifetimes like this, first, you better have
an extremely good reason, and second, you may as well be honest and explicit about
it, and for ferris' sake just use a raw pointer.
In short, just because lifetimes can be treated like types doesn't mean you
should do it.
8. Doing excessively unsafe things when there's a safer alternative
Similarly to #7, often there are excessively unsafe ways to do a task and a
simpler, safer way. You MUST choose the safer option where possible.
For example, `std::mem::transmute` can be abused in ways where casting with
`as` would be both simpler and safer:
```rust
// Don't do this
let ptr = &0;
let ptr_num_transmute = unsafe { std::mem::transmute::<&i32, usize>(ptr)};
// Use an `as` cast instead
let ptr_num_cast = ptr as *const i32 as usize;
```
In fact, using `std::mem::transmute` for *any* reason is a code smell and as
such SHOULD be avoided.
9. Casting integers with `as`
This is generally fine to do, but it has some behaviours which you should be
aware of. Casting down chops off the high bits, e.g.:
```rust
let x: u32 = 4294967295;
println!("{}", x as u16); // prints 65535
```
Some cases which you MUST NOT do include:
* Casting an `u128` down to an `f32` or vice versa (e.g.
`u128::MAX as f32` but this isn't only a problem with overflowing
as it is also undefined behaviour for `42.0f32 as u128`),
* Casting between integers and floats when the thing being cast
cannot fit into the type it is being casted into, e.g.:
```rust
println!("{}", 42949.0f32 as u8); // prints 197 in debug mode and 0 in release
println!("{}", 1.04E+17 as u8); // prints 0 in both modes
println!("{}", (0.0/0.0) as i64); // prints whatever the heck LLVM wants
```
Because this behaviour is undefined, it can even produce segfaults in
safe Rust code. For example, the following program built in release
mode segfaults:
```rust
#[inline(never)]
pub fn trigger_ub(sl: &[u8; 666]) -> &[u8] {
// Note that the float is out of the range of `usize`, invoking UB when casting.
let idx = 1e99999f64 as usize;
&sl[idx..] // The bound check is elided due to `idx` being of an undefined value.
}
fn main() {
println!("{}", trigger_ub(&[1; 666])[999999]); // ~ out of bound
}
```
And in debug mode panics with:
thread 'main' panicked at 'slice index starts at 140721821254240 but ends at 666', /checkout/src/libcore/slice/mod.rs:754:4

View File

@ -1,187 +0,0 @@
# Hacking on Rust in Tor
## Getting Started
Please read or review our documentation on Rust coding standards
(`doc/HACKING/CodingStandardsRust.md`) before doing anything.
Please also read
[the Rust Code of Conduct](https://www.rust-lang.org/en-US/conduct.html). We
aim to follow the good example set by the Rust community and be
excellent to one another. Let's be careful with each other, so we can
be memory-safe together!
Next, please contact us before rewriting anything! Rust in Tor is still
an experiment. It is an experiment that we very much want to see
succeed, so we're going slowly and carefully. For the moment, it's also
a completely volunteer-driven effort: while many, if not most, of us are
paid to work on Tor, we are not yet funded to write Rust code for Tor.
Please be patient with the other people who are working on getting more
Rust code into Tor, because they are graciously donating their free time
to contribute to this effort.
## Resources for learning Rust
**Beginning resources**
The primary resource for learning Rust is
[The Book](https://doc.rust-lang.org/book/). If you'd like to start writing
Rust immediately, without waiting for anything to install, there is
[an interactive browser-based playground](https://play.rust-lang.org/).
**Advanced resources**
If you're interested in playing with various Rust compilers and viewing
a very nicely displayed output of the generated assembly, there is
[the Godbolt compiler explorer](https://rust.godbolt.org/)
For learning how to write unsafe Rust, read
[The Rustonomicon](https://doc.rust-lang.org/nomicon/).
For learning everything you ever wanted to know about Rust macros, there
is
[The Little Book of Rust Macros](https://danielkeep.github.io/tlborm/book/index.html).
For learning more about FFI and Rust, see Jake Goulding's
[Rust FFI Omnibus](https://jakegoulding.com/rust-ffi-omnibus/).
## Compiling Tor with Rust enabled
You will need to run the `configure` script with the `--enable-rust`
flag to explicitly build with Rust. Additionally, you will need to
specify where to fetch Rust dependencies, as we allow for either
fetching dependencies from Cargo or specifying a local directory.
**Fetch dependencies from Cargo**
```console
$ ./configure --enable-rust --enable-cargo-online-mode
```
**Using a local dependency cache**
You'll need the following Rust dependencies (as of this writing):
libc==0.2.39
We vendor our Rust dependencies in a separate repo using
[cargo-vendor](https://github.com/alexcrichton/cargo-vendor). To use
them, do:
```console
$ git submodule init
$ git submodule update
```
To specify the local directory containing the dependencies, (assuming
you are in the top level of the repository) configure tor with:
```console
$ TOR_RUST_DEPENDENCIES='path_to_dependencies_directory' ./configure --enable-rust
```
(Note that `TOR_RUST_DEPENDENCIES` must be the full path to the directory; it
cannot be relative.)
Assuming you used the above `git submodule` commands and you're in the
topmost directory of the repository, this would be:
```console
$ TOR_RUST_DEPENDENCIES=`pwd`/src/ext/rust/crates ./configure --enable-rust
```
## Identifying which modules to rewrite
The places in the Tor codebase that are good candidates for porting to
Rust are:
1. loosely coupled to other Tor submodules,
2. have high test coverage, and
3. would benefit from being implemented in a memory safe language.
Help in either identifying places such as this, or working to improve
existing areas of the C codebase by adding regression tests and
simplifying dependencies, would be really helpful.
Furthermore, as submodules in C are implemented in Rust, this is a good
opportunity to refactor, add more tests, and split modules into smaller
areas of responsibility.
A good first step is to build a module-level callgraph to understand how
interconnected your target module is.
```console
$ git clone https://git.torproject.org/user/nickm/calltool.git
$ cd tor
$ CFLAGS=0 ./configure
$ ../calltool/src/main.py module_callgraph
```
The output will tell you each module name, along with a set of every module that
the module calls. Modules which call fewer other modules are better targets.
## Writing your Rust module
Strive to change the C API as little as possible.
We are currently targeting Rust stable. (See `CodingStandardsRust.md` for more
details.)
It is on our TODO list to try to cultivate good
standing with various distro maintainers of `rustc` and `cargo`, in
order to ensure that whatever version we solidify on is readily
available.
If parts of your Rust code need to stay in sync with C code (such as
handling enums across the FFI boundary), annotate these places in a
comment structured as follows:
`/// C_RUST_COUPLED: <path_to_file> <name_of_c_object>`
Where `<name_of_c_object>` can be an enum, struct, constant, etc. Then,
do the same in the C code, to note that rust will need to be changed
when the C does.
## Adding your Rust module to Tor's build system
0. Your translation of the C module should live in its own crate(s)
in the `src/rust/` directory.
1. Add your crate to `src/rust/Cargo.toml`, in the
`[workspace.members]` section.
2. Add your crate's files to src/rust/include.am
If your crate should be available to C (rather than just being included as a
dependency of other Rust modules):
0. Declare the crate as a dependency of tor_rust in
`src/rust/tor_util/Cargo.toml` and include it in
`src/rust/tor_rust/lib.rs`
## How to test your Rust code
Everything should be tested full stop. Even non-public functionality.
Be sure to edit `src/test/test_rust.sh` to add the name of your
crate to the `crates` variable! This will ensure that `cargo test` is
run on your crate.
Configure Tor's build system to build with Rust enabled:
```console
$ ./configure --enable-fatal-warnings --enable-rust --enable-cargo-online-mode
```
Tor's test should be run by doing:
```console
$ make check
```
Tor's integration tests should also pass:
```console
$ make test-stem
```
## Submitting a patch
Please follow the instructions in `doc/HACKING/GettingStarted.md`.

View File

@ -51,10 +51,8 @@ EXTRA_DIST+= doc/asciidoc-helper.sh \
doc/TUNING \
doc/HACKING/README.1st.md \
doc/HACKING/CodingStandards.md \
doc/HACKING/CodingStandardsRust.md \
doc/HACKING/Fuzzing.md \
doc/HACKING/GettingStarted.md \
doc/HACKING/GettingStartedRust.md \
doc/HACKING/HelpfulTools.md \
doc/HACKING/HowToReview.md \
doc/HACKING/Module.md \

View File

@ -30,7 +30,6 @@ RUN_STAGE_TEST="${RUN_STAGE_TEST:-yes}"
FATAL_WARNINGS="${FATAL_WARNINGS:-yes}"
HARDENING="${HARDENING:-no}"
COVERAGE="${COVERAGE:-no}"
RUST="${RUST:-no}"
DOXYGEN="${DOXYGEN:-no}"
ASCIIDOC="${ASCIIDOC:-no}"
TRACING="${TRACING:-no}"
@ -193,7 +192,6 @@ yes_or_no ON_GITLAB
yes_or_no FATAL_WARNINGS
yes_or_no HARDENING
yes_or_no COVERAGE
yes_or_no RUST
yes_or_no DOXYGEN
yes_or_no ASCIIDOC
yes_or_no TRACING
@ -245,9 +243,6 @@ fi
if [[ "$COVERAGE" == "yes" ]]; then
configure_options+=("--enable-coverage")
fi
if [[ "$RUST" == "yes" ]]; then
configure_options+=("--enable-rust")
fi
if [[ "$ASCIIDOC" != "yes" ]]; then
configure_options+=("--disable-asciidoc")
fi

View File

@ -32,7 +32,6 @@ FUZZING_LDFLAG = \
@TOR_LDFLAGS_zlib@ $(TOR_LDFLAGS_CRYPTLIB) @TOR_LDFLAGS_libevent@
FUZZING_LIBS = \
src/test/libtor-testing.a \
$(rust_ldadd) \
@TOR_ZLIB_LIBS@ @TOR_LIB_MATH@ \
@TOR_LIBEVENT_LIBS@ $(TOR_LIBS_CRYPTLIB) \
@TOR_LIB_WS32@ @TOR_LIB_IPHLPAPI@ @TOR_LIB_SHLWAPI@ @TOR_LIB_GDI@ @TOR_LIB_USERENV@ @CURVE25519_LIBS@ \

View File

@ -41,7 +41,6 @@ rm -f "$TOPLEVEL/contrib/dist/suse/tor.sh" "$TOPLEVEL/contrib/dist/tor.sh"
find "$TOPLEVEL/contrib" "$TOPLEVEL/doc" "$TOPLEVEL/scripts" "$TOPLEVEL/src" \
-name "*.sh" \
-not -path "$TOPLEVEL/src/ext/*" \
-not -path "$TOPLEVEL/src/rust/registry/*" \
-exec shellcheck {} +
# Check scripts that aren't named *.sh

View File

@ -7,8 +7,7 @@ import os
# We don't want to run metrics for unittests, automatically-generated C files,
# external libraries or git leftovers.
EXCLUDE_SOURCE_DIRS = {"src/test/", "src/trunnel/", "src/rust/",
"src/ext/" }
EXCLUDE_SOURCE_DIRS = {"src/test/", "src/trunnel/", "src/ext/" }
EXCLUDE_FILES = {"orconfig.h"}

View File

@ -1,45 +0,0 @@
#!/usr/bin/env bash
#
# Copyright (c) 2018 The Tor Project, Inc.
# Copyright (c) 2018 isis agora lovecruft
# See LICENSE for license information
#
# updateRustDependencies.sh
# -------------------------
# Update our vendored Rust dependencies, either adding/removing
# dependencies and/or upgrading current dependencies to newer
# versions.
#
# To use this script, first add your dependencies, exactly specifying
# their versions, into the appropriate *crate-level* Cargo.toml in
# src/rust/ (i.e. *not* /src/rust/Cargo.toml, but instead the one for
# your crate).
#
# Next, run this script. Then, go into src/ext/rust and commit the
# changes to the tor-rust-dependencies repo.

set -e

HERE=$(dirname "$(realpath "$0")")
TOPLEVEL=$(dirname "$(dirname "$HERE")")
TOML="$TOPLEVEL/src/rust/Cargo.toml"
VENDORED="$TOPLEVEL/src/ext/rust/crates"
CARGO=$(command -v cargo)

# Every sanity check below must abort the script on failure: previously
# these branches only printed a diagnostic and fell through, so the
# `cargo vendor` invocation at the bottom ran anyway.  Diagnostics now go
# to stderr and are followed by `exit 1`.
if ! test -f "$TOML" ; then
    printf "Error: Couldn't find workspace Cargo.toml in expected location: %s\\n" "$TOML" >&2
    exit 1
fi

if ! test -d "$VENDORED" ; then
    printf "Error: Couldn't find directory for Rust dependencies! Expected location: %s\\n" "$VENDORED" >&2
    exit 1
fi

if test -z "$CARGO" ; then
    printf "Error: cargo must be installed and in your \$PATH\\n" >&2
    exit 1
fi

# Use `grep -q` on the pipeline directly instead of testing the captured
# output of a command substitution (quieter, and exits on first match).
if ! cargo --list | grep -q vendor ; then
    printf "Error: cargo-vendor not installed\\n" >&2
    exit 1
fi

# Re-vendor everything listed in the workspace Cargo.toml into VENDORED.
$CARGO vendor -v --locked --explicit-version --no-delete --sync "$TOML" "$VENDORED"

View File

@ -17,7 +17,6 @@ src_app_tor_SOURCES = src/app/main/tor_main.c
src_app_tor_LDFLAGS = @TOR_LDFLAGS_zlib@ $(TOR_LDFLAGS_CRYPTLIB) \
@TOR_LDFLAGS_libevent@ @TOR_STATIC_LDFLAGS@
src_app_tor_LDADD = libtor.a \
$(rust_ldadd) \
@TOR_ZLIB_LIBS@ @TOR_LIB_MATH@ @TOR_LIBEVENT_LIBS@ $(TOR_LIBS_CRYPTLIB) \
@TOR_LIB_WS32@ @TOR_LIB_IPHLPAPI@ @TOR_LIB_SHLWAPI@ @TOR_LIB_GDI@ @TOR_LIB_USERENV@ \
@CURVE25519_LIBS@ @TOR_SYSTEMD_LIBS@ \

View File

@ -102,12 +102,6 @@
#include <systemd/sd-daemon.h>
#endif /* defined(HAVE_SYSTEMD) */
#ifdef HAVE_RUST
// helper function defined in Rust to output a log message indicating if tor is
// running with Rust enabled. See src/rust/tor_util
void rust_log_welcome_string(void);
#endif
/********* PROTOTYPES **********/
static void dumpmemusage(int severity);
@ -611,10 +605,6 @@ tor_init(int argc, char *argv[])
tor_compress_log_init_warnings();
}
#ifdef HAVE_RUST
rust_log_welcome_string();
#endif /* defined(HAVE_RUST) */
/* Warn _if_ the tracing subsystem is built in. */
tracing_log_warning();

View File

@ -28,7 +28,6 @@ LIBTOR_APP_A_SOURCES += \
src/core/or/orconn_event.c \
src/core/or/policies.c \
src/core/or/protover.c \
src/core/or/protover_rust.c \
src/core/or/reasons.c \
src/core/or/relay.c \
src/core/or/scheduler.c \

View File

@ -28,8 +28,6 @@
#include "core/or/versions.h"
#include "lib/tls/tortls.h"
#ifndef HAVE_RUST
static const smartlist_t *get_supported_protocol_list(void);
static int protocol_list_contains(const smartlist_t *protos,
protocol_type_t pr, uint32_t ver);
@ -752,5 +750,3 @@ protover_free_all(void)
supported_protocol_list = NULL;
}
}
#endif /* !defined(HAVE_RUST) */

View File

@ -99,13 +99,13 @@ typedef struct proto_entry_t {
uint64_t bitmask;
} proto_entry_t;
#if !defined(HAVE_RUST) && defined(TOR_UNIT_TESTS)
#if defined(TOR_UNIT_TESTS)
STATIC struct smartlist_t *parse_protocol_list(const char *s);
STATIC char *encode_protocol_list(const struct smartlist_t *sl);
STATIC const char *protocol_type_to_str(protocol_type_t pr);
STATIC int str_to_protocol_type(const char *s, protocol_type_t *pr_out);
STATIC void proto_entry_free_(proto_entry_t *entry);
#endif /* !defined(HAVE_RUST) && defined(TOR_UNIT_TESTS) */
#endif /* defined(TOR_UNIT_TESTS) */
#define proto_entry_free(entry) \
FREE_AND_NULL(proto_entry_t, proto_entry_free_, (entry))

View File

@ -1,34 +0,0 @@
/* Copyright (c) 2016-2021, The Tor Project, Inc. */
/* See LICENSE for licensing information */
/*
 * \file protover_rust.c
 * \brief Provide a C wrapper for functions exposed in /src/rust/protover,
 * and safe translation/handling between the Rust/C boundary.
 */
#include "core/or/or.h"
#include "core/or/protover.h"
#ifdef HAVE_RUST
/* Define for compatibility, used in main.c */
/* Deliberately a no-op: in HAVE_RUST builds the C-side
 * supported_protocol_list (freed by the !HAVE_RUST version of this
 * function in protover.c) is never built, so there is nothing to free. */
void
protover_free_all(void)
{
}
/* Helper implemented on the Rust side (see the \file brief above);
 * returns nonzero when the unparsed list has an over-long name. */
int protover_contains_long_protocol_names_(const char *s);
/**
 * Return true if the unparsed protover in <b>s</b> would contain a protocol
 * name longer than MAX_PROTOCOL_NAME_LENGTH, and false otherwise.
 */
bool
protover_list_is_invalid(const char *s)
{
  /* Normalize the Rust helper's int result to a C bool. */
  return protover_contains_long_protocol_names_(s) != 0;
}
#endif /* defined(HAVE_RUST) */

View File

@ -85,7 +85,6 @@ include src/app/main/include.am
include src/core/include.am
include src/app/include.am
include src/rust/include.am
include src/test/include.am
include src/tools/include.am
include src/win32/include.am

View File

@ -1,12 +0,0 @@
[source]
@RUST_DL@ [source.crates-io]
@RUST_DL@ registry = 'https://github.com/rust-lang/crates.io-index'
@RUST_DL@ replace-with = 'vendored-sources'
@RUST_DL@ [source.vendored-sources]
@RUST_DL@ directory = '@TOR_RUST_DEPENDENCIES@'
[build]
@RUST_WARN@ rustflags = [ "-D", "warnings" ]
@RUST_TARGET_PROP@

View File

@ -1,12 +0,0 @@
max_width = 100
hard_tabs = false
tab_spaces = 4
newline_style = "Unix"
#use_small_heuristics = "Default"
reorder_imports = true
reorder_modules = true
remove_nested_parens = true
merge_derives = true
use_try_shorthand = false
use_field_init_shorthand = false
force_explicit_abi = true

122
src/rust/Cargo.lock generated
View File

@ -1,122 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
[[package]]
name = "crypto"
version = "0.0.1"
dependencies = [
"digest 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"external 0.0.1",
"libc 0.2.39 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.5.0-pre.2 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_core 0.2.0-pre.0 (registry+https://github.com/rust-lang/crates.io-index)",
"smartlist 0.0.1",
"tor_allocate 0.0.1",
"tor_log 0.1.0",
]
[[package]]
name = "digest"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"generic-array 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "external"
version = "0.0.1"
dependencies = [
"libc 0.2.39 (registry+https://github.com/rust-lang/crates.io-index)",
"smartlist 0.0.1",
"tor_allocate 0.0.1",
]
[[package]]
name = "generic-array"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"typenum 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "libc"
version = "0.2.39"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "protover"
version = "0.0.1"
dependencies = [
"external 0.0.1",
"libc 0.2.39 (registry+https://github.com/rust-lang/crates.io-index)",
"smartlist 0.0.1",
"tor_allocate 0.0.1",
"tor_log 0.1.0",
"tor_util 0.0.1",
]
[[package]]
name = "rand"
version = "0.5.0-pre.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"rand_core 0.2.0-pre.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rand_core"
version = "0.2.0-pre.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "smartlist"
version = "0.0.1"
dependencies = [
"libc 0.2.39 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "tor_allocate"
version = "0.0.1"
dependencies = [
"libc 0.2.39 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "tor_log"
version = "0.1.0"
dependencies = [
"libc 0.2.39 (registry+https://github.com/rust-lang/crates.io-index)",
"tor_allocate 0.0.1",
]
[[package]]
name = "tor_rust"
version = "0.1.0"
dependencies = [
"protover 0.0.1",
"tor_util 0.0.1",
]
[[package]]
name = "tor_util"
version = "0.0.1"
dependencies = [
"libc 0.2.39 (registry+https://github.com/rust-lang/crates.io-index)",
"tor_allocate 0.0.1",
"tor_log 0.1.0",
]
[[package]]
name = "typenum"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[metadata]
"checksum digest 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "00a49051fef47a72c9623101b19bd71924a45cca838826caae3eaa4d00772603"
"checksum generic-array 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ef25c5683767570c2bbd7deba372926a55eaae9982d7726ee2a1050239d45b9d"
"checksum libc 0.2.39 (registry+https://github.com/rust-lang/crates.io-index)" = "f54263ad99207254cf58b5f701ecb432c717445ea2ee8af387334bdd1a03fdff"
"checksum rand 0.5.0-pre.2 (registry+https://github.com/rust-lang/crates.io-index)" = "3795e4701d9628a63a84d0289e66279883b40df165fca7caed7b87122447032a"
"checksum rand_core 0.2.0-pre.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c7255ffbdb188d5be1a69b6f9f3cf187de4207430b9e79ed5b76458a6b20de9a"
"checksum typenum 1.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "13a99dc6780ef33c78780b826cf9d2a78840b72cae9474de4bcaf9051e60ebbd"

View File

@ -1,26 +0,0 @@
[workspace]
members = [
"crypto",
"external",
"protover",
"smartlist",
"tor_allocate",
"tor_log",
"tor_rust",
"tor_util",
]
# Can remove panic="abort" when this issue is fixed:
# https://github.com/rust-lang/rust/issues/52652
[profile.dev]
panic = "abort"
[profile.release]
debug = true
panic = "abort"
[profile.test]
panic = "abort"
[profile.bench]
panic = "abort"

View File

@ -1,192 +0,0 @@
//! Build script for Rust modules in Tor.
//!
//! We need to use this because some of our Rust tests need to use some
//! of our C modules, which need to link some external libraries.
//!
//! This script works by looking at a "config.rust" file generated by our
//! configure script, and then building a set of options for cargo to pass to
//! the compiler.
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io;
use std::io::prelude::*;
use std::path::PathBuf;
/// Wrapper around a key-value map.
struct Config(HashMap<String, String>);
/// Locate the `config.rust` file generated by autoconf and return its path
/// as an owned `String`.
///
/// The search starts at cargo's `OUT_DIR` (autoconf places generated files
/// somewhere in the build tree) and walks upward one directory at a time.
///
/// # Errors
///
/// Returns an `io::ErrorKind::NotFound` error if the filesystem root is
/// reached without finding a `config.rust`.
fn find_cfg() -> io::Result<String> {
    let mut dir = PathBuf::from(env::var("OUT_DIR").unwrap());
    loop {
        let candidate = dir.join("config.rust");
        if candidate.exists() {
            return Ok(candidate.to_str().unwrap().to_owned());
        }
        // Move to the parent directory; pop() returns false once we have
        // hit the root, meaning no config.rust exists anywhere above us.
        if !dir.pop() {
            return Err(io::Error::new(io::ErrorKind::NotFound, "No config.rust"));
        }
    }
}
impl Config {
    /// Find the config.rust file and try to parse it.
    ///
    /// The file format is a series of lines of the form KEY=VAL, with
    /// any blank lines and lines starting with # ignored.
    ///
    /// # Errors
    ///
    /// Propagates I/O errors from opening or reading the file, and returns
    /// `InvalidData` for any non-comment line that lacks an `=` separator.
    fn load() -> io::Result<Config> {
        let path = find_cfg()?;
        let f = File::open(&path)?;
        let reader = io::BufReader::new(f);
        let mut map = HashMap::new();
        for line in reader.lines() {
            let s = line?;
            // Skip comments and blank lines.
            if s.trim().starts_with("#") || s.trim() == "" {
                continue;
            }
            // Split at the first '=': the key is everything before it and
            // the value everything after, so values may themselves contain
            // further '=' characters.
            let idx = match s.find("=") {
                None => {
                    return Err(io::Error::new(io::ErrorKind::InvalidData, "missing ="));
                }
                Some(x) => x,
            };
            let (var, eq_val) = s.split_at(idx);
            let val = &eq_val[1..];
            map.insert(var.to_owned(), val.to_owned());
        }
        Ok(Config(map))
    }
    /// Return a reference to the value whose key is 'key'.
    ///
    /// Panics if 'key' is not found in the configuration.
    fn get(&self, key: &str) -> &str {
        self.0.get(key).unwrap()
    }
    /// Add a dependency on a static C library that is part of Tor, by name.
    fn component(&self, s: &str) {
        println!("cargo:rustc-link-lib=static={}", s);
    }
    /// Add a dependency on a native library that is not part of Tor, by name.
    fn dependency(&self, s: &str) {
        println!("cargo:rustc-link-lib={}", s);
    }
    /// Add a link path, relative to Tor's build directory.
    fn link_relpath(&self, s: &str) {
        let builddir = self.get("BUILDDIR");
        println!("cargo:rustc-link-search=native={}/{}", builddir, s);
    }
    /// Add an absolute link path.
    fn link_path(&self, s: &str) {
        println!("cargo:rustc-link-search=native={}", s);
    }
    /// Parse the CFLAGS in s, looking for -l and -L items, and adding
    /// rust configuration as appropriate.
    ///
    /// Handles both the separated form ("-L <path>", via the lookahead
    /// flags below) and the attached form ("-L<path>"), and likewise
    /// for "-l".
    fn from_cflags(&self, s: &str) {
        let mut next_is_lib = false;
        let mut next_is_path = false;
        for ent in self.get(s).split_whitespace() {
            if next_is_lib {
                self.dependency(ent);
                next_is_lib = false;
            } else if next_is_path {
                self.link_path(ent);
                next_is_path = false;
            } else if ent == "-l" {
                next_is_lib = true;
            } else if ent == "-L" {
                next_is_path = true;
            } else if ent.starts_with("-L") {
                self.link_path(&ent[2..]);
            } else if ent.starts_with("-l") {
                self.dependency(&ent[2..]);
            }
        }
    }
}
/// Build-script entry point: load the autoconf-generated `config.rust`
/// and emit `cargo:` link directives for the package being compiled.
///
/// Panics if the configuration cannot be loaded or if the current package
/// has no stanza in the match below.
pub fn main() {
    let cfg = Config::load().unwrap();
    // Cargo tells the build script which crate it is currently building.
    let package = env::var("CARGO_PKG_NAME").unwrap();
    match package.as_ref() {
        "crypto" => {
            // Right now, I'm having a separate configuration for each Rust
            // package, since I'm hoping we can trim them down. Once we have a
            // second Rust package that needs to use this build script, let's
            // extract some of this stuff into a module.
            //
            // This is a ridiculous amount of code to be pulling in just
            // to test our crypto library: modularity would be our
            // friend here.
            cfg.from_cflags("TOR_LDFLAGS_zlib");
            cfg.from_cflags("TOR_LDFLAGS_openssl");
            cfg.from_cflags("TOR_LDFLAGS_libevent");
            // Link-search paths for Tor's static helper libraries,
            // relative to the build directory.
            cfg.link_relpath("src/lib");
            cfg.link_relpath("src/ext/keccak-tiny");
            cfg.link_relpath("src/ext/ed25519/ref10");
            cfg.link_relpath("src/ext/ed25519/donna");
            cfg.link_relpath("src/trunnel");
            // Note that we can't pull in "libtor-testing", or else we
            // will have dependencies on all the other rust packages that
            // tor uses. We must be careful with factoring and dependencies
            // moving forward!
            cfg.component("tor-crypt-ops-testing");
            cfg.component("tor-sandbox-testing");
            cfg.component("tor-encoding-testing");
            cfg.component("tor-fs-testing");
            cfg.component("tor-net-testing");
            cfg.component("tor-buf-testing");
            cfg.component("tor-time-testing");
            cfg.component("tor-thread-testing");
            cfg.component("tor-memarea-testing");
            cfg.component("tor-log-testing");
            cfg.component("tor-lock-testing");
            cfg.component("tor-fdio-testing");
            cfg.component("tor-container-testing");
            cfg.component("tor-smartlist-core-testing");
            cfg.component("tor-string-testing");
            cfg.component("tor-malloc");
            cfg.component("tor-wallclock");
            cfg.component("tor-err-testing");
            cfg.component("tor-version-testing");
            cfg.component("tor-intmath-testing");
            cfg.component("tor-ctime-testing");
            cfg.component("curve25519_donna");
            cfg.component("keccak-tiny");
            cfg.component("ed25519_ref10");
            cfg.component("ed25519_donna");
            cfg.component("or-trunnel-testing");
            // External native libraries, discovered from configure output.
            cfg.from_cflags("TOR_ZLIB_LIBS");
            cfg.from_cflags("TOR_LIB_MATH");
            cfg.from_cflags("NSS_LIBS");
            cfg.from_cflags("TOR_OPENSSL_LIBS");
            cfg.from_cflags("TOR_LIBEVENT_LIBS");
            cfg.from_cflags("TOR_LIB_WS32");
            cfg.from_cflags("TOR_LIB_GDI");
            cfg.from_cflags("TOR_LIB_USERENV");
            cfg.from_cflags("CURVE25519_LIBS");
            cfg.from_cflags("TOR_LZMA_LIBS");
            cfg.from_cflags("TOR_ZSTD_LIBS");
            cfg.from_cflags("LIBS");
        }
        _ => {
            panic!("No configuration in build.rs for package {}", package);
        }
    }
}

View File

@ -1,37 +0,0 @@
[package]
authors = ["The Tor Project",
"Isis Lovecruft <isis@torproject.org>"]
name = "crypto"
version = "0.0.1"
publish = false
build = "../build.rs"
[lib]
name = "crypto"
path = "lib.rs"
[dependencies]
libc = "=0.2.39"
digest = "=0.7.2"
rand_core = { version = "=0.2.0-pre.0", default-features = false }
external = { path = "../external" }
smartlist = { path = "../smartlist" }
tor_allocate = { path = "../tor_allocate" }
tor_log = { path = "../tor_log" }
[dev-dependencies]
rand = { version = "=0.5.0-pre.2", default-features = false }
rand_core = { version = "=0.2.0-pre.0", default-features = false }
[features]
# If this feature is enabled, test code which calls Tor C code from Rust will
# execute with `cargo test`. Due to numerous linker issues (#25386), this is
# currently disabled by default.
test-c-from-rust = []
# We have to define a feature here because doctests don't get cfg(test),
# and we need to disable some C dependencies when running the doctests
# because of the various linker issues. See
# https://github.com/rust-lang/rust/issues/45599
test_linking_hack = []

View File

@ -1,7 +0,0 @@
// Copyright (c) 2018-2019, The Tor Project, Inc.
// Copyright (c) 2018, isis agora lovecruft
// See LICENSE for licensing information
//! Hash Digests and eXtendible Output Functions (XOFs)
pub mod sha2;

View File

@ -1,234 +0,0 @@
// Copyright (c) 2018-2019, The Tor Project, Inc.
// Copyright (c) 2018, isis agora lovecruft
// See LICENSE for licensing information
//! Hash Digests and eXtendible Output Functions (XOFs)
pub use digest::Digest;
use digest::generic_array::typenum::U32;
use digest::generic_array::typenum::U64;
use digest::generic_array::GenericArray;
use digest::BlockInput;
use digest::FixedOutput;
use digest::Input;
use external::crypto_digest::get_256_bit_digest;
use external::crypto_digest::get_512_bit_digest;
use external::crypto_digest::CryptoDigest;
use external::crypto_digest::DigestAlgorithm;
pub use external::crypto_digest::DIGEST256_LEN;
pub use external::crypto_digest::DIGEST512_LEN;
/// The block size for both SHA-256 and SHA-512 digests is 512 bits/64 bytes.
///
/// Unfortunately, we have to use the generic_array crate currently to express
/// this at compile time. Later, in the future, when Rust implements const
/// generics, we'll be able to remove this dependency (actually, it will get
/// removed from the digest crate, which is currently `pub use`ing it).
type BlockSize = U64;
/// A SHA2-256 digest.
///
/// # C_RUST_COUPLED
///
/// * `crypto_digest_dup`
#[derive(Clone)]
pub struct Sha256 {
engine: CryptoDigest,
}
/// Construct a new, default instance of a `Sha256` hash digest function.
///
/// # Examples
///
/// ```rust,no_run
/// use crypto::digests::sha2::{Sha256, Digest};
///
/// let mut hasher: Sha256 = Sha256::default();
/// ```
///
/// # Returns
///
/// A new `Sha256` digest.
impl Default for Sha256 {
fn default() -> Sha256 {
Sha256 {
engine: CryptoDigest::new(Some(DigestAlgorithm::SHA2_256)),
}
}
}
impl BlockInput for Sha256 {
type BlockSize = BlockSize;
}
/// Input `msg` into the digest.
///
/// # Examples
///
/// ```rust,no_run
/// use crypto::digests::sha2::{Sha256, Digest};
///
/// let mut hasher: Sha256 = Sha256::default();
///
/// hasher.input(b"foo");
/// hasher.input(b"bar");
/// ```
impl Input for Sha256 {
    /// Feed `msg` into the underlying Tor C digest engine.
    fn process(&mut self, msg: &[u8]) {
        self.engine.add_bytes(&msg);
    }
}
/// Retrieve the output hash from everything which has been fed into this
/// `Sha256` digest thus far.
///
//
// FIXME: Once const generics land in Rust, we should genericise calling
// crypto_digest_get_digest in external::crypto_digest.
impl FixedOutput for Sha256 {
    // SHA2-256 produces a 32-byte digest.
    type OutputSize = U32;
    /// Consume the digest and return the finalized 32-byte hash.
    fn fixed_result(self) -> GenericArray<u8, Self::OutputSize> {
        // Finalization is delegated to Tor's C crypto_digest code via the
        // `external` crate.
        let buffer: [u8; DIGEST256_LEN] = get_256_bit_digest(self.engine);
        GenericArray::from(buffer)
    }
}
/// A SHA2-512 digest.
///
/// # C_RUST_COUPLED
///
/// * `crypto_digest_dup`
#[derive(Clone)]
pub struct Sha512 {
engine: CryptoDigest,
}
/// Construct a new, default instance of a `Sha512` hash digest function.
///
/// # Examples
///
/// ```rust,no_run
/// use crypto::digests::sha2::{Sha512, Digest};
///
/// let mut hasher: Sha512 = Sha512::default();
/// ```
///
/// # Returns
///
/// A new `Sha512` digest.
impl Default for Sha512 {
fn default() -> Sha512 {
Sha512 {
engine: CryptoDigest::new(Some(DigestAlgorithm::SHA2_512)),
}
}
}
impl BlockInput for Sha512 {
type BlockSize = BlockSize;
}
/// Feed `msg` into this digest's running state.
///
/// May be called repeatedly; each call appends more bytes to the hash input.
///
/// # Examples
///
/// ```rust,no_run
/// use crypto::digests::sha2::{Sha512, Digest};
///
/// let mut hasher: Sha512 = Sha512::default();
///
/// hasher.input(b"foo");
/// hasher.input(b"bar");
/// ```
impl Input for Sha512 {
    fn process(&mut self, msg: &[u8]) {
        // Forward the bytes straight to the underlying C digest context.
        self.engine.add_bytes(msg);
    }
}
/// Finalise this `Sha512` digest and return the accumulated hash.
///
/// Consumes the digest, so no further input may be added afterwards.
//
// FIXME: Once const generics land in Rust, we should genericise calling
// crypto_digest_get_digest in external::crypto_digest.
impl FixedOutput for Sha512 {
    type OutputSize = U64;

    fn fixed_result(self) -> GenericArray<u8, Self::OutputSize> {
        // Pull the 64-byte result out of the C context and copy it over.
        let digest_bytes: [u8; DIGEST512_LEN] = get_512_bit_digest(self.engine);
        GenericArray::clone_from_slice(&digest_bytes)
    }
}
#[cfg(test)]
mod test {
    #[cfg(feature = "test-c-from-rust")]
    use digest::Digest;
    #[cfg(feature = "test-c-from-rust")]
    use super::*;

    // NOTE(review): every test here is gated on the "test-c-from-rust"
    // feature — presumably because they call through to the C digest
    // implementation and need the C objects at link time; confirm against
    // the crate's linking notes in its Cargo.toml.

    /// Smoke test: constructing a default Sha256 does not panic.
    #[cfg(feature = "test-c-from-rust")]
    #[test]
    fn sha256_default() {
        let _: Sha256 = Sha256::default();
    }

    /// Hash "foobarbaz" incrementally and compare with a known SHA2-256 value.
    #[cfg(feature = "test-c-from-rust")]
    #[test]
    fn sha256_digest() {
        let mut h: Sha256 = Sha256::new();
        let mut result: [u8; DIGEST256_LEN] = [0u8; DIGEST256_LEN];
        // Known-answer digest of b"foobarbaz".
        let expected = [
            151, 223, 53, 136, 181, 163, 242, 75, 171, 195, 133, 27, 55, 47, 11, 167, 26, 157, 205,
            222, 212, 59, 20, 185, 208, 105, 97, 191, 193, 112, 125, 157,
        ];
        h.input(b"foo");
        h.input(b"bar");
        h.input(b"baz");
        result.copy_from_slice(h.fixed_result().as_slice());
        println!("{:?}", &result[..]);
        assert_eq!(result, expected);
    }

    /// Smoke test: constructing a default Sha512 does not panic.
    #[cfg(feature = "test-c-from-rust")]
    #[test]
    fn sha512_default() {
        let _: Sha512 = Sha512::default();
    }

    /// Hash "foobarbaz" incrementally and compare with a known SHA2-512 value.
    #[cfg(feature = "test-c-from-rust")]
    #[test]
    fn sha512_digest() {
        let mut h: Sha512 = Sha512::new();
        let mut result: [u8; DIGEST512_LEN] = [0u8; DIGEST512_LEN];
        // Known-answer digest of b"foobarbaz".
        let expected = [
            203, 55, 124, 16, 176, 245, 166, 44, 128, 54, 37, 167, 153, 217, 233, 8, 190, 69, 231,
            103, 245, 209, 71, 212, 116, 73, 7, 203, 5, 89, 122, 164, 237, 211, 41, 160, 175, 20,
            122, 221, 12, 244, 24, 30, 211, 40, 250, 30, 121, 148, 38, 88, 38, 179, 237, 61, 126,
            246, 240, 103, 202, 153, 24, 90,
        ];
        h.input(b"foo");
        h.input(b"bar");
        h.input(b"baz");
        result.copy_from_slice(h.fixed_result().as_slice());
        println!("{:?}", &result[..]);
        assert_eq!(&result[..], &expected[..]);
    }
}

View File

@ -1,46 +0,0 @@
// Copyright (c) 2018-2019, The Tor Project, Inc.
// Copyright (c) 2018, isis agora lovecruft
// See LICENSE for licensing information

//! Common cryptographic functions and utilities.
//!
//! # Hash Digests and eXtendable Output Functions (XOFs)
//!
//! The `digests` module contains submodules for specific hash digests
//! and extendable output functions.
//!
//! ```rust,no_run
//! use crypto::digests::sha2::*;
//!
//! let mut hasher: Sha256 = Sha256::default();
//! let mut result: [u8; 32] = [0u8; 32];
//!
//! hasher.input(b"foo");
//! hasher.input(b"bar");
//! hasher.input(b"baz");
//!
//! result.copy_from_slice(hasher.result().as_slice());
//!
//! assert!(result == [b'X'; DIGEST256_LEN]);
//! ```

// XXX: add missing docs
//#![deny(missing_docs)]

// External crates from cargo or TOR_RUST_DEPENDENCIES.
extern crate digest;
extern crate libc;
extern crate rand_core;

// External dependencies for tests.
#[cfg(test)]
extern crate rand as rand_crate;

// Our local crates.
extern crate external;
// The logging macros are only pulled in for non-test builds; tests use a
// pure-Rust RNG (see rand/rng.rs) and do not log through tor.
#[cfg(not(test))]
#[macro_use]
extern crate tor_log;

pub mod digests; // Unfortunately named "digests" plural to avoid name conflict with the digest crate
pub mod rand;

View File

@ -1,6 +0,0 @@
// Copyright (c) 2018-2019, The Tor Project, Inc.
// Copyright (c) 2018, isis agora lovecruft
// See LICENSE for licensing information

// Module index for the `rand` submodule: re-exports nothing itself, it only
// declares the RNG wrapper module.
// Internal dependencies
pub mod rng;

View File

@ -1,145 +0,0 @@
// Copyright (c) 2018-2019, The Tor Project, Inc.
// Copyright (c) 2018, isis agora lovecruft
// See LICENSE for licensing information

//! Wrappers for Tor's random number generators to provide implementations of
//! `rand_core` traits.

// This is the real implementation, in use in production, which calls into our C
// wrappers in /src/common/crypto_rand.c, which call into OpenSSL, system
// libraries, and make syscalls.
#[cfg(not(test))]
mod internal {
    use std::u64;

    use rand_core::impls::next_u32_via_fill;
    use rand_core::impls::next_u64_via_fill;
    use rand_core::CryptoRng;
    use rand_core::Error;
    use rand_core::RngCore;

    use external::c_tor_crypto_rand;
    use external::c_tor_crypto_seed_rng;
    use external::c_tor_crypto_strongest_rand;

    use tor_log::LogDomain;
    use tor_log::LogSeverity;

    /// Largest strong entropy request permitted.
    //
    // C_RUST_COUPLED: `MAX_STRONGEST_RAND_SIZE` /src/common/crypto_rand.c
    const MAX_STRONGEST_RAND_SIZE: usize = 256;

    /// A wrapper around OpenSSL's RNG.
    pub struct TorRng {
        // This private, zero-length field forces the struct to be treated the
        // same as its opaque C counterpart.
        _unused: [u8; 0],
    }

    /// Mark `TorRng` as being suitable for cryptographic purposes.
    impl CryptoRng for TorRng {}

    impl TorRng {
        // C_RUST_COUPLED: `crypto_seed_rng()` /src/common/crypto_rand.c
        #[allow(dead_code)]
        pub fn new() -> Self {
            // A failed seed is logged but does not abort construction.
            if !c_tor_crypto_seed_rng() {
                tor_log_msg!(
                    LogSeverity::Warn,
                    LogDomain::General,
                    "TorRng::from_seed()",
                    "The RNG could not be seeded!"
                );
            }
            // XXX also log success at info level —isis
            TorRng { _unused: [0u8; 0] }
        }
    }

    impl RngCore for TorRng {
        // C_RUST_COUPLED: `crypto_strongest_rand()` /src/common/crypto_rand.c
        fn next_u32(&mut self) -> u32 {
            next_u32_via_fill(self)
        }

        // C_RUST_COUPLED: `crypto_strongest_rand()` /src/common/crypto_rand.c
        fn next_u64(&mut self) -> u64 {
            next_u64_via_fill(self)
        }

        // C_RUST_COUPLED: `crypto_strongest_rand()` /src/common/crypto_rand.c
        fn fill_bytes(&mut self, dest: &mut [u8]) {
            c_tor_crypto_rand(dest);
        }

        // C_RUST_COUPLED: `crypto_strongest_rand()` /src/common/crypto_rand.c
        // fill_bytes() cannot report failure, so this always returns Ok.
        fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> {
            Ok(self.fill_bytes(dest))
        }
    }

    /// A CSPRNG which hashes together randomness from OpenSSL's RNG and entropy
    /// obtained from the operating system.
    pub struct TorStrongestRng {
        // This private, zero-length field forces the struct to be treated the
        // same as its opaque C counterpart.
        _unused: [u8; 0],
    }

    /// Mark `TorRng` as being suitable for cryptographic purposes.
    impl CryptoRng for TorStrongestRng {}

    impl TorStrongestRng {
        // C_RUST_COUPLED: `crypto_seed_rng()` /src/common/crypto_rand.c
        #[allow(dead_code)]
        pub fn new() -> Self {
            // A failed seed is logged but does not abort construction.
            if !c_tor_crypto_seed_rng() {
                tor_log_msg!(
                    LogSeverity::Warn,
                    LogDomain::General,
                    "TorStrongestRng::from_seed()",
                    "The RNG could not be seeded!"
                );
            }
            // XXX also log success at info level —isis
            TorStrongestRng { _unused: [0u8; 0] }
        }
    }

    impl RngCore for TorStrongestRng {
        // C_RUST_COUPLED: `crypto_strongest_rand()` /src/common/crypto_rand.c
        fn next_u32(&mut self) -> u32 {
            next_u32_via_fill(self)
        }

        // C_RUST_COUPLED: `crypto_strongest_rand()` /src/common/crypto_rand.c
        fn next_u64(&mut self) -> u64 {
            next_u64_via_fill(self)
        }

        // C_RUST_COUPLED: `crypto_strongest_rand()` /src/common/crypto_rand.c
        fn fill_bytes(&mut self, dest: &mut [u8]) {
            // NOTE(review): this bound is only enforced in debug builds
            // (debug_assert!); release builds rely on the C side's own check.
            debug_assert!(dest.len() <= MAX_STRONGEST_RAND_SIZE);
            c_tor_crypto_strongest_rand(dest);
        }

        // C_RUST_COUPLED: `crypto_strongest_rand()` /src/common/crypto_rand.c
        // fill_bytes() cannot report failure, so this always returns Ok.
        fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> {
            Ok(self.fill_bytes(dest))
        }
    }
}
// For testing, we expose a pure-Rust implementation.
#[cfg(test)]
mod internal {
    // It doesn't matter if we pretend ChaCha is a CSPRNG in tests.
    // Both RNG names alias the same deterministic generator under test.
    pub use rand_crate::ChaChaRng as TorRng;
    pub use rand_crate::ChaChaRng as TorStrongestRng;
}

// Finally, expose the public functionality of whichever appropriate internal
// module.
pub use self::internal::*;

View File

@ -1,20 +0,0 @@
# Cargo manifest for the `external` crate: Rust bindings to C code in tor.
[package]
authors = ["The Tor Project"]
version = "0.0.1"
name = "external"

[dependencies]
# libc is pinned exactly; sibling crates are path dependencies in src/rust/.
libc = "=0.2.39"
smartlist = { path = "../smartlist" }
tor_allocate = { path = "../tor_allocate" }

[lib]
name = "external"
path = "lib.rs"

[features]
# We have to define a feature here because doctests don't get cfg(test),
# and we need to disable some C dependencies when running the doctests
# because of the various linker issues. See
# https://github.com/rust-lang/rust/issues/45599
test_linking_hack = []

View File

@ -1,454 +0,0 @@
// Copyright (c) 2018-2019, The Tor Project, Inc.
// Copyright (c) 2018, isis agora lovecruft
// See LICENSE for licensing information

//! Bindings to external digest and XOF functions which live within
//! src/common/crypto_digest.[ch].
//!
//! We wrap our C implementations in src/common/crypto_digest.[ch] with more
//! Rusty types and interfaces in src/rust/crypto/digest/.

use std::process::abort;

use libc::c_char;
use libc::c_int;
use libc::size_t;
use libc::uint8_t;

use smartlist::Stringlist;

/// Length of the output of our message digest.
pub const DIGEST_LEN: usize = 20;

/// Length of the output of our second (improved) message digests. (For now
/// this is just sha256, but it could be any other 256-bit digest.)
pub const DIGEST256_LEN: usize = 32;

/// Length of the output of our 64-bit optimized message digests (SHA512).
pub const DIGEST512_LEN: usize = 64;

/// Length of a sha1 message digest when encoded in base32 with trailing = signs
/// removed.
pub const BASE32_DIGEST_LEN: usize = 32;

/// Length of a sha1 message digest when encoded in base64 with trailing = signs
/// removed.
pub const BASE64_DIGEST_LEN: usize = 27;

/// Length of a sha256 message digest when encoded in base64 with trailing =
/// signs removed.
pub const BASE64_DIGEST256_LEN: usize = 43;

/// Length of a sha512 message digest when encoded in base64 with trailing =
/// signs removed.
pub const BASE64_DIGEST512_LEN: usize = 86;

/// Length of hex encoding of SHA1 digest, not including final NUL.
pub const HEX_DIGEST_LEN: usize = 40;

/// Length of hex encoding of SHA256 digest, not including final NUL.
pub const HEX_DIGEST256_LEN: usize = 64;

/// Length of hex encoding of SHA512 digest, not including final NUL.
pub const HEX_DIGEST512_LEN: usize = 128;

/// Our C code uses an enum to declare the digest algorithm types which we know
/// about. However, because enums are implementation-defined in C, we can
/// neither work with them directly nor translate them into Rust enums.
/// Instead, we represent them as a u8 (under the assumption that we'll never
/// support more than 256 hash functions).
#[allow(non_camel_case_types)]
type digest_algorithm_t = u8;

// These values must stay in lock-step with the C enum in crypto_digest.h.
const DIGEST_SHA1: digest_algorithm_t = 0;
const DIGEST_SHA256: digest_algorithm_t = 1;
const DIGEST_SHA512: digest_algorithm_t = 2;
const DIGEST_SHA3_256: digest_algorithm_t = 3;
const DIGEST_SHA3_512: digest_algorithm_t = 4;

/// The number of hash digests we produce for a `common_digests_t`.
///
/// We can't access these from Rust, because their definitions in C require
/// introspecting the `digest_algorithm_t` typedef, which is an enum, so we have
/// to redefine them here.
const N_COMMON_DIGEST_ALGORITHMS: usize = DIGEST_SHA256 as usize + 1;
/// A digest function.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
#[allow(non_camel_case_types)]
struct crypto_digest_t {
    // This private, zero-length field forces the struct to be treated the same
    // as its opaque C counterpart.
    _unused: [u8; 0],
}

/// An eXtendible Output Function (XOF).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
#[allow(non_camel_case_types)]
struct crypto_xof_t {
    // This private, zero-length field forces the struct to be treated the same
    // as its opaque C counterpart.
    _unused: [u8; 0],
}

/// A set of all the digests we commonly compute, taken on a single
/// string. Any digests that are shorter than 512 bits are right-padded
/// with 0 bits.
///
/// Note that this representation wastes 44 bytes for the SHA1 case, so
/// don't use it for anything where we need to allocate a whole bunch at
/// once.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
#[allow(non_camel_case_types)]
struct common_digests_t {
    // Layout mirrors the C struct; see the layout tests at the bottom of
    // this file.
    pub d: [[c_char; N_COMMON_DIGEST_ALGORITHMS]; DIGEST256_LEN],
}

/// A `smartlist_t` is just an alias for the `#[repr(C)]` type `Stringlist`, to
/// make it more clear that we're working with a smartlist which is owned by C.
#[allow(non_camel_case_types)]
// BINDGEN_GENERATED: This type isn't actually bindgen generated, but the code
// below it which uses it is. As such, this comes up as "dead code" as well.
#[allow(dead_code)]
type smartlist_t = Stringlist;
/// All of the external functions from `src/common/crypto_digest.h`.
///
/// These are kept private because they should be wrapped with Rust to make their usage safer.
//
// BINDGEN_GENERATED: These definitions were generated with bindgen and cleaned
// up manually. As such, there are more bindings than are likely necessary or
// which are in use.
#[allow(dead_code)]
extern "C" {
    fn crypto_digest(digest: *mut c_char, m: *const c_char, len: size_t) -> c_int;
    fn crypto_digest256(
        digest: *mut c_char,
        m: *const c_char,
        len: size_t,
        algorithm: digest_algorithm_t,
    ) -> c_int;
    fn crypto_digest512(
        digest: *mut c_char,
        m: *const c_char,
        len: size_t,
        algorithm: digest_algorithm_t,
    ) -> c_int;
    fn crypto_common_digests(ds_out: *mut common_digests_t, m: *const c_char, len: size_t)
        -> c_int;
    fn crypto_digest_smartlist_prefix(
        digest_out: *mut c_char,
        len_out: size_t,
        prepend: *const c_char,
        lst: *const smartlist_t,
        append: *const c_char,
        alg: digest_algorithm_t,
    );
    fn crypto_digest_smartlist(
        digest_out: *mut c_char,
        len_out: size_t,
        lst: *const smartlist_t,
        append: *const c_char,
        alg: digest_algorithm_t,
    );
    fn crypto_digest_algorithm_get_name(alg: digest_algorithm_t) -> *const c_char;
    fn crypto_digest_algorithm_get_length(alg: digest_algorithm_t) -> size_t;
    fn crypto_digest_algorithm_parse_name(name: *const c_char) -> c_int;
    fn crypto_digest_new() -> *mut crypto_digest_t;
    fn crypto_digest256_new(algorithm: digest_algorithm_t) -> *mut crypto_digest_t;
    fn crypto_digest512_new(algorithm: digest_algorithm_t) -> *mut crypto_digest_t;
    fn crypto_digest_free_(digest: *mut crypto_digest_t);
    fn crypto_digest_add_bytes(digest: *mut crypto_digest_t, data: *const c_char, len: size_t);
    fn crypto_digest_get_digest(digest: *mut crypto_digest_t, out: *mut c_char, out_len: size_t);
    fn crypto_digest_dup(digest: *const crypto_digest_t) -> *mut crypto_digest_t;
    fn crypto_digest_assign(into: *mut crypto_digest_t, from: *const crypto_digest_t);
    fn crypto_hmac_sha256(
        hmac_out: *mut c_char,
        key: *const c_char,
        key_len: size_t,
        msg: *const c_char,
        msg_len: size_t,
    );
    fn crypto_mac_sha3_256(
        mac_out: *mut uint8_t,
        len_out: size_t,
        key: *const uint8_t,
        key_len: size_t,
        msg: *const uint8_t,
        msg_len: size_t,
    );
    fn crypto_xof_new() -> *mut crypto_xof_t;
    fn crypto_xof_add_bytes(xof: *mut crypto_xof_t, data: *const uint8_t, len: size_t);
    fn crypto_xof_squeeze_bytes(xof: *mut crypto_xof_t, out: *mut uint8_t, len: size_t);
    fn crypto_xof_free(xof: *mut crypto_xof_t);
}
/// A Rust-side wrapper for the C `digest_algorithm_t` values we support.
pub enum DigestAlgorithm {
    SHA2_256,
    SHA2_512,
    SHA3_256,
    SHA3_512,
}

/// Convert to the raw `digest_algorithm_t` constant understood by the C code.
impl From<DigestAlgorithm> for digest_algorithm_t {
    fn from(digest: DigestAlgorithm) -> digest_algorithm_t {
        // Exhaustive mapping; note SHA1 is deliberately absent from the
        // Rust-facing enum.
        match digest {
            DigestAlgorithm::SHA3_512 => DIGEST_SHA3_512,
            DigestAlgorithm::SHA3_256 => DIGEST_SHA3_256,
            DigestAlgorithm::SHA2_512 => DIGEST_SHA512,
            DigestAlgorithm::SHA2_256 => DIGEST_SHA256,
        }
    }
}
/// A wrapper around a mutable pointer to a `crypto_digest_t`.
pub struct CryptoDigest(*mut crypto_digest_t);

/// Explicitly copy the state of a `CryptoDigest` hash digest context.
///
/// # C_RUST_COUPLED
///
/// * `crypto_digest_dup`
impl Clone for CryptoDigest {
    fn clone(&self) -> CryptoDigest {
        let digest: *mut crypto_digest_t;

        unsafe {
            digest = crypto_digest_dup(self.0 as *const crypto_digest_t);
        }

        // See the note in the implementation of CryptoDigest for the
        // reasoning for `abort()` here.
        if digest.is_null() {
            abort();
        }

        CryptoDigest(digest)
    }
}
impl CryptoDigest {
    /// A wrapper to call one of the C functions `crypto_digest_new`,
    /// `crypto_digest256_new`, or `crypto_digest512_new`.
    ///
    /// # Inputs
    ///
    /// * `algorithm`: which digest to create; `None` selects the C default
    ///   (`crypto_digest_new`, i.e. the SHA1 constructor).
    ///
    /// # Warnings
    ///
    /// This function will `abort()` the entire process in an "abnormal" fashion,
    /// i.e. not unwinding this or any other thread's stack, running any
    /// destructors, or calling any panic/exit hooks) if `tor_malloc()` (called in
    /// `crypto_digest256_new()`) is unable to allocate memory.
    ///
    /// # Returns
    ///
    /// A new `CryptoDigest`, which is a wrapper around a opaque representation
    /// of a `crypto_digest_t`. The underlying `crypto_digest_t` _MUST_ only
    /// ever be handled via a raw pointer, and never introspected.
    ///
    /// # C_RUST_COUPLED
    ///
    /// * `crypto_digest_new`
    /// * `crypto_digest256_new`
    /// * `crypto_digest512_new`
    /// * `tor_malloc` (called by `crypto_digest256_new`, but we make
    /// assumptions about its behaviour and return values here)
    pub fn new(algorithm: Option<DigestAlgorithm>) -> CryptoDigest {
        let digest: *mut crypto_digest_t;

        if algorithm.is_none() {
            unsafe {
                digest = crypto_digest_new();
            }
        } else {
            let algo: digest_algorithm_t = algorithm.unwrap().into(); // can't fail because it's Some

            unsafe {
                // XXX This is a pretty awkward API to use from Rust...
                // Dispatch to the size-appropriate C constructor for each
                // algorithm constant.
                digest = match algo {
                    DIGEST_SHA1 => crypto_digest_new(),
                    DIGEST_SHA256 => crypto_digest256_new(DIGEST_SHA256),
                    DIGEST_SHA3_256 => crypto_digest256_new(DIGEST_SHA3_256),
                    DIGEST_SHA512 => crypto_digest512_new(DIGEST_SHA512),
                    DIGEST_SHA3_512 => crypto_digest512_new(DIGEST_SHA3_512),
                    _ => abort(),
                }
            }
        }

        // In our C code, `crypto_digest*_new()` allocates memory with
        // `tor_malloc()`. In `tor_malloc()`, if the underlying malloc
        // implementation fails to allocate the requested memory and returns a
        // NULL pointer, we call `exit(1)`. In the case that this `exit(1)` is
        // called within a worker, be that a process or a thread, the inline
        // comments within `tor_malloc()` mention "that's ok, since the parent
        // will run out of memory soon anyway". However, if it takes long
        // enough for the worker to die, and it manages to return a NULL pointer
        // to our Rust code, our Rust is now in an irreparably broken state and
        // may exhibit undefined behaviour. An even worse scenario, if/when we
        // have parent/child processes/threads controlled by Rust, would be that
        // the UB contagion in Rust manages to spread to other children before
        // the entire process (hopefully terminates).
        //
        // However, following the assumptions made in `tor_malloc()` that
        // calling `exit(1)` in a child is okay because the parent will
        // eventually run into the same errors, and also to stymie any UB
        // contagion in the meantime, we call abort!() here to terminate the
        // entire program immediately.
        if digest.is_null() {
            abort();
        }

        CryptoDigest(digest)
    }

    /// A wrapper to call the C function `crypto_digest_add_bytes`.
    ///
    /// # Inputs
    ///
    /// * `bytes`: a byte slice of bytes to be added into this digest.
    ///
    /// # C_RUST_COUPLED
    ///
    /// * `crypto_digest_add_bytes`
    pub fn add_bytes(&self, bytes: &[u8]) {
        unsafe {
            crypto_digest_add_bytes(
                self.0 as *mut crypto_digest_t,
                bytes.as_ptr() as *const c_char,
                bytes.len() as size_t,
            )
        }
    }
}
/// Free the underlying C `crypto_digest_t` when the wrapper goes out of scope.
impl Drop for CryptoDigest {
    fn drop(&mut self) {
        unsafe {
            crypto_digest_free_(self.0 as *mut crypto_digest_t);
        }
    }
}
/// Get the 256-bit digest output of a `crypto_digest_t`.
///
/// # Inputs
///
/// * `digest`: A `CryptoDigest` which wraps either a `DIGEST_SHA256` or a
/// `DIGEST_SHA3_256`.
///
/// # Warning
///
/// Calling this function with a `CryptoDigest` which is neither SHA2-256 or
/// SHA3-256 is a programming error. Since we cannot introspect the opaque
/// struct from Rust, however, there is no way for us to check that the correct
/// one is being passed in. That is up to you, dear programmer. If you mess
/// up, you will get a incorrectly-sized hash digest in return, and it will be
/// your fault. Don't do that.
///
/// # Returns
///
/// A 256-bit hash digest, as a `[u8; 32]`.
///
/// # C_RUST_COUPLED
///
/// * `crypto_digest_get_digest`
/// * `DIGEST256_LEN`
//
// FIXME: Once const generics land in Rust, we should genericise calling
// crypto_digest_get_digest w.r.t. output array size.
pub fn get_256_bit_digest(digest: CryptoDigest) -> [u8; DIGEST256_LEN] {
    let mut buffer: [u8; DIGEST256_LEN] = [0u8; DIGEST256_LEN];

    unsafe {
        // The C side writes exactly DIGEST256_LEN bytes into `buffer`.
        //
        // Fix: the previous version checked `buffer.as_ptr().is_null()` after
        // the call and abort()ed — but a pointer to a stack array is never
        // null (slice::as_ptr guarantees a non-null pointer), so the check was
        // dead code and has been removed.
        crypto_digest_get_digest(
            digest.0,
            buffer.as_mut_ptr() as *mut c_char,
            DIGEST256_LEN as size_t,
        );
    }
    buffer
}
/// Get the 512-bit digest output of a `crypto_digest_t`.
///
/// # Inputs
///
/// * `digest`: A `CryptoDigest` which wraps either a `DIGEST_SHA512` or a
/// `DIGEST_SHA3_512`.
///
/// # Warning
///
/// Calling this function with a `CryptoDigest` which is neither SHA2-512 or
/// SHA3-512 is a programming error. Since we cannot introspect the opaque
/// struct from Rust, however, there is no way for us to check that the correct
/// one is being passed in. That is up to you, dear programmer. If you mess
/// up, you will get a incorrectly-sized hash digest in return, and it will be
/// your fault. Don't do that.
///
/// # Returns
///
/// A 512-bit hash digest, as a `[u8; 64]`.
///
/// # C_RUST_COUPLED
///
/// * `crypto_digest_get_digest`
/// * `DIGEST512_LEN`
//
// FIXME: Once const generics land in Rust, we should genericise calling
// crypto_digest_get_digest w.r.t. output array size.
pub fn get_512_bit_digest(digest: CryptoDigest) -> [u8; DIGEST512_LEN] {
    let mut buffer: [u8; DIGEST512_LEN] = [0u8; DIGEST512_LEN];

    unsafe {
        // The C side writes exactly DIGEST512_LEN bytes into `buffer`.
        //
        // Fix: the previous version checked `buffer.as_ptr().is_null()` after
        // the call and abort()ed — but a pointer to a stack array is never
        // null (slice::as_ptr guarantees a non-null pointer), so the check was
        // dead code and has been removed.
        crypto_digest_get_digest(
            digest.0,
            buffer.as_mut_ptr() as *mut c_char,
            DIGEST512_LEN as size_t,
        );
    }
    buffer
}
#[cfg(test)]
mod test {
    use super::*;

    // These layout tests mirror what bindgen emits: they pin the size and
    // alignment of our hand-maintained `#[repr(C)]` mirrors so that a drift
    // from the C definitions is caught at test time.

    #[test]
    fn test_layout_common_digests_t() {
        assert_eq!(
            ::std::mem::size_of::<common_digests_t>(),
            64usize,
            concat!("Size of: ", stringify!(common_digests_t))
        );
        assert_eq!(
            ::std::mem::align_of::<common_digests_t>(),
            1usize,
            concat!("Alignment of ", stringify!(common_digests_t))
        );
    }

    #[test]
    fn test_layout_crypto_digest_t() {
        assert_eq!(
            ::std::mem::size_of::<crypto_digest_t>(),
            0usize,
            concat!("Size of: ", stringify!(crypto_digest_t))
        );
        assert_eq!(
            ::std::mem::align_of::<crypto_digest_t>(),
            1usize,
            concat!("Alignment of ", stringify!(crypto_digest_t))
        );
    }
}

View File

@ -1,84 +0,0 @@
// Copyright (c) 2018-2019, The Tor Project, Inc.
// Copyright (c) 2018, isis agora lovecruft
// See LICENSE for licensing information

//! Bindings to external (P)RNG interfaces and utilities in
//! src/common/crypto_rand.[ch].
//!
//! We wrap our C implementations in src/common/crypto_rand.[ch] here in order
//! to provide wrappers with native Rust types, and then provide more Rusty
//! types and and trait implementations in src/rust/crypto/rand/.

use std::time::Duration;

use libc::c_double;
use libc::c_int;
use libc::size_t;
use libc::time_t;
use libc::uint8_t;

// Raw declarations for the C RNG entry points; each is wrapped by a safe
// `c_tor_*` function below.
extern "C" {
    fn crypto_seed_rng() -> c_int;
    fn crypto_rand(out: *mut uint8_t, out_len: size_t);
    fn crypto_strongest_rand(out: *mut uint8_t, out_len: size_t);
    fn crypto_rand_time_range(min: time_t, max: time_t) -> time_t;
    fn crypto_rand_double() -> c_double;
}
/// Seed OpenSSL's random number generator with bytes from the operating
/// system.
///
/// # Returns
///
/// `true` on success; `false` on failure.
pub fn c_tor_crypto_seed_rng() -> bool {
let ret: c_int;
unsafe {
ret = crypto_seed_rng();
}
match ret {
0 => return true,
_ => return false,
}
}
/// Fill the bytes of `dest` with random data.
///
/// The entire slice is overwritten; the C side receives the slice's pointer
/// and length directly.
pub fn c_tor_crypto_rand(dest: &mut [u8]) {
    unsafe {
        crypto_rand(dest.as_mut_ptr(), dest.len() as size_t);
    }
}
/// Fill the bytes of `dest` with "strong" random data by hashing
/// together randomness obtained from OpenSSL's RNG and the operating
/// system.
pub fn c_tor_crypto_strongest_rand(dest: &mut [u8]) {
    // We'll let the C side panic if the len is larger than
    // MAX_STRONGEST_RAND_SIZE, rather than potentially panicking here. A
    // paranoid caller should assert on the length of dest *before* calling this
    // function.
    unsafe {
        crypto_strongest_rand(dest.as_mut_ptr(), dest.len() as size_t);
    }
}
/// Get a random time, in seconds since the Unix Epoch.
///
/// The bounds are passed to the C-side RNG as `time_t` second counts derived
/// from `min` and `max`.
///
/// # Returns
///
/// A `std::time::Duration` of seconds since the Unix Epoch.
pub fn c_tor_crypto_rand_time_range(min: &Duration, max: &Duration) -> Duration {
    let secs: time_t = unsafe {
        crypto_rand_time_range(min.as_secs() as time_t, max.as_secs() as time_t)
    };
    Duration::from_secs(secs as u64)
}
/// Return a pseudorandom 64-bit float, chosen uniformly from the range [0.0, 1.0).
///
/// Thin wrapper over the C `crypto_rand_double()`.
pub fn c_tor_crypto_rand_double() -> f64 {
    unsafe { crypto_rand_double() }
}

View File

@ -1,37 +0,0 @@
// Copyright (c) 2016-2019, The Tor Project, Inc. */
// See LICENSE for licensing information */

use libc::{c_char, c_int};
use std::ffi::CString;

// Raw declaration for the C version comparator; wrapped safely below.
extern "C" {
    fn tor_version_as_new_as(platform: *const c_char, cutoff: *const c_char) -> c_int;
}
/// Wrap calls to tor_version_as_new_as, defined in routerparse.c
pub fn c_tor_version_as_new_as(platform: &str, cutoff: &str) -> bool {
// CHK: These functions should log a warning if an error occurs. This
// can be added when integration with tor's logger is added to rust
let c_platform = match CString::new(platform) {
Ok(n) => n,
Err(_) => return false,
};
let c_cutoff = match CString::new(cutoff) {
Ok(n) => n,
Err(_) => return false,
};
let result: c_int = unsafe { tor_version_as_new_as(c_platform.as_ptr(), c_cutoff.as_ptr()) };
result == 1
}
extern "C" {
    fn tor_is_using_nss() -> c_int;
}

/// Return true if Tor was built to use NSS.
pub fn c_tor_is_using_nss() -> bool {
    // The C side returns a boolean as an int: any non-zero value means "yes".
    let ret = unsafe { tor_is_using_nss() };
    ret != 0
}

View File

@ -1,19 +0,0 @@
//! Copyright (c) 2016-2019, The Tor Project, Inc. */
//! See LICENSE for licensing information */

//! Interface for external calls to tor C ABI
//!
//! The purpose of this module is to provide a clean interface for when Rust
//! modules need to interact with functionality in tor C code rather than each
//! module implementing this functionality repeatedly.

extern crate libc;
extern crate tor_allocate;

extern crate smartlist;

// crypto_digest keeps its own namespace; the other two modules are flattened
// into this crate's root via the re-exports below.
pub mod crypto_digest;
mod crypto_rand;
mod external;

pub use crypto_rand::*;
pub use external::*;

View File

@ -1,41 +0,0 @@
# Automake fragment for the Rust sources; cargo (not automake) drives the
# actual Rust build via src/rust/tor_rust/include.am.
include src/rust/tor_rust/include.am

# None of these files are compiled by automake itself; listing them in
# EXTRA_DIST ensures `make dist` still ships them in release tarballs.
EXTRA_DIST +=\
	src/rust/build.rs \
	src/rust/Cargo.toml \
	src/rust/Cargo.lock \
	src/rust/.cargo/config.in \
	src/rust/crypto/Cargo.toml \
	src/rust/crypto/lib.rs \
	src/rust/crypto/digests/mod.rs \
	src/rust/crypto/digests/sha2.rs \
	src/rust/crypto/rand/mod.rs \
	src/rust/crypto/rand/rng.rs \
	src/rust/external/Cargo.toml \
	src/rust/external/crypto_digest.rs \
	src/rust/external/crypto_rand.rs \
	src/rust/external/external.rs \
	src/rust/external/lib.rs \
	src/rust/protover/Cargo.toml \
	src/rust/protover/errors.rs \
	src/rust/protover/protoset.rs \
	src/rust/protover/ffi.rs \
	src/rust/protover/lib.rs \
	src/rust/protover/protover.rs \
	src/rust/protover/tests/protover.rs \
	src/rust/smartlist/Cargo.toml \
	src/rust/smartlist/lib.rs \
	src/rust/smartlist/smartlist.rs \
	src/rust/tor_allocate/Cargo.toml \
	src/rust/tor_allocate/lib.rs \
	src/rust/tor_allocate/tor_allocate.rs \
	src/rust/tor_log/Cargo.toml \
	src/rust/tor_log/lib.rs \
	src/rust/tor_log/tor_log.rs \
	src/rust/tor_rust/Cargo.toml \
	src/rust/tor_rust/include.am \
	src/rust/tor_rust/lib.rs \
	src/rust/tor_util/Cargo.toml \
	src/rust/tor_util/ffi.rs \
	src/rust/tor_util/lib.rs \
	src/rust/tor_util/strings.rs

View File

@ -1,33 +0,0 @@
# Cargo manifest for the `protover` crate: Rust protocol-version parsing.
[package]
authors = ["The Tor Project"]
version = "0.0.1"
name = "protover"

[features]
# We have to define a feature here because doctests don't get cfg(test),
# and we need to disable some C dependencies when running the doctests
# because of the various linker issues. See
# https://github.com/rust-lang/rust/issues/45599
test_linking_hack = []

[dependencies]
# libc is pinned exactly; all other dependencies are sibling path crates.
libc = "=0.2.39"

[dependencies.smartlist]
path = "../smartlist"

[dependencies.external]
path = "../external"

[dependencies.tor_util]
path = "../tor_util"

[dependencies.tor_allocate]
path = "../tor_allocate"

[dependencies.tor_log]
path = "../tor_log"

[lib]
name = "protover"
path = "lib.rs"
View File

@ -1,57 +0,0 @@
// Copyright (c) 2018-2019, The Tor Project, Inc.
// Copyright (c) 2018, isis agora lovecruft
// See LICENSE for licensing information

//! Various errors which may occur during protocol version parsing.

use std::fmt;
use std::fmt::Display;

/// All errors which may occur during protover parsing routines.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
#[allow(missing_docs)] // See Display impl for error descriptions
pub enum ProtoverError {
    Overlap,
    LowGreaterThanHigh,
    Unparseable,
    ExceedsMax,
    ExceedsExpansionLimit,
    UnknownProtocol,
    ExceedsNameLimit,
    InvalidProtocol,
}

/// Descriptive error messages for `ProtoverError` variants.
impl Display for ProtoverError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Map each variant to its fixed human-readable description, then emit
        // it with a single write!() call.
        let description: &'static str = match *self {
            ProtoverError::Overlap => {
                "Two or more (low, high) protover ranges would overlap once expanded."
            }
            ProtoverError::LowGreaterThanHigh => {
                "The low in a (low, high) protover range was greater than high."
            }
            ProtoverError::Unparseable => "The protover string was unparseable.",
            ProtoverError::ExceedsMax => {
                "The high in a (low, high) protover range exceeds 63."
            }
            ProtoverError::ExceedsExpansionLimit => {
                "The protover string would exceed the maximum expansion limit."
            }
            ProtoverError::UnknownProtocol => {
                "A protocol in the protover string we attempted to parse is unknown."
            }
            ProtoverError::ExceedsNameLimit => "An unrecognised protocol name was too long.",
            ProtoverError::InvalidProtocol => "A protocol name includes invalid characters.",
        };
        write!(f, "{}", description)
    }
}

View File

@ -1,247 +0,0 @@
// Copyright (c) 2016-2019, The Tor Project, Inc. */
// See LICENSE for licensing information */

//! FFI functions, only to be called from C.
//!
//! Equivalent C versions of this api are in `protover.c`

use libc::{c_char, c_int, uint32_t};
use std::ffi::CStr;

use smartlist::*;
use tor_allocate::allocate_and_copy_string;

use errors::ProtoverError;
use protover::*;
/// Translate C enums to Rust Proto enums, using the integer value of the C
/// enum to map to its associated Rust enum.
///
/// Returns `Err(ProtoverError::UnknownProtocol)` for any value outside 0..=11.
///
/// C_RUST_COUPLED: protover.h `protocol_type_t`
//
// NOTE(review): the numeric values here must stay in lock-step with the
// ordering of the C `protocol_type_t` enum in protover.h.
fn translate_to_rust(c_proto: uint32_t) -> Result<Protocol, ProtoverError> {
    match c_proto {
        0 => Ok(Protocol::Link),
        1 => Ok(Protocol::LinkAuth),
        2 => Ok(Protocol::Relay),
        3 => Ok(Protocol::DirCache),
        4 => Ok(Protocol::HSDir),
        5 => Ok(Protocol::HSIntro),
        6 => Ok(Protocol::HSRend),
        7 => Ok(Protocol::Desc),
        8 => Ok(Protocol::Microdesc),
        9 => Ok(Protocol::Cons),
        10 => Ok(Protocol::Padding),
        11 => Ok(Protocol::FlowCtrl),
        _ => Err(ProtoverError::UnknownProtocol),
    }
}
/// Provide an interface for C to translate arguments and return types for
/// protover::all_supported
///
/// # Returns
///
/// 1 when every protocol in `c_relay_version` is supported — and also on a
/// NULL, non-UTF-8 or unparseable input; 0 when at least one protocol is
/// unsupported, in which case the missing ones are written to `*missing_out`
/// (if non-NULL) as a C string allocated via `tor_allocate`.
#[no_mangle]
pub extern "C" fn protover_all_supported(
    c_relay_version: *const c_char,
    missing_out: *mut *mut c_char,
) -> c_int {
    if c_relay_version.is_null() {
        return 1;
    }

    // Require an unsafe block to read the version from a C string. The pointer
    // is checked above to ensure it is not null.
    let c_str: &CStr = unsafe { CStr::from_ptr(c_relay_version) };

    let relay_version = match c_str.to_str() {
        Ok(n) => n,
        Err(_) => return 1,
    };

    // Unvalidated parse: accepts arbitrary-length names so we can report them.
    let relay_proto_entry: UnvalidatedProtoEntry =
        match UnvalidatedProtoEntry::from_str_any_len(relay_version) {
            Ok(n) => n,
            Err(_) => return 1,
        };

    if let Some(unsupported) = relay_proto_entry.all_supported() {
        if missing_out.is_null() {
            return 0;
        }
        let ptr = allocate_and_copy_string(&unsupported.to_string());
        unsafe { *missing_out = ptr };

        return 0;
    }

    1
}
/// Provide an interface for C to translate arguments and return types for
/// protover::list_supports_protocol
///
/// # Returns
///
/// 1 if `c_protocol_list` parses and advertises `version` of `c_protocol`;
/// 0 otherwise (including NULL, non-UTF-8 or unparseable input, and unknown
/// `c_protocol` values).
#[no_mangle]
pub extern "C" fn protocol_list_supports_protocol(
    c_protocol_list: *const c_char,
    c_protocol: uint32_t,
    version: uint32_t,
) -> c_int {
    if c_protocol_list.is_null() {
        return 0;
    }

    // Require an unsafe block to read the version from a C string. The pointer
    // is checked above to ensure it is not null.
    let c_str: &CStr = unsafe { CStr::from_ptr(c_protocol_list) };

    let protocol_list = match c_str.to_str() {
        Ok(n) => n,
        Err(_) => return 0,
    };

    let proto_entry: UnvalidatedProtoEntry = match protocol_list.parse() {
        Ok(n) => n,
        Err(_) => return 0,
    };

    let protocol: UnknownProtocol = match translate_to_rust(c_protocol) {
        Ok(n) => n.into(),
        Err(_) => return 0,
    };

    if proto_entry.supports_protocol(&protocol, &version) {
        1
    } else {
        0
    }
}
/// Return 0 when the whole of `c_protocol_list` parses under the strict
/// (length-limited) parser, and 1 when parsing fails for any reason —
/// over-long protocol names included — or when the input is NULL or
/// non-UTF-8.
#[no_mangle]
pub extern "C" fn protover_contains_long_protocol_names_(c_protocol_list: *const c_char) -> c_int {
    if c_protocol_list.is_null() {
        return 1;
    }
    // Require an unsafe block to read the version from a C string. The pointer
    // is checked above to ensure it is not null.
    let c_str: &CStr = unsafe { CStr::from_ptr(c_protocol_list) };
    let protocol_list = match c_str.to_str() {
        Ok(n) => n,
        Err(_) => return 1,
    };
    match protocol_list.parse::<UnvalidatedProtoEntry>() {
        Ok(_) => 0,
        Err(_) => 1,
    }
}
/// Provide an interface for C to translate arguments and return types for
/// protover::list_supports_protocol_or_later
///
/// Returns 1 if `c_protocol_list` advertises `version` or a later version of
/// the protocol numbered `c_protocol` (via `supports_protocol_or_later`),
/// and 0 otherwise — including for NULL, non-UTF-8, unparseable, or
/// unknown-protocol inputs.
#[no_mangle]
pub extern "C" fn protocol_list_supports_protocol_or_later(
    c_protocol_list: *const c_char,
    c_protocol: uint32_t,
    version: uint32_t,
) -> c_int {
    if c_protocol_list.is_null() {
        return 0;
    }
    // Require an unsafe block to read the version from a C string. The pointer
    // is checked above to ensure it is not null.
    let c_str: &CStr = unsafe { CStr::from_ptr(c_protocol_list) };
    let protocol_list = match c_str.to_str() {
        Ok(n) => n,
        Err(_) => return 0,
    };
    // Only known (enumerated) protocols can be queried this way.
    let protocol = match translate_to_rust(c_protocol) {
        Ok(n) => n,
        Err(_) => return 0,
    };
    let proto_entry: UnvalidatedProtoEntry = match protocol_list.parse() {
        Ok(n) => n,
        Err(_) => return 0,
    };
    if proto_entry.supports_protocol_or_later(&protocol.into(), &version) {
        return 1;
    }
    0
}
/// Provide an interface for C to translate arguments and return types for
/// protover::get_supported_protocols
///
/// The returned pointer refers to a `&'static CStr` and therefore stays
/// valid for the lifetime of the program; the caller must not free it.
#[no_mangle]
pub extern "C" fn protover_get_supported_protocols() -> *const c_char {
    get_supported_protocols_cstr().as_ptr()
}
/// Provide an interface for C to translate arguments and return types for
/// protover::compute_vote
///
/// Parses each string in the C smartlist `list` as a protocol-version entry
/// (silently skipping unparseable ones) and delegates to
/// `ProtoverVote::compute` with `threshold`.  Returns a newly allocated C
/// string owned by the caller; a NULL `list` yields an empty string.
//
// Why is the threshold a signed integer? —isis
#[no_mangle]
pub extern "C" fn protover_compute_vote(list: *const Stringlist, threshold: c_int) -> *mut c_char {
    if list.is_null() {
        return allocate_and_copy_string("");
    }
    // Dereference of raw pointer requires an unsafe block. The pointer is
    // checked above to ensure it is not null.
    let data: Vec<String> = unsafe { (*list).get_list() };
    // NOTE(review): a negative `threshold` wraps around to a huge usize here;
    // callers presumably always pass a non-negative count — confirm.
    let hold: usize = threshold as usize;
    let mut proto_entries: Vec<UnvalidatedProtoEntry> = Vec::new();
    // Unparseable votes are skipped rather than failing the whole computation.
    for datum in data {
        let entry: UnvalidatedProtoEntry = match datum.parse() {
            Ok(n) => n,
            Err(_) => continue,
        };
        proto_entries.push(entry);
    }
    let vote: UnvalidatedProtoEntry = ProtoverVote::compute(&proto_entries, &hold);
    allocate_and_copy_string(&vote.to_string())
}
/// Provide an interface for C to translate arguments and return types for
/// protover::is_supported_here
///
/// Returns 1 when this instance supports `version` of the protocol numbered
/// `c_protocol`, and 0 otherwise (including for unknown protocol numbers).
#[no_mangle]
pub extern "C" fn protover_is_supported_here(c_protocol: uint32_t, version: uint32_t) -> c_int {
    match translate_to_rust(c_protocol) {
        // Known protocol: ask the Rust implementation about this version.
        Ok(protocol) if is_supported_here(&protocol, &version) => 1,
        // Unknown protocol number, or version not supported.
        _ => 0,
    }
}
/// Provide an interface for C to translate arguments and return types for
/// protover::compute_for_old_tor
///
/// Returns a `'static` C string of the protocols to assume for the given tor
/// `version` string; a NULL or non-UTF-8 input yields a static empty string.
/// The caller must not free the returned pointer.
#[no_mangle]
pub extern "C" fn protover_compute_for_old_tor(version: *const c_char) -> *const c_char {
    let supported: &'static CStr;
    let empty: &'static CStr;
    empty = cstr!("");
    if version.is_null() {
        return empty.as_ptr();
    }
    // Require an unsafe block to read the version from a C string. The pointer
    // is checked above to ensure it is not null.
    let c_str: &CStr = unsafe { CStr::from_ptr(version) };
    let version = match c_str.to_str() {
        Ok(n) => n,
        Err(_) => return empty.as_ptr(),
    };
    supported = compute_for_old_tor_cstr(&version);
    supported.as_ptr()
}

View File

@ -1,40 +0,0 @@
//! Copyright (c) 2016-2019, The Tor Project, Inc.
//! See LICENSE for licensing information
//!
//! Versioning information for different pieces of the Tor protocol.
//!
//! The description below is adapted from the C protover implementation,
//! which is currently enabled by default. We are in the process of
//! experimenting with Rust in tor, and this protover module is implemented
//! to help achieve this goal.
//!
//! Starting in version 0.2.9.3-alpha, Tor places separate version numbers on
//! each of the different components of its protocol. Relays use these numbers
//! to advertise what versions of the protocols they can support, and clients
//! use them to find what they can ask a given relay to do. Authorities vote
//! on the supported protocol versions for each relay, and also vote on
//! which protocols you should have to support in order to be on the Tor
//! network. All Tor instances use these required/recommended protocol versions
//! to tell what level of support for recent protocols each relay has, and
//! to decide whether they should be running given their current protocols.
//!
//! The main advantage of these protocol version numbers over using Tor
//! version numbers is that they allow different implementations of the Tor
//! protocols to develop independently, without having to claim compatibility
//! with specific versions of Tor.

// XXX: add missing docs
//#![deny(missing_docs)]

extern crate external;
extern crate libc;
extern crate smartlist;
extern crate tor_allocate;
#[macro_use]
extern crate tor_util;

pub mod errors;
pub mod ffi;
pub mod protoset;
mod protover;

pub use protover::*;

View File

@ -1,697 +0,0 @@
// Copyright (c) 2018-2019, The Tor Project, Inc.
// Copyright (c) 2018, isis agora lovecruft
// See LICENSE for licensing information
//! Sets for lazily storing ordered, non-overlapping ranges of integers.
use std::cmp;
use std::iter;
use std::slice;
use std::str::FromStr;
use std::u32;
use errors::ProtoverError;
/// A single version number.
///
/// This is also the element type of the `(low, high)` ranges a `ProtoSet`
/// stores.
pub type Version = u32;
/// A `ProtoSet` stores an ordered `Vec<T>` of `(low, high)` pairs of ranges of
/// non-overlapping protocol versions.
///
/// # Examples
///
/// ```
/// use std::str::FromStr;
///
/// use protover::errors::ProtoverError;
/// use protover::protoset::ProtoSet;
/// use protover::protoset::Version;
///
/// # fn do_test() -> Result<ProtoSet, ProtoverError> {
/// let protoset: ProtoSet = ProtoSet::from_str("3-5,8")?;
///
/// // We could also equivalently call:
/// let protoset: ProtoSet = "3-5,8".parse()?;
///
/// assert!(protoset.contains(&4));
/// assert!(!protoset.contains(&7));
///
/// let expanded: Vec<Version> = protoset.clone().into();
///
/// assert_eq!(&expanded[..], &[3, 4, 5, 8]);
///
/// let contracted: String = protoset.clone().to_string();
///
/// assert_eq!(contracted, "3-5,8".to_string());
/// # Ok(protoset)
/// # }
/// # fn main() { do_test(); } // wrap the test so we can use the ? operator
/// ```
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub struct ProtoSet {
    // Invariant (enforced by is_ok()): pairs are sorted ascending,
    // non-overlapping, with low <= high and both strictly below u32::MAX.
    pub(crate) pairs: Vec<(Version, Version)>,
}
impl Default for ProtoSet {
    /// Construct an empty `ProtoSet` containing no version ranges.
    fn default() -> Self {
        ProtoSet { pairs: Vec::new() }
    }
}
impl<'a> ProtoSet {
    /// Create a new `ProtoSet` from a slice of `(low, high)` pairs.
    ///
    /// # Inputs
    ///
    /// We do not assume the input pairs are deduplicated or ordered.
    pub fn from_slice(low_high_pairs: &'a [(Version, Version)]) -> Result<Self, ProtoverError> {
        // Copy the caller's pairs, then canonicalise: sort in place and
        // remove exact duplicates before validating.
        let mut pairs = low_high_pairs.to_vec();
        pairs.sort_unstable();
        pairs.dedup();
        // is_ok() rejects overlapping or inverted ranges and u32::MAX bounds.
        ProtoSet { pairs }.is_ok()
    }
}
/// Expand this `ProtoSet` to a `Vec` of all its `Version`s.
///
/// # Examples
///
/// ```
/// use std::str::FromStr;
/// use protover::protoset::ProtoSet;
/// use protover::protoset::Version;
/// # use protover::errors::ProtoverError;
///
/// # fn do_test() -> Result<Vec<Version>, ProtoverError> {
/// let protoset: ProtoSet = ProtoSet::from_str("3-5,21")?;
/// let versions: Vec<Version> = protoset.into();
///
/// assert_eq!(&versions[..], &[3, 4, 5, 21]);
/// #
/// # Ok(versions)
/// # }
/// # fn main() { do_test(); } // wrap the test so we can use the ? operator
/// ```
impl Into<Vec<Version>> for ProtoSet {
    fn into(self) -> Vec<Version> {
        // Flatten every (low, high) pair into the versions it covers; pairs
        // are stored in ascending order, so the output is already sorted.
        self.iter()
            .flat_map(|&(low, high)| low..high + 1)
            .collect()
    }
}
impl ProtoSet {
    /// Get an iterator over the `(low, high)` `pairs` in this `ProtoSet`.
    pub fn iter(&self) -> slice::Iter<(Version, Version)> {
        self.pairs.iter()
    }

    /// Expand this `ProtoSet` into a `Vec` of all its `Version`s.
    ///
    /// # Examples
    ///
    /// ```
    /// # use protover::errors::ProtoverError;
    /// use protover::protoset::ProtoSet;
    ///
    /// # fn do_test() -> Result<bool, ProtoverError> {
    /// let protoset: ProtoSet = "3-5,9".parse()?;
    ///
    /// assert_eq!(protoset.expand(), vec![3, 4, 5, 9]);
    ///
    /// let protoset: ProtoSet = "1,3,5-7".parse()?;
    ///
    /// assert_eq!(protoset.expand(), vec![1, 3, 5, 6, 7]);
    /// #
    /// # Ok(true)
    /// # }
    /// # fn main() { do_test(); } // wrap the test so we can use the ? operator
    /// ```
    pub fn expand(self) -> Vec<Version> {
        self.into()
    }

    /// Count the total number of `Version`s in this set (not the number of
    /// ranges): each `(low, high)` pair contributes `high - low + 1`.
    pub fn len(&self) -> usize {
        let mut length: usize = 0;
        for &(low, high) in self.iter() {
            length += (high as usize - low as usize) + 1;
        }
        length
    }

    /// Check that this `ProtoSet` is well-formed.
    ///
    /// This is automatically called in `ProtoSet::from_str()`.
    ///
    /// # Errors
    ///
    /// * `ProtoverError::LowGreaterThanHigh`: if its `pairs` were not
    ///   well-formed, i.e. a `low` in a `(low, high)` was higher than the
    ///   previous `high`,
    /// * `ProtoverError::Overlap`: if one or more of the `pairs` are
    ///   overlapping,
    /// * `ProtoverError::ExceedsMax`: if the number of versions when expanded
    ///   would exceed `MAX_PROTOCOLS_TO_EXPAND`, and
    ///
    /// # Returns
    ///
    /// A `Result` whose `Ok` is this `Protoset`, and whose `Err` is one of the
    /// errors enumerated in the Errors section above.
    fn is_ok(self) -> Result<ProtoSet, ProtoverError> {
        let mut last_high: Version = 0;
        for &(low, high) in self.iter() {
            // Disallow u32::MAX so that `hi + 1` arithmetic elsewhere
            // (e.g. in and_not_in()) cannot overflow.
            if low == u32::MAX || high == u32::MAX {
                return Err(ProtoverError::ExceedsMax);
            }
            // NOTE(review): since `last_high` starts at 0 and this test is
            // `low <= last_high`, a first range beginning at version 0 is
            // rejected as Overlap — confirm whether version 0 is meant to be
            // representable here.
            if low <= last_high {
                return Err(ProtoverError::Overlap);
            } else if low > high {
                return Err(ProtoverError::LowGreaterThanHigh);
            }
            last_high = high;
        }
        Ok(self)
    }

    /// Determine if this `ProtoSet` contains no `Version`s.
    ///
    /// # Returns
    ///
    /// * `true` if this `ProtoSet`'s length is zero, and
    /// * `false` otherwise.
    ///
    /// # Examples
    ///
    /// ```
    /// use protover::protoset::ProtoSet;
    ///
    /// let protoset: ProtoSet = ProtoSet::default();
    ///
    /// assert!(protoset.is_empty());
    /// ```
    pub fn is_empty(&self) -> bool {
        self.pairs.len() == 0
    }

    /// Determine if `version` is included within this `ProtoSet`.
    ///
    /// # Inputs
    ///
    /// * `version`: a `Version`.
    ///
    /// # Returns
    ///
    /// `true` if the `version` is contained within this set; `false` otherwise.
    ///
    /// # Examples
    ///
    /// ```
    /// # use protover::errors::ProtoverError;
    /// use protover::protoset::ProtoSet;
    ///
    /// # fn do_test() -> Result<ProtoSet, ProtoverError> {
    /// let protoset: ProtoSet = ProtoSet::from_slice(&[(1, 5), (7, 9), (13, 14)])?;
    ///
    /// assert!(protoset.contains(&5));
    /// assert!(!protoset.contains(&10));
    /// #
    /// # Ok(protoset)
    /// # }
    /// # fn main() { do_test(); } // wrap the test so we can use the ? operator
    /// ```
    pub fn contains(&self, version: &Version) -> bool {
        // Linear scan over the (few) stored ranges.
        for &(low, high) in self.iter() {
            if low <= *version && *version <= high {
                return true;
            }
        }
        false
    }

    /// Returns all the `Version`s in `self` which are not also in the `other`
    /// `ProtoSet`.
    ///
    /// # Examples
    ///
    /// ```
    /// # use protover::errors::ProtoverError;
    /// use protover::protoset::ProtoSet;
    ///
    /// # fn do_test() -> Result<bool, ProtoverError> {
    /// let protoset: ProtoSet = "1,3-6,10-12,15-16".parse()?;
    /// let other: ProtoSet = "2,5-7,9-11,14-20".parse()?;
    ///
    /// let subset: ProtoSet = protoset.and_not_in(&other);
    ///
    /// assert_eq!(subset.expand(), vec![1, 3, 4, 12]);
    /// #
    /// # Ok(true)
    /// # }
    /// # fn main() { do_test(); } // wrap the test so we can use the ? operator
    /// ```
    pub fn and_not_in(&self, other: &Self) -> Self {
        if self.is_empty() || other.is_empty() {
            return self.clone();
        }
        // For each of our ranges, subtract every overlapping range of `other`,
        // emitting the surviving gaps as new (low, high) pairs.
        let pairs = self.iter().flat_map(|&(lo, hi)| {
            let the_end = (hi + 1, hi + 1); // special case to mark the end of the range.
            let excluded_ranges = other
                .iter()
                .cloned() // have to be owned tuples, to match iter::once(the_end).
                .skip_while(move |&(_, hi2)| hi2 < lo) // skip the non-overlapping ranges.
                .take_while(move |&(lo2, _)| lo2 <= hi) // take all the overlapping ones.
                .chain(iter::once(the_end));
            let mut nextlo = lo;
            excluded_ranges.filter_map(move |(excluded_lo, excluded_hi)| {
                // Emit the gap before this excluded range, if any survives.
                let pair = if nextlo < excluded_lo {
                    Some((nextlo, excluded_lo - 1))
                } else {
                    None
                };
                // Resume just past the excluded range (clamped so +1 can't overflow).
                nextlo = cmp::min(excluded_hi, u32::MAX - 1) + 1;
                pair
            })
        });
        let pairs = pairs.collect();
        ProtoSet::is_ok(ProtoSet { pairs }).expect("should be already sorted")
    }
}
/// Largest allowed protocol version.
/// Versions above this are rejected with `ProtoverError::ExceedsMax` by
/// `ProtoSet::from_str()`.
/// C_RUST_COUPLED: protover.c `MAX_PROTOCOL_VERSION`
const MAX_PROTOCOL_VERSION: Version = 63;
impl FromStr for ProtoSet {
    type Err = ProtoverError;

    /// Parse the unique version numbers supported by a subprotocol from a string.
    ///
    /// # Inputs
    ///
    /// * `version_string`, a string comprised of "[0-9,-]"
    ///
    /// # Returns
    ///
    /// A `Result` whose `Ok` value is a `ProtoSet` holding all of the unique
    /// version numbers.
    ///
    /// The returned `Result`'s `Err` value is an `ProtoverError` appropriate to
    /// the error.
    ///
    /// # Errors
    ///
    /// This function will error if:
    ///
    /// * the `version_string` is an equals (`"="`) sign,
    /// * the expansion of a version range produces an error (see
    ///   `expand_version_range`),
    /// * any single version number is not parseable as an `u32` in radix 10,
    /// * any version number exceeds `MAX_PROTOCOL_VERSION` (63), or
    /// * there are greater than 2^16 version numbers to expand.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::str::FromStr;
    ///
    /// use protover::errors::ProtoverError;
    /// use protover::protoset::ProtoSet;
    ///
    /// # fn do_test() -> Result<ProtoSet, ProtoverError> {
    /// let protoset: ProtoSet = ProtoSet::from_str("2-5,8")?;
    ///
    /// assert!(protoset.contains(&5));
    /// assert!(!protoset.contains(&10));
    ///
    /// // We can also equivalently call `ProtoSet::from_str` by doing (all
    /// // implementations of `FromStr` can be called this way, this one isn't
    /// // special):
    /// let protoset: ProtoSet = "4-6,12".parse()?;
    ///
    /// // Version numbers are capped at `MAX_PROTOCOL_VERSION` (63); larger
    /// // values are rejected:
    /// assert_eq!(Err(ProtoverError::ExceedsMax), ProtoSet::from_str("1-70000"));
    /// // ...and values which don't even fit in a u32 fail to parse at all:
    /// assert_eq!(Err(ProtoverError::Unparseable), ProtoSet::from_str("1-4294967296"));
    ///
    /// // There are lots of ways to get an `Err` from this function. Here are
    /// // a few:
    /// assert_eq!(Err(ProtoverError::Unparseable), ProtoSet::from_str("="));
    /// assert_eq!(Err(ProtoverError::Unparseable), ProtoSet::from_str("-"));
    /// assert_eq!(Err(ProtoverError::Unparseable), ProtoSet::from_str("not_an_int"));
    /// assert_eq!(Err(ProtoverError::Unparseable), ProtoSet::from_str("3-"));
    /// assert_eq!(Err(ProtoverError::Unparseable), ProtoSet::from_str("1-,4"));
    ///
    /// // An empty string is, however, legal, and results in an
    /// // empty `ProtoSet`:
    /// assert_eq!(Ok(ProtoSet::default()), ProtoSet::from_str(""));
    /// #
    /// # Ok(protoset)
    /// # }
    /// # fn main() { do_test(); } // wrap the test so we can use the ? operator
    /// ```
    fn from_str(version_string: &str) -> Result<Self, Self::Err> {
        // If we were passed in an empty string, then return an empty ProtoSet.
        if version_string.is_empty() {
            return Ok(Self::default());
        }
        let mut pairs: Vec<(Version, Version)> = Vec::new();
        let pieces: ::std::str::Split<char> = version_string.split(',');
        for p in pieces {
            let (lo,hi) = if p.contains('-') {
                // A range: split on the first '-' only, so "1-2-3" leaves
                // "2-3" as the high part, which then fails to parse as a u32.
                let mut pair = p.splitn(2, '-');
                let low = pair.next().ok_or(ProtoverError::Unparseable)?;
                let high = pair.next().ok_or(ProtoverError::Unparseable)?;
                let lo: Version = low.parse().or(Err(ProtoverError::Unparseable))?;
                let hi: Version = high.parse().or(Err(ProtoverError::Unparseable))?;
                (lo,hi)
            } else {
                // A single version becomes a degenerate (v, v) range.
                let v: u32 = p.parse().or(Err(ProtoverError::Unparseable))?;
                (v, v)
            };
            if lo > MAX_PROTOCOL_VERSION || hi > MAX_PROTOCOL_VERSION {
                return Err(ProtoverError::ExceedsMax);
            }
            pairs.push((lo, hi));
        }
        // from_slice() sorts, dedups, and validates via is_ok().
        ProtoSet::from_slice(&pairs[..])
    }
}
impl ToString for ProtoSet {
    /// Contracts a `ProtoSet` of versions into a string.
    ///
    /// # Returns
    ///
    /// A `String` representation of this `ProtoSet` in ascending order:
    /// each multi-version range is written as "low-high", each single
    /// version as itself, joined by commas.
    fn to_string(&self) -> String {
        let parts: Vec<String> = self
            .iter()
            .map(|&(low, high)| {
                if low == high {
                    format!("{}", low)
                } else {
                    // Stored ranges are well-formed, so low < high holds here.
                    debug_assert!(low < high);
                    format!("{}-{}", low, high)
                }
            })
            .collect();
        parts.join(",")
    }
}
/// Checks to see if there is a continuous range of integers, starting at the
/// first in the list. Returns the last integer in the range if a range exists.
///
/// # Inputs
///
/// `list`, an ordered slice of `u32` (i.e. `Version`) integers representing
/// the supported versions for a single protocol.  Callers holding a
/// `Vec<Version>` can pass `&v` as before via deref coercion.
///
/// # Returns
///
/// A `bool` indicating whether the list contains a range, starting at the first
/// in the list, a `u32` of the last integer in the range, and a `usize` of
/// the index of that version.
///
/// For example, if given vec![1, 2, 3, 5], find_range will return true,
/// as there is a continuous range, and 3, which is the last number in the
/// continuous range, and 2 which is the index of 3.
fn find_range(list: &[u32]) -> (bool, u32, usize) {
    let mut iterable = list.iter().peekable();
    // An empty list trivially contains no range; this also replaces the
    // previous redundant `list.len() == 0` pre-check.
    let mut range_end = match iterable.next() {
        Some(&n) => n,
        None => return (false, 0, 0),
    };
    let mut has_range = false;
    let mut index: usize = 0;
    // Walk forward while each element is exactly one greater than the last.
    while let Some(&&n) = iterable.peek() {
        if n != range_end + 1 {
            break;
        }
        iterable.next();
        has_range = true;
        range_end = n;
        index += 1;
    }
    (has_range, range_end, index)
}
impl From<Vec<Version>> for ProtoSet {
    /// Build a `ProtoSet` from an arbitrary (unsorted, possibly duplicated)
    /// list of versions by sorting, deduplicating, and contracting runs of
    /// consecutive versions into `(first, last)` pairs.
    fn from(mut v: Vec<Version>) -> ProtoSet {
        let mut version_pairs: Vec<(Version, Version)> = Vec::new();
        v.sort_unstable();
        v.dedup();
        'vector: while !v.is_empty() {
            let (has_range, end, index): (bool, Version, usize) = find_range(&v);
            if has_range {
                // `v` is non-empty here, so first()/get(index) are Some; the
                // `continue`s are defensive fallbacks.
                let first: Version = match v.first() {
                    Some(x) => *x,
                    None => continue,
                };
                let last: Version = match v.get(index) {
                    Some(x) => *x,
                    None => continue,
                };
                debug_assert!(last == end, format!("last = {}, end = {}", last, end));
                version_pairs.push((first, last));
                // Drop the consumed run and keep scanning the remainder.
                v = v.split_off(index + 1);
                if v.len() == 0 {
                    break 'vector;
                }
            } else {
                let last: Version = match v.get(index) {
                    Some(x) => *x,
                    None => continue,
                };
                // A lone version becomes a degenerate (x, x) pair.
                version_pairs.push((last, last));
                v.remove(index);
            }
        }
        // NOTE(review): from_slice() can still reject the pairs (e.g. a
        // version of 0 trips is_ok()'s `low <= last_high` check, and
        // u32::MAX trips ExceedsMax), in which case the whole set silently
        // collapses to empty — confirm that is intended.
        ProtoSet::from_slice(&version_pairs[..]).unwrap_or(ProtoSet::default())
    }
}
#[cfg(test)]
mod test {
    // Unit tests for ProtoSet parsing, construction, and membership.
    // Note: the two-argument `assert!(cond, format!(..))` form used below is
    // the pre-2018 message style; newer toolchains prefer
    // `assert!(cond, "fmt", args)`.
    use super::*;

    #[test]
    fn test_find_range() {
        assert_eq!((false, 0, 0), find_range(&vec![]));
        assert_eq!((false, 1, 0), find_range(&vec![1]));
        assert_eq!((true, 2, 1), find_range(&vec![1, 2]));
        assert_eq!((true, 3, 2), find_range(&vec![1, 2, 3]));
        assert_eq!((true, 3, 2), find_range(&vec![1, 2, 3, 5]));
    }

    // Assert that a ProtoSet contains every version in the given list.
    macro_rules! assert_contains_each {
        ($protoset:expr, $versions:expr) => {
            for version in $versions {
                assert!($protoset.contains(version));
            }
        };
    }

    // Parse $str into a ProtoSet and assert it contains every version in $list.
    macro_rules! test_protoset_contains_versions {
        ($list:expr, $str:expr) => {
            let versions: &[Version] = $list;
            let protoset: Result<ProtoSet, ProtoverError> = ProtoSet::from_str($str);
            assert!(protoset.is_ok());
            let p = protoset.unwrap();
            assert_contains_each!(p, versions);
        };
    }

    #[test]
    fn test_versions_from_str() {
        test_protoset_contains_versions!(&[], "");
        test_protoset_contains_versions!(&[1], "1");
        test_protoset_contains_versions!(&[1, 2], "1,2");
        test_protoset_contains_versions!(&[1, 2, 3], "1-3");
        test_protoset_contains_versions!(&[1, 2, 5], "1-2,5");
        test_protoset_contains_versions!(&[1, 3, 4, 5], "1,3-5");
        test_protoset_contains_versions!(&[42, 55, 56, 57, 58], "42,55-58");
    }

    #[test]
    fn test_versions_from_str_ab() {
        assert_eq!(Err(ProtoverError::Unparseable), ProtoSet::from_str("a,b"));
    }

    #[test]
    fn test_versions_from_str_negative_1() {
        assert_eq!(Err(ProtoverError::Unparseable), ProtoSet::from_str("-1"));
    }

    #[test]
    fn test_versions_from_str_commas() {
        assert_eq!(Err(ProtoverError::Unparseable), ProtoSet::from_str(","));
        assert_eq!(Err(ProtoverError::Unparseable), ProtoSet::from_str("1,,2"));
        assert_eq!(Err(ProtoverError::Unparseable), ProtoSet::from_str("1,2,"));
    }

    #[test]
    fn test_versions_from_str_hyphens() {
        assert_eq!(Err(ProtoverError::Unparseable), ProtoSet::from_str("--1"));
        assert_eq!(Err(ProtoverError::Unparseable), ProtoSet::from_str("-1-2"));
        assert_eq!(Err(ProtoverError::Unparseable), ProtoSet::from_str("1--2"));
    }

    #[test]
    fn test_versions_from_str_triple() {
        assert_eq!(Err(ProtoverError::Unparseable), ProtoSet::from_str("1-2-3"));
    }

    #[test]
    fn test_versions_from_str_1exclam() {
        assert_eq!(Err(ProtoverError::Unparseable), ProtoSet::from_str("1,!"));
    }

    #[test]
    fn test_versions_from_str_percent_equal() {
        assert_eq!(Err(ProtoverError::Unparseable), ProtoSet::from_str("%="));
    }

    #[test]
    fn test_versions_from_str_whitespace() {
        assert_eq!(Err(ProtoverError::Unparseable), ProtoSet::from_str("1,2\n"));
        assert_eq!(Err(ProtoverError::Unparseable), ProtoSet::from_str("1\r,2"));
        assert_eq!(Err(ProtoverError::Unparseable), ProtoSet::from_str("1,\t2"));
    }

    #[test]
    fn test_versions_from_str_overlap() {
        assert_eq!(Err(ProtoverError::Overlap), ProtoSet::from_str("1-3,2-4"));
    }

    #[test]
    fn test_versions_from_slice_overlap() {
        assert_eq!(
            Err(ProtoverError::Overlap),
            ProtoSet::from_slice(&[(1, 3), (2, 4)])
        );
    }

    #[test]
    fn test_versions_from_str_max() {
        assert_eq!(
            Err(ProtoverError::ExceedsMax),
            ProtoSet::from_str("4294967295")
        );
    }

    #[test]
    fn test_versions_from_slice_max() {
        assert_eq!(
            Err(ProtoverError::ExceedsMax),
            ProtoSet::from_slice(&[(4294967295, 4294967295)])
        );
    }

    #[test]
    fn test_protoset_contains() {
        let protoset: ProtoSet = ProtoSet::from_slice(&[(1, 5), (7, 9), (13, 14)]).unwrap();

        for x in 1..6 {
            assert!(protoset.contains(&x), format!("should contain {}", x));
        }
        for x in 7..10 {
            assert!(protoset.contains(&x), format!("should contain {}", x));
        }
        for x in 13..15 {
            assert!(protoset.contains(&x), format!("should contain {}", x));
        }
        for x in [6, 10, 11, 12, 15, 42, 43, 44, 45, 1234584].iter() {
            assert!(!protoset.contains(&x), format!("should not contain {}", x));
        }
    }

    #[test]
    fn test_protoset_contains_1_3() {
        let protoset: ProtoSet = ProtoSet::from_slice(&[(1, 3)]).unwrap();

        for x in 1..4 {
            assert!(protoset.contains(&x), format!("should contain {}", x));
        }
    }

    // Convert the given versions into a ProtoSet and assert round-trip
    // membership of every version.
    macro_rules! assert_protoset_from_vec_contains_all {
        ($($x:expr),*) => (
            let vec: Vec<Version> = vec!($($x),*);
            let protoset: ProtoSet = vec.clone().into();
            for x in vec.iter() {
                assert!(protoset.contains(&x));
            }
        )
    }

    #[test]
    fn test_protoset_from_vec_123() {
        assert_protoset_from_vec_contains_all!(1, 2, 3);
    }

    #[test]
    fn test_protoset_from_vec_1_315() {
        assert_protoset_from_vec_contains_all!(1, 2, 3, 15);
    }

    #[test]
    fn test_protoset_from_vec_unordered() {
        let v: Vec<Version> = vec![2, 3, 8, 4, 3, 9, 7, 2];
        let ps: ProtoSet = v.into();

        assert_eq!(ps.to_string(), "2-4,7-9");
    }

    #[test]
    fn test_protoset_into_vec() {
        let ps: ProtoSet = "1-13,42".parse().unwrap();
        let v: Vec<Version> = ps.into();

        assert!(v.contains(&7));
        assert!(v.contains(&42));
    }
}
// Benchmark scaffolding, compiled only with the "bench" feature enabled;
// no benchmarks are currently implemented here.
#[cfg(all(test, feature = "bench"))]
mod bench {
    use super::*;
}

View File

@ -1,984 +0,0 @@
// Copyright (c) 2016-2019, The Tor Project, Inc. */
// See LICENSE for licensing information */
use std::collections::hash_map;
use std::collections::HashMap;
use std::ffi::CStr;
use std::fmt;
use std::str;
use std::str::FromStr;
use std::string::String;
use external::c_tor_version_as_new_as;
use errors::ProtoverError;
use protoset::ProtoSet;
use protoset::Version;
/// The first version of Tor that included "proto" entries in its descriptors.
/// Authorities should use this to decide whether to guess proto lines.
///
/// C_RUST_COUPLED:
/// protover.h `FIRST_TOR_VERSION_TO_ADVERTISE_PROTOCOLS`
const FIRST_TOR_VERSION_TO_ADVERTISE_PROTOCOLS: &'static str = "0.2.9.3-alpha";

/// The maximum number of subprotocol version numbers we will attempt to expand
/// before concluding that someone is trying to DoS us
///
/// C_RUST_COUPLED: protover.c `MAX_PROTOCOLS_TO_EXPAND`
const MAX_PROTOCOLS_TO_EXPAND: usize = 1 << 16;

/// The maximum size an `UnknownProtocol`'s name may be.
/// Names longer than this fail `UnknownProtocol::from_str` (but are still
/// accepted by `from_str_any_len`).
pub(crate) const MAX_PROTOCOL_NAME_LENGTH: usize = 100;
/// Known subprotocols in Tor. Indicates which subprotocol a relay supports.
///
/// The variant names double as the on-the-wire protocol names via the
/// `Display` impl below (which formats using `Debug`).
///
/// C_RUST_COUPLED: protover.h `protocol_type_t`
#[derive(Clone, Hash, Eq, PartialEq, Debug)]
pub enum Protocol {
    Cons,
    Desc,
    DirCache,
    HSDir,
    HSIntro,
    HSRend,
    Link,
    LinkAuth,
    Microdesc,
    Relay,
    Padding,
    FlowCtrl,
}
impl fmt::Display for Protocol {
    /// Format a `Protocol` as its variant name, reusing the derived `Debug`
    /// representation.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(self, f)
    }
}
/// Translates a string representation of a protocol into a Proto type.
/// Error if the string is an unrecognized protocol name.
///
/// C_RUST_COUPLED: protover.c `PROTOCOL_NAMES`
impl FromStr for Protocol {
    type Err = ProtoverError;

    /// Parse a protocol name (e.g. "Cons") into its `Protocol` variant.
    ///
    /// # Errors
    ///
    /// `ProtoverError::UnknownProtocol` for any name not listed here.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "Cons" => Ok(Protocol::Cons),
            "Desc" => Ok(Protocol::Desc),
            "DirCache" => Ok(Protocol::DirCache),
            "HSDir" => Ok(Protocol::HSDir),
            "HSIntro" => Ok(Protocol::HSIntro),
            "HSRend" => Ok(Protocol::HSRend),
            "Link" => Ok(Protocol::Link),
            "LinkAuth" => Ok(Protocol::LinkAuth),
            "Microdesc" => Ok(Protocol::Microdesc),
            "Relay" => Ok(Protocol::Relay),
            "Padding" => Ok(Protocol::Padding),
            "FlowCtrl" => Ok(Protocol::FlowCtrl),
            _ => Err(ProtoverError::UnknownProtocol),
        }
    }
}
/// A protocol string which is not one of the `Protocols` we currently know
/// about.
///
/// The wrapped `String` is the raw protocol name as received.
#[derive(Clone, Debug, Hash, Eq, PartialEq)]
pub struct UnknownProtocol(String);
impl fmt::Display for UnknownProtocol {
    /// Format an `UnknownProtocol` as its raw protocol-name string.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(&self.0)
    }
}
/// True when `s` contains only characters permitted in a protocol name:
/// ASCII letters, ASCII digits, and hyphens.  The empty string is valid.
fn is_valid_proto(s: &str) -> bool {
    // Checking bytes is equivalent to the char-wise form here: any byte of a
    // multi-byte (non-ASCII) char fails is_ascii_alphanumeric, just as the
    // char itself would.
    s.bytes().all(|b| b.is_ascii_alphanumeric() || b == b'-')
}
impl FromStr for UnknownProtocol {
    type Err = ProtoverError;

    /// Parse `s` as a protocol name, enforcing both the allowed character
    /// set and the `MAX_PROTOCOL_NAME_LENGTH` limit.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if !is_valid_proto(s) {
            return Err(ProtoverError::InvalidProtocol);
        }
        if s.len() > MAX_PROTOCOL_NAME_LENGTH {
            return Err(ProtoverError::ExceedsNameLimit);
        }
        Ok(UnknownProtocol(s.to_string()))
    }
}
impl UnknownProtocol {
    /// Create an `UnknownProtocol`, ignoring whether or not it
    /// exceeds MAX_PROTOCOL_NAME_LENGTH.
    fn from_str_any_len(s: &str) -> Result<Self, ProtoverError> {
        // Only the character-set rule applies here; length is unchecked.
        if is_valid_proto(s) {
            Ok(UnknownProtocol(s.to_string()))
        } else {
            Err(ProtoverError::InvalidProtocol)
        }
    }
}
impl From<Protocol> for UnknownProtocol {
    /// Widen a known `Protocol` into an `UnknownProtocol` carrying its
    /// display name.
    fn from(p: Protocol) -> UnknownProtocol {
        UnknownProtocol(format!("{}", p))
    }
}
/// Test-only stub: pretend LinkAuth=1 is available so tests need not link
/// against the C code.
#[cfg(feature = "test_linking_hack")]
fn have_linkauth_v1() -> bool {
    true
}

/// Report whether this build supports LinkAuth protocol version 1:
/// true unless the C code reports that NSS is in use
/// (`c_tor_is_using_nss`).
#[cfg(not(feature = "test_linking_hack"))]
fn have_linkauth_v1() -> bool {
    use external::c_tor_is_using_nss;
    !c_tor_is_using_nss()
}
/// Get a CStr representation of current supported protocols, for
/// passing to C, or for converting to a `&str` for Rust.
///
/// # Returns
///
/// An `&'static CStr` whose value is the existing protocols supported by tor.
/// Returned data is in the format as follows:
///
/// "HSDir=1-1 LinkAuth=1"
///
/// # Note
///
/// Rust code can use the `&'static CStr` as a normal `&'a str` by
/// calling `protover::get_supported_protocols`.
///
// C_RUST_COUPLED: protover.c `protover_get_supported_protocols`
pub(crate) fn get_supported_protocols_cstr() -> &'static CStr {
    // The two branches differ only in LinkAuth: without LinkAuth=1
    // (have_linkauth_v1() false) we advertise LinkAuth=3 only.
    if !have_linkauth_v1() {
        cstr!(
            "Cons=1-2 \
             Desc=1-2 \
             DirCache=2 \
             FlowCtrl=1 \
             HSDir=1-2 \
             HSIntro=3-5 \
             HSRend=1-2 \
             Link=1-5 \
             LinkAuth=3 \
             Microdesc=1-2 \
             Padding=2 \
             Relay=1-3"
        )
    } else {
        cstr!(
            "Cons=1-2 \
             Desc=1-2 \
             DirCache=2 \
             FlowCtrl=1 \
             HSDir=1-2 \
             HSIntro=3-5 \
             HSRend=1-2 \
             Link=1-5 \
             LinkAuth=1,3 \
             Microdesc=1-2 \
             Padding=2 \
             Relay=1-3"
        )
    }
}
/// A map of protocol names to the versions of them which are supported.
///
/// Keys are restricted to the known `Protocol` variants; see
/// `UnvalidatedProtoEntry` for arbitrary protocol names.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ProtoEntry(HashMap<Protocol, ProtoSet>);

impl Default for ProtoEntry {
    /// An empty entry mapping no protocols.
    fn default() -> ProtoEntry {
        ProtoEntry(HashMap::new())
    }
}
impl ProtoEntry {
    /// Get an iterator over the `Protocol`s and their `ProtoSet`s in this `ProtoEntry`.
    pub fn iter(&self) -> hash_map::Iter<Protocol, ProtoSet> {
        self.0.iter()
    }

    /// Translate the supported tor versions from a string into a
    /// ProtoEntry, which is useful when looking up a specific
    /// subprotocol.
    pub fn supported() -> Result<Self, ProtoverError> {
        let supported_cstr: &'static CStr = get_supported_protocols_cstr();
        // The supported-protocols string is compiled in; if it were somehow
        // not valid UTF-8 we parse "" instead, yielding an empty ProtoEntry.
        let supported: &str = supported_cstr.to_str().unwrap_or("");
        supported.parse()
    }

    /// Number of distinct protocols recorded (not total versions).
    pub fn len(&self) -> usize {
        self.0.len()
    }

    /// Look up the versions recorded for `protocol`, if any.
    pub fn get(&self, protocol: &Protocol) -> Option<&ProtoSet> {
        self.0.get(protocol)
    }

    /// Insert or replace the versions for `key`.
    pub fn insert(&mut self, key: Protocol, value: ProtoSet) {
        self.0.insert(key, value);
    }

    /// Remove `key`, returning its versions if it was present.
    pub fn remove(&mut self, key: &Protocol) -> Option<ProtoSet> {
        self.0.remove(key)
    }

    /// True when no protocols are recorded.
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }
}
impl FromStr for ProtoEntry {
    type Err = ProtoverError;

    /// Parse a string of subprotocol types and their version numbers.
    ///
    /// # Inputs
    ///
    /// * A `protocol_entry` string, comprised of a keywords, an "=" sign, and
    ///   one or more version numbers, each separated by a space. For example,
    ///   `"Cons=3-4 HSDir=1"`.
    ///
    /// # Returns
    ///
    /// A `Result` whose `Ok` value is a `ProtoEntry`.
    /// Otherwise, the `Err` value of this `Result` is a `ProtoverError`.
    fn from_str(protocol_entry: &str) -> Result<ProtoEntry, ProtoverError> {
        let mut proto_entry: ProtoEntry = ProtoEntry::default();
        // An empty input parses to an empty entry.
        if protocol_entry.is_empty() {
            return Ok(proto_entry);
        }
        let entries = protocol_entry.split(' ');
        for entry in entries {
            let mut parts = entry.splitn(2, '=');
            let proto = match parts.next() {
                Some(n) => n,
                None => return Err(ProtoverError::Unparseable),
            };
            // An entry with no '=' fails here: splitn yields no second piece.
            let vers = match parts.next() {
                Some(n) => n,
                None => return Err(ProtoverError::Unparseable),
            };
            let versions: ProtoSet = vers.parse()?;
            let proto_name: Protocol = proto.parse()?;
            proto_entry.insert(proto_name, versions);
            // NOTE(review): this compares the number of distinct *protocols*
            // against MAX_PROTOCOLS_TO_EXPAND, not the number of expanded
            // versions — confirm that is the intended DoS bound.
            if proto_entry.len() > MAX_PROTOCOLS_TO_EXPAND {
                return Err(ProtoverError::ExceedsMax);
            }
        }
        Ok(proto_entry)
    }
}
/// Generate an implementation of `ToString` for either a `ProtoEntry` or an
/// `UnvalidatedProtoEntry`.
///
/// The generated string is "Name=versions" pairs joined by single spaces.
macro_rules! impl_to_string_for_proto_entry {
    ($t:ty) => {
        impl ToString for $t {
            fn to_string(&self) -> String {
                let mut parts: Vec<String> = Vec::new();
                for (protocol, versions) in self.iter() {
                    parts.push(format!("{}={}", protocol.to_string(), versions.to_string()));
                }
                // HashMap iteration order is unspecified; sort for stable output.
                parts.sort_unstable();
                parts.join(" ")
            }
        }
    };
}

impl_to_string_for_proto_entry!(ProtoEntry);
impl_to_string_for_proto_entry!(UnvalidatedProtoEntry);
/// A `ProtoEntry`, but whose `Protocols` can be any `UnknownProtocol`, not just
/// the supported ones enumerated in `Protocols`. The protocol versions are
/// validated, however.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct UnvalidatedProtoEntry(HashMap<UnknownProtocol, ProtoSet>);

impl Default for UnvalidatedProtoEntry {
    /// An empty entry mapping no protocols.
    fn default() -> UnvalidatedProtoEntry {
        UnvalidatedProtoEntry(HashMap::new())
    }
}
impl UnvalidatedProtoEntry {
    /// Get an iterator over the `Protocol`s and their `ProtoSet`s in this `ProtoEntry`.
    pub fn iter(&self) -> hash_map::Iter<UnknownProtocol, ProtoSet> {
        self.0.iter()
    }
    /// Look up the set of versions recorded for `protocol`, if any.
    pub fn get(&self, protocol: &UnknownProtocol) -> Option<&ProtoSet> {
        self.0.get(protocol)
    }
    /// Record `value` as the version set for `key`, replacing any previous set.
    pub fn insert(&mut self, key: UnknownProtocol, value: ProtoSet) {
        self.0.insert(key, value);
    }
    /// Remove `key` from this entry, returning its version set if it was present.
    pub fn remove(&mut self, key: &UnknownProtocol) -> Option<ProtoSet> {
        self.0.remove(key)
    }
    /// Returns `true` if no protocols are recorded at all.
    pub fn is_empty(&self) -> bool {
        self.0.is_empty()
    }
    /// The total number of version numbers recorded, summed over every
    /// protocol — NOT the number of distinct protocols.
    pub fn len(&self) -> usize {
        let mut total: usize = 0;
        for (_, versions) in self.iter() {
            total += versions.len();
        }
        total
    }
    /// Determine if we support every protocol a client supports, and if not,
    /// determine which protocols we do not have support for.
    ///
    /// # Returns
    ///
    /// Optionally, return parameters which the client supports but which we do not.
    ///
    /// # Examples
    /// ```
    /// use protover::UnvalidatedProtoEntry;
    ///
    /// let protocols: UnvalidatedProtoEntry = "LinkAuth=1 Microdesc=1-2 Relay=2".parse().unwrap();
    /// let unsupported: Option<UnvalidatedProtoEntry> = protocols.all_supported();
    /// assert_eq!(true, unsupported.is_none());
    ///
    /// let protocols: UnvalidatedProtoEntry = "Link=1-2 Wombat=9".parse().unwrap();
    /// let unsupported: Option<UnvalidatedProtoEntry> = protocols.all_supported();
    /// assert_eq!(true, unsupported.is_some());
    /// assert_eq!("Wombat=9", &unsupported.unwrap().to_string());
    /// ```
    pub fn all_supported(&self) -> Option<UnvalidatedProtoEntry> {
        let mut unsupported: UnvalidatedProtoEntry = UnvalidatedProtoEntry::default();
        // NOTE(review): if our own supported-protocols list fails to parse we
        // return None here, i.e. we report *nothing* as unsupported.
        let supported: ProtoEntry = match ProtoEntry::supported() {
            Ok(x) => x,
            Err(_) => return None,
        };
        for (protocol, versions) in self.iter() {
            let is_supported: Result<Protocol, ProtoverError> = protocol.0.parse();
            let supported_protocol: Protocol;
            // If the protocol wasn't even in the enum, then we definitely don't
            // know about it and don't support any of its versions.
            if is_supported.is_err() {
                // Only record it if the client actually listed versions for it
                // (an empty version set for an unknown name is ignored).
                if !versions.is_empty() {
                    unsupported.insert(protocol.clone(), versions.clone());
                }
                continue;
            } else {
                supported_protocol = is_supported.unwrap();
            }
            let maybe_supported_versions: Option<&ProtoSet> = supported.get(&supported_protocol);
            let supported_versions: &ProtoSet;
            // If the protocol wasn't in the map, then we don't know about it
            // and don't support any of its versions. Add its versions to the
            // map (if it has versions).
            if maybe_supported_versions.is_none() {
                if !versions.is_empty() {
                    unsupported.insert(protocol.clone(), versions.clone());
                }
                continue;
            } else {
                supported_versions = maybe_supported_versions.unwrap();
            }
            // Whatever survives the set-difference is what we lack.
            let unsupported_versions = versions.and_not_in(supported_versions);
            if !unsupported_versions.is_empty() {
                unsupported.insert(protocol.clone(), unsupported_versions);
            }
        }
        if unsupported.is_empty() {
            return None;
        }
        Some(unsupported)
    }
    /// Determine if we have support for some protocol and version.
    ///
    /// # Inputs
    ///
    /// * `proto`, an `UnknownProtocol` to test support for
    /// * `vers`, a `Version` which we will go on to determine whether the
    /// specified protocol supports.
    ///
    /// # Return
    ///
    /// Returns `true` iff this `UnvalidatedProtoEntry` includes support for the
    /// indicated protocol and version, and `false` otherwise.
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::str::FromStr;
    /// use protover::*;
    /// # use protover::errors::ProtoverError;
    ///
    /// # fn do_test () -> Result<UnvalidatedProtoEntry, ProtoverError> {
    /// let proto: UnvalidatedProtoEntry = "Link=3-4 Cons=1 Doggo=3-5".parse()?;
    /// assert_eq!(true, proto.supports_protocol(&Protocol::Cons.into(), &1));
    /// assert_eq!(false, proto.supports_protocol(&Protocol::Cons.into(), &5));
    /// assert_eq!(true, proto.supports_protocol(&UnknownProtocol::from_str("Doggo")?, &4));
    /// # Ok(proto)
    /// # } fn main () { do_test(); }
    /// ```
    pub fn supports_protocol(&self, proto: &UnknownProtocol, vers: &Version) -> bool {
        // A protocol we have no entry for supports no versions at all.
        let supported_versions: &ProtoSet = match self.get(proto) {
            Some(n) => n,
            None => return false,
        };
        supported_versions.contains(&vers)
    }
    /// As `UnvalidatedProtoEntry::supports_protocol()`, but also returns `true`
    /// if any later version of the protocol is supported.
    ///
    /// # Examples
    /// ```
    /// use protover::*;
    /// # use protover::errors::ProtoverError;
    ///
    /// # fn do_test () -> Result<UnvalidatedProtoEntry, ProtoverError> {
    /// let proto: UnvalidatedProtoEntry = "Link=3-4 Cons=5".parse()?;
    ///
    /// assert_eq!(true, proto.supports_protocol_or_later(&Protocol::Cons.into(), &5));
    /// assert_eq!(true, proto.supports_protocol_or_later(&Protocol::Cons.into(), &4));
    /// assert_eq!(false, proto.supports_protocol_or_later(&Protocol::Cons.into(), &6));
    /// # Ok(proto)
    /// # } fn main () { do_test(); }
    /// ```
    pub fn supports_protocol_or_later(&self, proto: &UnknownProtocol, vers: &Version) -> bool {
        let supported_versions: &ProtoSet = match self.get(&proto) {
            Some(n) => n,
            None => return false,
        };
        // It suffices for any range in the set to have an upper bound (`.1`)
        // at or above the requested version.
        supported_versions.iter().any(|v| v.1 >= *vers)
    }
    /// Split a string containing (potentially) several protocols and their
    /// versions into a `Vec` of tuples of string in `(protocol, versions)`
    /// form.
    ///
    /// # Inputs
    ///
    /// A &str in the form `"Link=3-4 Cons=5"`.
    ///
    /// # Returns
    ///
    /// A `Result` whose `Ok` variant is a `Vec<(&str, &str)>` of `(protocol,
    /// versions)`, or whose `Err` variant is a `ProtoverError`.
    ///
    /// # Errors
    ///
    /// This will error with a `ProtoverError::Unparseable` if any of the
    /// following are true:
    ///
    /// * If a protocol name is an empty string, e.g. `"Cons=1,3 =3-5"`.
    /// * If an entry has no equals sign, e.g. `"Cons=1,3 Desc"`.
    /// * If there is leading or trailing whitespace, e.g. `" Cons=1,3 Link=3"`.
    /// * If there is any other extra whitespace, e.g. a doubled space between
    ///   entries (splitting on `' '` then yields an empty token).
    fn parse_protocol_and_version_str<'a>(
        protocol_string: &'a str,
    ) -> Result<Vec<(&'a str, &'a str)>, ProtoverError> {
        let mut protovers: Vec<(&str, &str)> = Vec::new();
        // An empty list of protocols is valid and parses to nothing.
        if protocol_string.is_empty() {
            return Ok(protovers);
        }
        for subproto in protocol_string.split(' ') {
            // Split into at most two pieces around the first '='; the version
            // string itself may legitimately contain no further '='.
            let mut parts = subproto.splitn(2, '=');
            let name = match parts.next() {
                Some("") => return Err(ProtoverError::Unparseable),
                Some(n) => n,
                None => return Err(ProtoverError::Unparseable),
            };
            let vers = match parts.next() {
                Some(n) => n,
                None => return Err(ProtoverError::Unparseable),
            };
            protovers.push((name, vers));
        }
        Ok(protovers)
    }
}
impl FromStr for UnvalidatedProtoEntry {
    type Err = ProtoverError;

    /// Parses a protocol list without validating the protocol names.
    ///
    /// # Inputs
    ///
    /// * `protocol_string`, a string comprised of keys and values, both which are
    /// strings. The keys are the protocol names while values are a string
    /// representation of the supported versions.
    ///
    /// The input is _not_ expected to be a subset of the Protocol types
    ///
    /// # Returns
    ///
    /// A `Result` whose `Ok` value is an `UnvalidatedProtoEntry`.
    ///
    /// The returned `Result`'s `Err` value is an `ProtoverError`.
    ///
    /// # Errors
    ///
    /// This function will error if:
    ///
    /// * The protocol string does not follow the "protocol_name=version_list"
    /// expected format, or
    /// * If the version string is malformed. See `impl FromStr for ProtoSet`.
    fn from_str(protocol_string: &str) -> Result<UnvalidatedProtoEntry, ProtoverError> {
        let mut entry = UnvalidatedProtoEntry::default();
        for (name, vers) in
            UnvalidatedProtoEntry::parse_protocol_and_version_str(protocol_string)?
        {
            // Parse the version list before the name, so a bad version list
            // is reported first when both halves are malformed.
            let versions = ProtoSet::from_str(vers)?;
            entry.insert(UnknownProtocol::from_str(name)?, versions);
        }
        Ok(entry)
    }
}
impl UnvalidatedProtoEntry {
    /// Like `FromStr for UnvalidatedProtoEntry`, but protocol names are
    /// accepted even when they exceed MAX_PROTOCOL_NAME_LENGTH.
    pub(crate) fn from_str_any_len(
        protocol_string: &str,
    ) -> Result<UnvalidatedProtoEntry, ProtoverError> {
        let mut entry = UnvalidatedProtoEntry::default();
        for (name, vers) in
            UnvalidatedProtoEntry::parse_protocol_and_version_str(protocol_string)?
        {
            // Version list is parsed before the name, matching `from_str`.
            let versions = ProtoSet::from_str(vers)?;
            entry.insert(UnknownProtocol::from_str_any_len(name)?, versions);
        }
        Ok(entry)
    }
}
/// Pretend a `ProtoEntry` is actually an `UnvalidatedProtoEntry`.
impl From<ProtoEntry> for UnvalidatedProtoEntry {
fn from(proto_entry: ProtoEntry) -> UnvalidatedProtoEntry {
let mut unvalidated: UnvalidatedProtoEntry = UnvalidatedProtoEntry::default();
for (protocol, versions) in proto_entry.iter() {
unvalidated.insert(UnknownProtocol::from(protocol.clone()), versions.clone());
}
unvalidated
}
}
/// A mapping of protocols to a count of how many times each of their `Version`s
/// were voted for or supported.
///
/// # Warning
///
/// The "protocols" are *not* guaranteed to be known/supported `Protocol`s, in
/// order to allow new subprotocols to be introduced even if Directory
/// Authorities don't yet know of them.
// Inner map: protocol name -> (version -> number of votes seen for it).
pub struct ProtoverVote(HashMap<UnknownProtocol, HashMap<Version, usize>>);
impl Default for ProtoverVote {
    /// A vote with no protocols tallied yet.
    fn default() -> ProtoverVote {
        let no_votes = HashMap::new();
        ProtoverVote(no_votes)
    }
}
// Consuming iteration over (protocol, per-version vote counts) pairs,
// delegating straight to the inner HashMap's iterator.
impl IntoIterator for ProtoverVote {
    type Item = (UnknownProtocol, HashMap<Version, usize>);
    type IntoIter = hash_map::IntoIter<UnknownProtocol, HashMap<Version, usize>>;
    fn into_iter(self) -> Self::IntoIter {
        self.0.into_iter()
    }
}
impl ProtoverVote {
    /// Get the (vacant or occupied) map entry for `key`, for in-place updates.
    pub fn entry(
        &mut self,
        key: UnknownProtocol,
    ) -> hash_map::Entry<UnknownProtocol, HashMap<Version, usize>> {
        self.0.entry(key)
    }
    /// Protocol voting implementation.
    ///
    /// Given a slice of `UnvalidatedProtoEntry`s and a vote `threshold`, return
    /// a new `UnvalidatedProtoEntry` encoding all of the protocols that are
    /// listed by at least `threshold` of the inputs.
    ///
    /// # Examples
    ///
    /// ```
    /// use protover::ProtoverVote;
    /// use protover::UnvalidatedProtoEntry;
    ///
    /// let protos: &[UnvalidatedProtoEntry] = &["Link=3-4".parse().unwrap(),
    ///                                          "Link=3".parse().unwrap()];
    /// let vote = ProtoverVote::compute(protos, &2);
    /// assert_eq!("Link=3", vote.to_string());
    /// ```
    // C_RUST_COUPLED: protover.c protover_compute_vote
    pub fn compute(
        proto_entries: &[UnvalidatedProtoEntry],
        threshold: &usize,
    ) -> UnvalidatedProtoEntry {
        let mut all_count: ProtoverVote = ProtoverVote::default();
        let mut final_output: UnvalidatedProtoEntry = UnvalidatedProtoEntry::default();
        if proto_entries.is_empty() {
            return final_output;
        }
        // Parse and collect all of the protos and their versions.
        for vote in proto_entries {
            // C_RUST_DIFFERS: This doesn't actually differ, but this check on
            // the total is here to make it match. Because the C version calls
            // expand_protocol_list() which checks if there would be too many
            // subprotocols *or* individual version numbers, i.e. more than
            // MAX_PROTOCOLS_TO_EXPAND, and does this *per vote*, we need to
            // match its behaviour and ensure we're not allowing more than it
            // would.
            if vote.len() > MAX_PROTOCOLS_TO_EXPAND {
                continue;
            }
            for (protocol, versions) in vote.iter() {
                // `or_insert_with` avoids allocating a fresh HashMap when the
                // protocol is already present (unlike `or_insert(HashMap::new())`).
                let supported_vers: &mut HashMap<Version, usize> = all_count
                    .entry(protocol.clone())
                    .or_insert_with(HashMap::new);
                for version in versions.clone().expand() {
                    let counter: &mut usize = supported_vers.entry(version).or_insert(0);
                    *counter += 1;
                }
            }
        }
        for (protocol, mut versions) in all_count {
            // Go through and remove versions that are less than the threshold.
            // (`count` is already a usize; no cast needed.)
            versions.retain(|_, count| *count >= *threshold);
            if !versions.is_empty() {
                let voted_versions: Vec<Version> = versions.keys().cloned().collect();
                let voted_protoset: ProtoSet = ProtoSet::from(voted_versions);
                final_output.insert(protocol, voted_protoset);
            }
        }
        final_output
    }
}
/// Returns a boolean indicating whether the given protocol and version is
/// supported in any of the existing Tor protocols
///
/// # Examples
/// ```
/// use protover::is_supported_here;
/// use protover::Protocol;
///
/// let is_supported = is_supported_here(&Protocol::Link, &10);
/// assert_eq!(false, is_supported);
///
/// let is_supported = is_supported_here(&Protocol::Link, &1);
/// assert_eq!(true, is_supported);
/// ```
pub fn is_supported_here(proto: &Protocol, vers: &Version) -> bool {
    // If our own supported-protocols list can't be obtained, or the protocol
    // has no entry in it, nothing is supported.
    match ProtoEntry::supported() {
        Ok(currently_supported) => match currently_supported.get(proto) {
            Some(known_versions) => known_versions.contains(vers),
            None => false,
        },
        Err(_) => false,
    }
}
/// Since older versions of Tor cannot infer their own subprotocols,
/// determine which subprotocols are supported by older Tor versions.
///
/// # Inputs
///
/// * `version`, a string comprised of "[0-9a-z.-]"
///
/// # Returns
///
/// A `&'static CStr` encoding a list of protocol names and supported
/// versions. The string takes the following format:
///
/// "HSDir=1-1 LinkAuth=1"
///
/// This function returns the protocols that are supported by the version input,
/// only for tor versions older than `FIRST_TOR_VERSION_TO_ADVERTISE_PROTOCOLS`
/// (but not older than 0.2.4.19). For newer tors (or older than 0.2.4.19), it
/// returns an empty string.
///
/// # Note
///
/// This function is meant to be called for/within FFI code. If you'd
/// like to use this code in Rust, please see `compute_for_old_tor()`.
//
// C_RUST_COUPLED: src/rust/protover.c `compute_for_old_tor`
pub(crate) fn compute_for_old_tor_cstr(version: &str) -> &'static CStr {
    let empty: &'static CStr = cstr!("");
    // Versions new enough to advertise their own protocols get nothing here.
    if c_tor_version_as_new_as(version, FIRST_TOR_VERSION_TO_ADVERTISE_PROTOCOLS) {
        return empty;
    }
    // The checks below must stay ordered newest-first: each branch covers
    // versions at least as new as its argument but older than the branch above.
    if c_tor_version_as_new_as(version, "0.2.9.1-alpha") {
        return cstr!(
            "Cons=1-2 Desc=1-2 DirCache=1 HSDir=1 HSIntro=3 HSRend=1-2 \
             Link=1-4 LinkAuth=1 Microdesc=1-2 Relay=1-2"
        );
    }
    if c_tor_version_as_new_as(version, "0.2.7.5") {
        return cstr!(
            "Cons=1-2 Desc=1-2 DirCache=1 HSDir=1 HSIntro=3 HSRend=1 \
             Link=1-4 LinkAuth=1 Microdesc=1-2 Relay=1-2"
        );
    }
    if c_tor_version_as_new_as(version, "0.2.4.19") {
        return cstr!(
            "Cons=1 Desc=1 DirCache=1 HSDir=1 HSIntro=3 HSRend=1 \
             Link=1-4 LinkAuth=1 Microdesc=1 Relay=1-2"
        );
    }
    // Older than 0.2.4.19: no protocols.
    empty
}
/// Since older versions of Tor cannot infer their own subprotocols,
/// determine which subprotocols are supported by older Tor versions.
///
/// # Inputs
///
/// * `version`, a string comprised of "[0-9a-z.-]"
///
/// # Returns
///
/// A `Result` whose `Ok` value is an `&'static str` encoding a list of protocol
/// names and supported versions. The string takes the following format:
///
/// "HSDir=1-1 LinkAuth=1"
///
/// This function returns the protocols that are supported by the version input,
/// only for tor versions older than `FIRST_TOR_VERSION_TO_ADVERTISE_PROTOCOLS`.
/// (but not older than 0.2.4.19). For newer tors (or older than 0.2.4.19), its
/// `Ok` `Result` contains an empty string.
///
/// Otherwise, its `Err` contains a `ProtoverError::Unparseable` if the
/// `version` string was invalid utf-8.
///
/// # Note
///
/// This function is meant to be called for/within non-FFI Rust code.
//
// C_RUST_COUPLED: src/rust/protover.c `compute_for_old_tor`
pub fn compute_for_old_tor(version: &str) -> Result<&'static str, ProtoverError> {
    // A Utf8Error from `.to_str()` is translated into our own
    // `ProtoverError::Unparseable` variant.
    compute_for_old_tor_cstr(version)
        .to_str()
        .map_err(|_| ProtoverError::Unparseable)
}
// Unit tests for protocol-entry parsing, support checking, and the
// round-tripping of version-set strings.
#[cfg(test)]
mod test {
    use std::str::FromStr;
    use std::string::ToString;
    use super::*;
    // Parse `$e` both with the length-checked parser and with
    // `from_str_any_len`, assert that they agree, and yield the result.
    macro_rules! parse_proto {
        ($e:expr) => {{
            let proto: Result<UnknownProtocol, _> = $e.parse();
            let proto2 = UnknownProtocol::from_str_any_len($e);
            assert_eq!(proto, proto2);
            proto
        }};
    }
    #[test]
    fn test_protocol_from_str() {
        // Alphanumerics and '-' are valid protocol-name characters.
        assert!(parse_proto!("Cons").is_ok());
        assert!(parse_proto!("123").is_ok());
        assert!(parse_proto!("1-2-3").is_ok());
        // Everything else (and the empty string) must be rejected.
        let err = Err(ProtoverError::InvalidProtocol);
        assert_eq!(err, parse_proto!("a_b_c"));
        assert_eq!(err, parse_proto!("a b"));
        assert_eq!(err, parse_proto!("a,"));
        assert_eq!(err, parse_proto!("b."));
        assert_eq!(err, parse_proto!(""));
    }
    // Assert that `$e` parses as a ProtoEntry, printing the error on failure.
    macro_rules! assert_protoentry_is_parseable {
        ($e:expr) => {
            let protoentry: Result<ProtoEntry, ProtoverError> = $e.parse();
            assert!(protoentry.is_ok(), format!("{:?}", protoentry.err()));
        };
    }
    // Assert that `$e` does NOT parse as a ProtoEntry.
    macro_rules! assert_protoentry_is_unparseable {
        ($e:expr) => {
            let protoentry: Result<ProtoEntry, ProtoverError> = $e.parse();
            assert!(protoentry.is_err());
        };
    }
    #[test]
    fn test_protoentry_from_str_multiple_protocols_multiple_versions() {
        assert_protoentry_is_parseable!("Cons=3-4 Link=1,3-5");
    }
    #[test]
    fn test_protoentry_from_str_empty() {
        // The empty string is a valid (empty) protocol list for both
        // validated and unvalidated entries.
        assert_protoentry_is_parseable!("");
        assert!(UnvalidatedProtoEntry::from_str("").is_ok());
    }
    #[test]
    fn test_protoentry_from_str_single_protocol_single_version() {
        assert_protoentry_is_parseable!("HSDir=1");
    }
    #[test]
    fn test_protoentry_from_str_unknown_protocol() {
        // ProtoEntry (unlike UnvalidatedProtoEntry) rejects unknown names.
        assert_protoentry_is_unparseable!("Ducks=5-7,8");
    }
    #[test]
    fn test_protoentry_from_str_allowed_number_of_versions() {
        assert_protoentry_is_parseable!("Desc=1-63");
    }
    #[test]
    fn test_protoentry_from_str_too_many_versions() {
        // 64 versions exceeds the expansion limit.
        assert_protoentry_is_unparseable!("Desc=1-64");
    }
    #[test]
    fn test_protoentry_all_supported_single_protocol_single_version() {
        let protocol: UnvalidatedProtoEntry = "Cons=1".parse().unwrap();
        let unsupported: Option<UnvalidatedProtoEntry> = protocol.all_supported();
        assert_eq!(true, unsupported.is_none());
    }
    #[test]
    fn test_protoentry_all_supported_multiple_protocol_multiple_versions() {
        let protocols: UnvalidatedProtoEntry = "Link=3-4 Desc=2".parse().unwrap();
        let unsupported: Option<UnvalidatedProtoEntry> = protocols.all_supported();
        assert_eq!(true, unsupported.is_none());
    }
    #[test]
    fn test_protoentry_all_supported_three_values() {
        let protocols: UnvalidatedProtoEntry = "LinkAuth=1 Microdesc=1-2 Relay=2".parse().unwrap();
        let unsupported: Option<UnvalidatedProtoEntry> = protocols.all_supported();
        assert_eq!(true, unsupported.is_none());
    }
    #[test]
    fn test_protoentry_all_supported_unknown_protocol() {
        // An unknown protocol name comes back verbatim as unsupported.
        let protocols: UnvalidatedProtoEntry = "Wombat=9".parse().unwrap();
        let unsupported: Option<UnvalidatedProtoEntry> = protocols.all_supported();
        assert_eq!(true, unsupported.is_some());
        assert_eq!("Wombat=9", &unsupported.unwrap().to_string());
    }
    #[test]
    fn test_protoentry_all_supported_unsupported_high_version() {
        let protocols: UnvalidatedProtoEntry = "HSDir=12-60".parse().unwrap();
        let unsupported: Option<UnvalidatedProtoEntry> = protocols.all_supported();
        assert_eq!(true, unsupported.is_some());
        assert_eq!("HSDir=12-60", &unsupported.unwrap().to_string());
    }
    #[test]
    fn test_protoentry_all_supported_unsupported_low_version() {
        // Only the unsupported part of the range (version 2) is reported.
        let protocols: UnvalidatedProtoEntry = "HSIntro=2-3".parse().unwrap();
        let unsupported: Option<UnvalidatedProtoEntry> = protocols.all_supported();
        assert_eq!(true, unsupported.is_some());
        assert_eq!("HSIntro=2", &unsupported.unwrap().to_string());
    }
    #[test]
    fn test_contract_protocol_list() {
        // Round-trip: parsing a canonical version string and re-serialising
        // it must reproduce the input exactly.
        let mut versions = "";
        assert_eq!(
            String::from(versions),
            ProtoSet::from_str(&versions).unwrap().to_string()
        );
        versions = "1";
        assert_eq!(
            String::from(versions),
            ProtoSet::from_str(&versions).unwrap().to_string()
        );
        versions = "1-2";
        assert_eq!(
            String::from(versions),
            ProtoSet::from_str(&versions).unwrap().to_string()
        );
        versions = "1,3";
        assert_eq!(
            String::from(versions),
            ProtoSet::from_str(&versions).unwrap().to_string()
        );
        versions = "1-4";
        assert_eq!(
            String::from(versions),
            ProtoSet::from_str(&versions).unwrap().to_string()
        );
        versions = "1,3,5-7";
        assert_eq!(
            String::from(versions),
            ProtoSet::from_str(&versions).unwrap().to_string()
        );
        versions = "1-3,50";
        assert_eq!(
            String::from(versions),
            ProtoSet::from_str(&versions).unwrap().to_string()
        );
    }
}

View File

@ -1,365 +0,0 @@
// Copyright (c) 2016-2019, The Tor Project, Inc. */
// See LICENSE for licensing information */
extern crate protover;
use protover::errors::ProtoverError;
use protover::ProtoEntry;
use protover::ProtoverVote;
use protover::UnvalidatedProtoEntry;
// Parsing smoke tests: each of these strings must parse as a valid ProtoEntry.
#[test]
fn parse_protocol_with_single_proto_and_single_version() {
    let _: ProtoEntry = "Cons=1".parse().unwrap();
}
#[test]
fn parse_protocol_with_single_protocol_and_multiple_versions() {
    let _: ProtoEntry = "Cons=1-2".parse().unwrap();
}
#[test]
fn parse_protocol_with_different_single_protocol_and_single_version() {
    let _: ProtoEntry = "HSDir=1".parse().unwrap();
}
#[test]
fn parse_protocol_with_single_protocol_and_supported_version() {
    let _: ProtoEntry = "Desc=2".parse().unwrap();
}
#[test]
fn parse_protocol_with_two_protocols_and_single_version() {
    let _: ProtoEntry = "Cons=1 HSDir=1".parse().unwrap();
}
#[test]
fn parse_protocol_with_single_protocol_and_two_sequential_versions() {
    let _: ProtoEntry = "Desc=1-2".parse().unwrap();
}
#[test]
fn parse_protocol_with_single_protocol_and_protocol_range() {
    let _: ProtoEntry = "Link=1-4".parse().unwrap();
}
#[test]
fn parse_protocol_with_single_protocol_and_protocol_set() {
    let _: ProtoEntry = "Link=3-4 Desc=2".parse().unwrap();
}
// `all_supported()` tests: None means everything listed is supported;
// Some(entry) enumerates exactly the protocols/versions we lack.
#[test]
fn protocol_all_supported_with_single_protocol_and_protocol_set() {
    let protocols: UnvalidatedProtoEntry = "Link=3-4 Desc=2".parse().unwrap();
    let unsupported: Option<UnvalidatedProtoEntry> = protocols.all_supported();
    assert_eq!(true, unsupported.is_none());
}
#[test]
fn protocol_all_supported_with_two_values() {
    let protocols: UnvalidatedProtoEntry = "Microdesc=1-2 Relay=2".parse().unwrap();
    let unsupported: Option<UnvalidatedProtoEntry> = protocols.all_supported();
    assert_eq!(true, unsupported.is_none());
}
#[test]
fn protocol_all_supported_with_one_value() {
    let protocols: UnvalidatedProtoEntry = "Microdesc=1-2".parse().unwrap();
    let unsupported: Option<UnvalidatedProtoEntry> = protocols.all_supported();
    assert_eq!(true, unsupported.is_none());
}
#[test]
fn protocol_all_supported_with_three_values() {
    let protocols: UnvalidatedProtoEntry = "LinkAuth=1 Microdesc=1-2 Relay=2".parse().unwrap();
    let unsupported: Option<UnvalidatedProtoEntry> = protocols.all_supported();
    assert_eq!(true, unsupported.is_none());
}
#[test]
fn protocol_all_supported_with_unsupported_protocol() {
    // Unknown protocol names are reported back verbatim.
    let protocols: UnvalidatedProtoEntry = "Wombat=9".parse().unwrap();
    let unsupported: Option<UnvalidatedProtoEntry> = protocols.all_supported();
    assert_eq!(true, unsupported.is_some());
    assert_eq!("Wombat=9", &unsupported.unwrap().to_string());
}
#[test]
fn protocol_all_supported_with_unsupported_versions() {
    // Only the portion of the range beyond our support is reported.
    let protocols: UnvalidatedProtoEntry = "Link=3-63".parse().unwrap();
    let unsupported: Option<UnvalidatedProtoEntry> = protocols.all_supported();
    assert_eq!(true, unsupported.is_some());
    assert_eq!("Link=6-63", &unsupported.unwrap().to_string());
}
#[test]
fn protocol_all_supported_with_unsupported_low_version() {
    let protocols: UnvalidatedProtoEntry = "HSIntro=2-3".parse().unwrap();
    let unsupported: Option<UnvalidatedProtoEntry> = protocols.all_supported();
    assert_eq!(true, unsupported.is_some());
    assert_eq!("HSIntro=2", &unsupported.unwrap().to_string());
}
#[test]
fn protocol_all_supported_with_unsupported_high_version() {
    let protocols: UnvalidatedProtoEntry = "Cons=1-2,60".parse().unwrap();
    let unsupported: Option<UnvalidatedProtoEntry> = protocols.all_supported();
    assert_eq!(true, unsupported.is_some());
    assert_eq!("Cons=60", &unsupported.unwrap().to_string());
}
#[test]
fn protocol_all_supported_with_mix_of_supported_and_unsupproted() {
    let protocols: UnvalidatedProtoEntry = "Link=3-4 Wombat=9".parse().unwrap();
    let unsupported: Option<UnvalidatedProtoEntry> = protocols.all_supported();
    assert_eq!(true, unsupported.is_some());
    assert_eq!("Wombat=9", &unsupported.unwrap().to_string());
}
// `supports_protocol()` tests and one malformed-input panic test.
#[test]
fn protover_string_supports_protocol_returns_true_for_single_supported() {
    let protocols: UnvalidatedProtoEntry = "Link=3-4 Cons=1".parse().unwrap();
    let is_supported = protocols.supports_protocol(&protover::Protocol::Cons.into(), &1);
    assert_eq!(true, is_supported);
}
#[test]
fn protover_string_supports_protocol_returns_false_for_single_unsupported() {
    let protocols: UnvalidatedProtoEntry = "Link=3-4 Cons=1".parse().unwrap();
    let is_supported = protocols.supports_protocol(&protover::Protocol::Cons.into(), &2);
    assert_eq!(false, is_supported);
}
#[test]
fn protover_string_supports_protocol_returns_false_for_unsupported() {
    // Asking about a protocol with no entry at all yields false.
    let protocols: UnvalidatedProtoEntry = "Link=3-4".parse().unwrap();
    let is_supported = protocols.supports_protocol(&protover::Protocol::Cons.into(), &2);
    assert_eq!(false, is_supported);
}
#[test]
#[should_panic]
fn parse_protocol_with_unexpected_characters() {
    // '*' and '%' are not valid version characters, so unwrap() must panic.
    let _: UnvalidatedProtoEntry = "Cons=*-%".parse().unwrap();
}
// `ProtoverVote::compute()` tests: versions listed by at least `threshold`
// votes survive; output is rendered sorted by protocol name.
#[test]
fn protover_compute_vote_returns_empty_for_empty_string() {
    let protocols: &[UnvalidatedProtoEntry] = &["".parse().unwrap()];
    let listed = ProtoverVote::compute(protocols, &1);
    assert_eq!("", listed.to_string());
}
#[test]
fn protover_compute_vote_returns_single_protocol_for_matching() {
    let protocols: &[UnvalidatedProtoEntry] = &["Cons=1".parse().unwrap()];
    let listed = ProtoverVote::compute(protocols, &1);
    assert_eq!("Cons=1", listed.to_string());
}
#[test]
fn protover_compute_vote_returns_two_protocols_for_two_matching() {
    let protocols: &[UnvalidatedProtoEntry] = &["Link=1 Cons=1".parse().unwrap()];
    let listed = ProtoverVote::compute(protocols, &1);
    assert_eq!("Cons=1 Link=1", listed.to_string());
}
#[test]
fn protover_compute_vote_returns_one_protocol_when_one_out_of_two_matches() {
    let protocols: &[UnvalidatedProtoEntry] =
        &["Cons=1 Link=2".parse().unwrap(), "Cons=1".parse().unwrap()];
    let listed = ProtoverVote::compute(protocols, &2);
    assert_eq!("Cons=1", listed.to_string());
}
#[test]
fn protover_compute_vote_returns_protocols_that_it_doesnt_currently_support() {
    // Unknown protocol names still participate in voting.
    let protocols: &[UnvalidatedProtoEntry] =
        &["Foo=1 Cons=2".parse().unwrap(), "Bar=1".parse().unwrap()];
    let listed = ProtoverVote::compute(protocols, &1);
    assert_eq!("Bar=1 Cons=2 Foo=1", listed.to_string());
}
#[test]
fn protover_compute_vote_returns_matching_for_mix() {
    let protocols: &[UnvalidatedProtoEntry] = &["Link=1-10,50 Cons=1,3-7,8".parse().unwrap()];
    let listed = ProtoverVote::compute(protocols, &1);
    assert_eq!("Cons=1,3-8 Link=1-10,50", listed.to_string());
}
#[test]
fn protover_compute_vote_returns_matching_for_longer_mix() {
    let protocols: &[UnvalidatedProtoEntry] = &[
        "Desc=1-10,50 Cons=1,3-7,8".parse().unwrap(),
        "Link=12-45,8 Cons=2-6,8 Desc=9".parse().unwrap(),
    ];
    let listed = ProtoverVote::compute(protocols, &1);
    assert_eq!("Cons=1-8 Desc=1-10,50 Link=8,12-45", listed.to_string());
}
#[test]
fn protover_compute_vote_returns_matching_for_longer_mix_with_threshold_two() {
    let protocols: &[UnvalidatedProtoEntry] = &[
        "Desc=1-10,50 Cons=1,3-7,8".parse().unwrap(),
        "Link=8,12-45 Cons=2-6,8 Desc=9".parse().unwrap(),
    ];
    let listed = ProtoverVote::compute(protocols, &2);
    assert_eq!("Cons=3-6,8 Desc=9", listed.to_string());
}
#[test]
fn protover_compute_vote_handles_duplicated_versions() {
    let protocols: &[UnvalidatedProtoEntry] =
        &["Cons=1".parse().unwrap(), "Cons=1".parse().unwrap()];
    assert_eq!("Cons=1", ProtoverVote::compute(protocols, &2).to_string());
    let protocols: &[UnvalidatedProtoEntry] =
        &["Cons=1-2".parse().unwrap(), "Cons=1-2".parse().unwrap()];
    assert_eq!("Cons=1-2", ProtoverVote::compute(protocols, &2).to_string());
}
#[test]
fn protover_compute_vote_handles_invalid_proto_entries() {
    // "Dinosaur=1" parses as an unknown protocol but only appears once,
    // so it falls below the threshold of 2.
    let protocols: &[UnvalidatedProtoEntry] = &[
        "Cons=1".parse().unwrap(),
        "Cons=1".parse().unwrap(),
        "Dinosaur=1".parse().unwrap(),
    ];
    assert_eq!("Cons=1", ProtoverVote::compute(protocols, &2).to_string());
}
// `is_supported_here()` tests and further `all_supported()` smoke tests.
#[test]
fn parse_protocol_with_single_protocol_and_two_nonsequential_versions() {
    let _: ProtoEntry = "Desc=1,2".parse().unwrap();
}
#[test]
fn protover_is_supported_here_returns_true_for_supported_protocol() {
    assert_eq!(
        true,
        protover::is_supported_here(&protover::Protocol::Cons, &1)
    );
}
#[test]
fn protover_is_supported_here_returns_false_for_unsupported_protocol() {
    assert_eq!(
        false,
        protover::is_supported_here(&protover::Protocol::Cons, &5)
    );
}
#[test]
fn protocol_all_supported_with_single_proto_and_single_version() {
    let protocol: UnvalidatedProtoEntry = "Cons=1".parse().unwrap();
    let unsupported: Option<UnvalidatedProtoEntry> = protocol.all_supported();
    assert_eq!(true, unsupported.is_none());
}
#[test]
fn protocol_all_supported_with_single_protocol_and_multiple_versions() {
    let protocol: UnvalidatedProtoEntry = "Cons=1-2".parse().unwrap();
    let unsupported: Option<UnvalidatedProtoEntry> = protocol.all_supported();
    assert_eq!(true, unsupported.is_none());
}
#[test]
fn protocol_all_supported_with_different_single_protocol_and_single_version() {
    let protocol: UnvalidatedProtoEntry = "HSDir=1".parse().unwrap();
    let unsupported: Option<UnvalidatedProtoEntry> = protocol.all_supported();
    assert_eq!(true, unsupported.is_none());
}
#[test]
fn protocol_all_supported_with_single_protocol_and_supported_version() {
    let protocol: UnvalidatedProtoEntry = "Desc=2".parse().unwrap();
    let unsupported: Option<UnvalidatedProtoEntry> = protocol.all_supported();
    assert_eq!(true, unsupported.is_none());
}
#[test]
fn protocol_all_supported_with_two_protocols_and_single_version() {
    let protocols: UnvalidatedProtoEntry = "Cons=1 HSDir=1".parse().unwrap();
    let unsupported: Option<UnvalidatedProtoEntry> = protocols.all_supported();
    assert_eq!(true, unsupported.is_none());
}
#[test]
fn protocol_all_supported_with_single_protocol_and_two_nonsequential_versions() {
    let protocol: UnvalidatedProtoEntry = "Desc=1,2".parse().unwrap();
    let unsupported: Option<UnvalidatedProtoEntry> = protocol.all_supported();
    assert_eq!(true, unsupported.is_none());
}
#[test]
fn protocol_all_supported_with_single_protocol_and_two_sequential_versions() {
    let protocol: UnvalidatedProtoEntry = "Desc=1-2".parse().unwrap();
    let unsupported: Option<UnvalidatedProtoEntry> = protocol.all_supported();
    assert_eq!(true, unsupported.is_none());
}
#[test]
fn protocol_all_supported_with_single_protocol_and_protocol_range() {
    let protocol: UnvalidatedProtoEntry = "Link=1-4".parse().unwrap();
    let unsupported: Option<UnvalidatedProtoEntry> = protocol.all_supported();
    assert_eq!(true, unsupported.is_none());
}
// Edge cases for `all_supported()` and malformed/oversized entries.
#[test]
fn protover_all_supported_should_exclude_versions_we_actually_do_support() {
    // Supported versions (1-5) are trimmed from the reported range.
    let proto: UnvalidatedProtoEntry = "Link=3-63".parse().unwrap();
    let result: String = proto.all_supported().unwrap().to_string();
    assert_eq!(result, "Link=6-63".to_string());
}
#[test]
fn protover_all_supported_should_exclude_versions_we_actually_do_support_complex1() {
    let proto: UnvalidatedProtoEntry = "Link=1-3,30-63".parse().unwrap();
    let result: String = proto.all_supported().unwrap().to_string();
    assert_eq!(result, "Link=30-63".to_string());
}
#[test]
fn protover_all_supported_should_exclude_versions_we_actually_do_support_complex2() {
    let proto: UnvalidatedProtoEntry = "Link=1-3,5-12".parse().unwrap();
    let result: String = proto.all_supported().unwrap().to_string();
    assert_eq!(result, "Link=6-12".to_string());
}
#[test]
fn protover_all_supported_should_exclude_some_versions_and_entire_protocols() {
    let proto: UnvalidatedProtoEntry = "Link=1-3,5-12 Quokka=50-51".parse().unwrap();
    let result: String = proto.all_supported().unwrap().to_string();
    assert_eq!(result, "Link=6-12 Quokka=50-51".to_string());
}
#[test]
// C_RUST_DIFFERS: The C will return true (e.g. saying "yes, that's supported")
// but set the msg to NULL (??? seems maybe potentially bad). The Rust will
// simply return a None.
fn protover_all_supported_should_return_empty_string_for_weird_thing() {
    // "Fribble=" has an empty version list, which is dropped entirely.
    let proto: UnvalidatedProtoEntry = "Fribble=".parse().unwrap();
    let result: Option<UnvalidatedProtoEntry> = proto.all_supported();
    assert!(result.is_none());
}
#[test]
fn protover_unvalidatedprotoentry_should_err_entirely_unparseable_things() {
    // No '=' at all is a parse error, not an unknown protocol.
    let proto: Result<UnvalidatedProtoEntry, ProtoverError> = "Fribble".parse();
    assert_eq!(Err(ProtoverError::Unparseable), proto);
}
#[test]
fn protover_all_supported_over_maximum_limit() {
    // Expanding 1-4294967295 would exceed MAX_PROTOCOLS_TO_EXPAND.
    let proto: Result<UnvalidatedProtoEntry, ProtoverError> = "Sleen=1-4294967295".parse();
    assert_eq!(Err(ProtoverError::ExceedsMax), proto);
}

View File

@ -1,18 +0,0 @@
# Cargo manifest for the `smartlist` crate: Rust bindings over tor's C
# smartlist container type.
[package]
authors = ["The Tor Project"]
version = "0.0.1"
name = "smartlist"

# libc provides the raw C types (c_char, c_int) used by the FFI structs.
[dependencies]
libc = "0.2.39"

[lib]
name = "smartlist"
path = "lib.rs"

[features]
# We have to define a feature here because doctests don't get cfg(test),
# and we need to disable some C dependencies when running the doctests
# because of the various linker issues. See
# https://github.com/rust-lang/rust/issues/45599
test_linking_hack = []

View File

@ -1,17 +0,0 @@
// Copyright (c) 2016-2019, The Tor Project, Inc. */
// See LICENSE for licensing information */

//! Crate root for `smartlist`: re-exports the smartlist FFI wrappers.

extern crate libc;

mod smartlist;
pub use smartlist::*;

// When testing we may be compiled with sanitizers which are incompatible with
// Rust's default allocator, jemalloc (unsure why at this time). Most crates
// link to `tor_allocate` which switches by default to a non-jemalloc allocator,
// but we don't already depend on `tor_allocate` so make sure that while testing
// we don't use jemalloc. (but rather malloc/free)
#[global_allocator]
#[cfg(test)]
static A: std::alloc::System = std::alloc::System;

View File

@ -1,115 +0,0 @@
// Copyright (c) 2016-2019, The Tor Project, Inc. */
// See LICENSE for licensing information */
use libc::{c_char, c_int};
use std::ffi::CStr;
use std::slice;
/// Smartlists are a type used in C code in tor to define a collection of a
/// generic type, which has a capacity and a number used. Each Smartlist
/// defines how to extract the list of values from the underlying C structure
///
/// Implementations are required to have a C representation, as this module
/// serves purely to translate smartlists as defined in tor to vectors in Rust.
pub trait Smartlist<T> {
fn get_list(&self) -> Vec<T>;
}
#[repr(C)]
pub struct Stringlist {
pub list: *const *const c_char,
pub num_used: c_int,
pub capacity: c_int,
}
impl Smartlist<String> for Stringlist {
fn get_list(&self) -> Vec<String> {
let empty: Vec<String> = Vec::new();
let mut rust_list: Vec<String> = Vec::new();
if self.list.is_null() || self.num_used == 0 {
return empty;
}
// unsafe, as we need to extract the smartlist list into a vector of
// pointers, and then transform each element into a Rust string.
let elems: &[*const c_char] =
unsafe { slice::from_raw_parts(self.list, self.num_used as usize) };
for elem in elems.iter() {
if elem.is_null() {
continue;
}
// unsafe, as we need to create a cstring from the referenced
// element
let c_string = unsafe { CStr::from_ptr(*elem) };
let r_string = match c_string.to_str() {
Ok(n) => n,
Err(_) => return empty,
};
rust_list.push(String::from(r_string));
}
rust_list
}
}
// TODO: CHK: this module maybe should be tested from a test in C with a
// smartlist as defined in tor.
#[cfg(test)]
mod test {
#[test]
fn test_get_list_of_strings() {
extern crate libc;
use libc::c_char;
use std::ffi::CString;
use super::Smartlist;
use super::Stringlist;
{
// test to verify that null pointers are gracefully handled
use std::ptr;
let sl = Stringlist {
list: ptr::null(),
num_used: 0,
capacity: 0,
};
let data = sl.get_list();
assert_eq!(0, data.len());
}
{
let args = vec![String::from("a"), String::from("b")];
// for each string, transform it into a CString
let c_strings: Vec<_> = args
.iter()
.map(|arg| CString::new(arg.as_str()).unwrap())
.collect();
// then, collect a pointer for each CString
let p_args: Vec<_> = c_strings.iter().map(|arg| arg.as_ptr()).collect();
let p: *const *const c_char = p_args.as_ptr();
// This is the representation that we expect when receiving a
// smartlist at the Rust/C FFI layer.
let sl = Stringlist {
list: p,
num_used: 2,
capacity: 2,
};
let data = sl.get_list();
assert_eq!("a", &data[0]);
assert_eq!("b", &data[1]);
}
}
}

View File

@ -1,18 +0,0 @@
[package]
authors = ["The Tor Project"]
version = "0.0.1"
name = "tor_allocate"
[dependencies]
libc = "=0.2.39"
[lib]
name = "tor_allocate"
path = "lib.rs"
[features]
# We have to define a feature here because doctests don't get cfg(test),
# and we need to disable some C dependencies when running the doctests
# because of the various linker issues. See
# https://github.com/rust-lang/rust/issues/45599
test_linking_hack = []

View File

@ -1,20 +0,0 @@
// Copyright (c) 2016-2019, The Tor Project, Inc. */
// See LICENSE for licensing information */
//! Allocation helper functions that allow data to be allocated in Rust
//! using tor's specified allocator. In doing so, this can be later freed
//! from C.
//!
//! This is currently a temporary solution, we will later use tor's allocator
//! by default for any allocation that occurs in Rust. However, as this will
//! stabalize in 2018, we can use this as a temporary measure.
extern crate libc;
use std::alloc::System;
mod tor_allocate;
pub use tor_allocate::*;
#[global_allocator]
static A: System = System;

View File

@ -1,104 +0,0 @@
// Copyright (c) 2016-2019, The Tor Project, Inc. */
// See LICENSE for licensing information */
// No-op defined purely for testing at the module level
use libc::c_char;
use libc::c_void;
#[cfg(not(feature = "testing"))]
use std::{mem, ptr, slice};
// Define a no-op implementation for testing Rust modules without linking to C
#[cfg(feature = "testing")]
pub fn allocate_and_copy_string(s: &str) -> *mut c_char {
use std::ffi::CString;
CString::new(s).unwrap().into_raw()
}
// Defined only for tests, used for testing purposes, so that we don't need
// to link to tor C files. Uses the system allocator
#[cfg(test)]
unsafe extern "C" fn tor_malloc_(size: usize) -> *mut c_void {
use libc::malloc;
malloc(size)
}
#[cfg(all(not(test), not(feature = "testing")))]
extern "C" {
fn tor_malloc_(size: usize) -> *mut c_void;
}
/// Allocate memory using tor_malloc_ and copy an existing string into the
/// allocated buffer, returning a pointer that can later be called in C.
///
/// # Inputs
///
/// * `src`, a reference to a String.
///
/// # Returns
///
/// A `*mut c_char` that should be freed by tor_free in C
///
#[cfg(not(feature = "testing"))]
pub fn allocate_and_copy_string(src: &str) -> *mut c_char {
let bytes: &[u8] = src.as_bytes();
let size = mem::size_of_val::<[u8]>(bytes);
let size_one_byte = mem::size_of::<u8>();
// handle integer overflow when adding one to the calculated length
let size_with_null_byte = match size.checked_add(size_one_byte) {
Some(n) => n,
None => return ptr::null_mut(),
};
let dest = unsafe { tor_malloc_(size_with_null_byte) as *mut u8 };
if dest.is_null() {
return ptr::null_mut();
}
unsafe { ptr::copy_nonoverlapping(bytes.as_ptr(), dest, size) };
// set the last byte as null, using the ability to index into a slice
// rather than doing pointer arithmetic
let slice = unsafe { slice::from_raw_parts_mut(dest, size_with_null_byte) };
slice[size] = 0; // add a null terminator
dest as *mut c_char
}
#[cfg(test)]
mod test {
#[test]
fn test_allocate_and_copy_string_with_empty() {
use libc::{c_void, free};
use std::ffi::CStr;
use tor_allocate::allocate_and_copy_string;
let allocated_empty = allocate_and_copy_string("");
let allocated_empty_rust = unsafe { CStr::from_ptr(allocated_empty).to_str().unwrap() };
assert_eq!("", allocated_empty_rust);
unsafe { free(allocated_empty as *mut c_void) };
}
#[test]
fn test_allocate_and_copy_string_with_not_empty_string() {
use libc::{c_void, free};
use std::ffi::CStr;
use tor_allocate::allocate_and_copy_string;
let allocated_empty = allocate_and_copy_string("foo bar biz");
let allocated_empty_rust = unsafe { CStr::from_ptr(allocated_empty).to_str().unwrap() };
assert_eq!("foo bar biz", allocated_empty_rust);
unsafe { free(allocated_empty as *mut c_void) };
}
}

View File

@ -1,21 +0,0 @@
[package]
name = "tor_log"
version = "0.1.0"
authors = ["The Tor Project"]
[lib]
name = "tor_log"
path = "lib.rs"
[features]
# We have to define a feature here because doctests don't get cfg(test),
# and we need to disable some C dependencies when running the doctests
# because of the various linker issues. See
# https://github.com/rust-lang/rust/issues/45599
test_linking_hack = []
[dependencies]
libc = "0.2.39"
[dependencies.tor_allocate]
path = "../tor_allocate"

View File

@ -1,16 +0,0 @@
//! Copyright (c) 2016-2019, The Tor Project, Inc. */
//! See LICENSE for licensing information */
//! Logging wrapper for Rust to utilize Tor's logger, found at
//! src/common/log.c and src/common/torlog.h
//!
//! Exposes different interfaces depending on whether we are running in test
//! or non-test mode. When testing, we use a no-op implementation,
//! otherwise we link directly to C.
extern crate libc;
extern crate tor_allocate;
mod tor_log;
pub use tor_log::*;

View File

@ -1,265 +0,0 @@
// Copyright (c) 2016-2019, The Tor Project, Inc. */
// See LICENSE for licensing information */
// Note that these functions are untested due to the fact that there are no
// return variables to test and they are calling into a C API.
/// The related domain which the logging message is relevant. For example,
/// log messages relevant to networking would use LogDomain::LdNet, whereas
/// general messages can use LdGeneral.
#[derive(Eq, PartialEq)]
pub enum LogDomain {
Net,
General,
}
/// The severity level at which to log messages.
#[derive(Eq, PartialEq)]
pub enum LogSeverity {
Notice,
Warn,
}
/// Main entry point for Rust modules to log messages.
///
/// # Inputs
///
/// * A `severity` of type LogSeverity, which defines the level of severity the
/// message will be logged.
/// * A `domain` of type LogDomain, which defines the domain the log message
/// will be associated with.
/// * A `function` of type &str, which defines the name of the function where
/// the message is being logged. There is a current RFC for a macro that
/// defines function names. When it is, we should use it. See
/// https://github.com/rust-lang/rfcs/pull/1719
/// * A `message` of type &str, which is the log message itself.
#[macro_export]
macro_rules! tor_log_msg {
($severity: path,
$domain: path,
$function: expr,
$($message:tt)*) =>
{
{
let msg = format!($($message)*);
$crate::tor_log_msg_impl($severity, $domain, $function, msg)
}
};
}
#[inline]
pub fn tor_log_msg_impl(severity: LogSeverity, domain: LogDomain, function: &str, message: String) {
use std::ffi::CString;
/// Default function name to log in case of errors when converting
/// a function name to a CString
const ERR_LOG_FUNCTION: &str = "tor_log_msg";
/// Default message to log in case of errors when converting a log
/// message to a CString
const ERR_LOG_MSG: &str = "Unable to log message from Rust \
module due to error when converting to CString";
let func = match CString::new(function) {
Ok(n) => n,
Err(_) => CString::new(ERR_LOG_FUNCTION).unwrap(),
};
let msg = match CString::new(message) {
Ok(n) => n,
Err(_) => CString::new(ERR_LOG_MSG).unwrap(),
};
// Bind to a local variable to preserve ownership. This is essential so
// that ownership is guaranteed until these local variables go out of scope
let func_ptr = func.as_ptr();
let msg_ptr = msg.as_ptr();
let c_severity = unsafe { log::translate_severity(severity) };
let c_domain = unsafe { log::translate_domain(domain) };
unsafe { log::tor_log_string(c_severity, c_domain, func_ptr, msg_ptr) }
}
/// This implementation is used when compiling for actual use, as opposed to
/// testing.
#[cfg(not(test))]
pub mod log {
use super::LogDomain;
use super::LogSeverity;
use libc::{c_char, c_int};
/// Severity log types. These mirror definitions in src/lib/log/log.h
/// C_RUST_COUPLED: src/lib/log/log.c, log domain types
extern "C" {
static LOG_WARN_: c_int;
static LOG_NOTICE_: c_int;
}
/// Domain log types. These mirror definitions in src/lib/log/log.h
/// C_RUST_COUPLED: src/lib/log/log.c, log severity types
extern "C" {
static LD_NET_: u64;
static LD_GENERAL_: u64;
}
/// Translate Rust definitions of log domain levels to C. This exposes a 1:1
/// mapping between types.
#[inline]
pub unsafe fn translate_domain(domain: LogDomain) -> u64 {
match domain {
LogDomain::Net => LD_NET_,
LogDomain::General => LD_GENERAL_,
}
}
/// Translate Rust definitions of log severity levels to C. This exposes a
/// 1:1 mapping between types.
#[inline]
pub unsafe fn translate_severity(severity: LogSeverity) -> c_int {
match severity {
LogSeverity::Warn => LOG_WARN_,
LogSeverity::Notice => LOG_NOTICE_,
}
}
/// The main entry point into Tor's logger. When in non-test mode, this
/// will link directly with `tor_log_string` in torlog.c
extern "C" {
pub fn tor_log_string(
severity: c_int,
domain: u64,
function: *const c_char,
string: *const c_char,
);
}
}
/// This module exposes no-op functionality for testing other Rust modules
/// without linking to C.
#[cfg(test)]
pub mod log {
use super::LogDomain;
use super::LogSeverity;
use libc::{c_char, c_int};
pub static mut LAST_LOGGED_FUNCTION: *mut String = 0 as *mut String;
pub static mut LAST_LOGGED_MESSAGE: *mut String = 0 as *mut String;
pub unsafe fn tor_log_string(
_severity: c_int,
_domain: u32,
function: *const c_char,
message: *const c_char,
) {
use std::ffi::CStr;
let f = CStr::from_ptr(function);
let fct = match f.to_str() {
Ok(n) => n,
Err(_) => "",
};
LAST_LOGGED_FUNCTION = Box::into_raw(Box::new(String::from(fct)));
let m = CStr::from_ptr(message);
let msg = match m.to_str() {
Ok(n) => n,
Err(_) => "",
};
LAST_LOGGED_MESSAGE = Box::into_raw(Box::new(String::from(msg)));
}
pub unsafe fn translate_domain(_domain: LogDomain) -> u32 {
1
}
pub unsafe fn translate_severity(_severity: LogSeverity) -> c_int {
1
}
}
#[cfg(test)]
mod test {
use tor_log::log::{LAST_LOGGED_FUNCTION, LAST_LOGGED_MESSAGE};
use tor_log::*;
#[test]
fn test_get_log_message() {
{
fn test_macro() {
tor_log_msg!(
LogSeverity::Warn,
LogDomain::Net,
"test_macro",
"test log message {}",
"a",
);
}
test_macro();
let function = unsafe { Box::from_raw(LAST_LOGGED_FUNCTION) };
assert_eq!("test_macro", *function);
let message = unsafe { Box::from_raw(LAST_LOGGED_MESSAGE) };
assert_eq!("test log message a", *message);
}
// test multiple inputs into the log message
{
fn test_macro() {
tor_log_msg!(
LogSeverity::Warn,
LogDomain::Net,
"next_test_macro",
"test log message {} {} {} {} {}",
1,
2,
3,
4,
5
);
}
test_macro();
let function = unsafe { Box::from_raw(LAST_LOGGED_FUNCTION) };
assert_eq!("next_test_macro", *function);
let message = unsafe { Box::from_raw(LAST_LOGGED_MESSAGE) };
assert_eq!("test log message 1 2 3 4 5", *message);
}
// test how a long log message will be formatted
{
fn test_macro() {
tor_log_msg!(
LogSeverity::Warn,
LogDomain::Net,
"test_macro",
"{}",
"All the world's a stage, and all the men and women \
merely players: they have their exits and their \
entrances; and one man in his time plays many parts, his \
acts being seven ages."
);
}
test_macro();
let expected_string = "All the world's a \
stage, and all the men \
and women merely players: \
they have their exits and \
their entrances; and one man \
in his time plays many parts, \
his acts being seven ages.";
let function = unsafe { Box::from_raw(LAST_LOGGED_FUNCTION) };
assert_eq!("test_macro", *function);
let message = unsafe { Box::from_raw(LAST_LOGGED_MESSAGE) };
assert_eq!(expected_string, *message);
}
}
}

View File

@ -1,22 +0,0 @@
[package]
authors = ["The Tor Project"]
name = "tor_rust"
version = "0.1.0"
[lib]
name = "tor_rust"
path = "lib.rs"
crate_type = ["staticlib"]
[dependencies.tor_util]
path = "../tor_util"
[dependencies.protover]
path = "../protover"
[features]
# We have to define a feature here because doctests don't get cfg(test),
# and we need to disable some C dependencies when running the doctests
# because of the various linker issues. See
# https://github.com/rust-lang/rust/issues/45599
test_linking_hack = []

View File

@ -1,28 +0,0 @@
EXTRA_DIST +=\
src/rust/tor_rust/Cargo.toml \
src/rust/tor_rust/lib.rs
EXTRA_CARGO_OPTIONS=
@TOR_RUST_LIB_PATH@: FORCE
( cd "$(abs_top_builddir)/src/rust" ; \
CARGO_TARGET_DIR="$(abs_top_builddir)/src/rust/target" \
$(CARGO) build --release $(EXTRA_CARGO_OPTIONS) \
$(CARGO_ONLINE) \
--manifest-path "$(abs_top_srcdir)/src/rust/tor_rust/Cargo.toml" )
distclean-rust:
( cd "$(abs_top_builddir)/src/rust" ; \
CARGO_TARGET_DIR="$(abs_top_builddir)/src/rust/target" \
$(CARGO) clean $(EXTRA_CARGO_OPTIONS) \
$(CARGO_ONLINE) \
--manifest-path "$(abs_top_srcdir)/src/rust/tor_rust/Cargo.toml" )
rm -rf "$(abs_top_builddir)/src/rust/registry"
if USE_RUST
build-rust: @TOR_RUST_LIB_PATH@
else
build-rust:
endif
FORCE:

View File

@ -1,5 +0,0 @@
extern crate protover;
extern crate tor_util;
pub use protover::*;
pub use tor_util::*;

View File

@ -1,24 +0,0 @@
[package]
authors = ["The Tor Project"]
name = "tor_util"
version = "0.0.1"
[lib]
name = "tor_util"
path = "lib.rs"
[dependencies.tor_allocate]
path = "../tor_allocate"
[dependencies.tor_log]
path = "../tor_log"
[dependencies]
libc = "=0.2.39"
[features]
# We have to define a feature here because doctests don't get cfg(test),
# and we need to disable some C dependencies when running the doctests
# because of the various linker issues. See
# https://github.com/rust-lang/rust/issues/45599
test_linking_hack = []

View File

@ -1,27 +0,0 @@
// Copyright (c) 2016-2019, The Tor Project, Inc. */
// See LICENSE for licensing information */
//! FFI functions to announce Rust support during tor startup, only to be
//! called from C.
//!
use tor_log::{LogDomain, LogSeverity};
/// Returns a short string to announce Rust support during startup.
///
/// # Examples
/// ```c
/// char *rust_str = rust_welcome_string();
/// printf("%s", rust_str);
/// tor_free(rust_str);
/// ```
#[no_mangle]
pub extern "C" fn rust_log_welcome_string() {
tor_log_msg!(
LogSeverity::Notice,
LogDomain::General,
"rust_log_welcome_string",
"Tor is running with Rust integration. Please report \
any bugs you encounter."
);
}

View File

@ -1,14 +0,0 @@
// Copyright (c) 2016-2019, The Tor Project, Inc. */
// See LICENSE for licensing information */
//! Small module to announce Rust support during startup for demonstration
//! purposes.
extern crate libc;
extern crate tor_allocate;
#[macro_use]
extern crate tor_log;
pub mod ffi;
pub mod strings;

View File

@ -1,136 +0,0 @@
// Copyright (c) 2016-2019, The Tor Project, Inc. */
// See LICENSE for licensing information */
//! Utilities for working with static strings.
/// Create a `CStr` from a literal byte slice, appending a NUL byte to it first.
///
/// # Warning
///
/// The literal byte slice which is taken as an argument *MUST NOT* have any NUL
/// bytes (`b"\0"`) in it, anywhere, or else an empty string will be returned
/// (`CStr::from_bytes_with_nul_unchecked(b"\0")`) so as to avoid `panic!()`ing.
///
/// # Examples
///
/// ```
/// #[macro_use]
/// extern crate tor_util;
///
/// use std::ffi::CStr;
///
/// # fn do_test() -> Result<&'static CStr, &'static str> {
/// let message: &'static str = "This is a test of the tsunami warning system.";
/// let tuesday: &'static CStr;
/// let original: &str;
///
/// tuesday = cstr!("This is a test of the tsunami warning system.");
/// original = tuesday.to_str().or(Err("Couldn't unwrap CStr!"))?;
///
/// assert!(original == message);
/// #
/// # Ok(tuesday)
/// # }
/// # fn main() {
/// # do_test(); // so that we can use the ? operator in the test
/// # }
/// ```
/// It is also possible to pass several string literals to this macro. They
/// will be concatenated together in the order of the arguments, unmodified,
/// before finally being suffixed with a NUL byte:
///
/// ```
/// #[macro_use]
/// extern crate tor_util;
/// #
/// # use std::ffi::CStr;
/// #
/// # fn do_test() -> Result<&'static CStr, &'static str> {
///
/// let quux: &'static CStr = cstr!("foo", "bar", "baz");
/// let orig: &'static str = quux.to_str().or(Err("Couldn't unwrap CStr!"))?;
///
/// assert!(orig == "foobarbaz");
/// # Ok(quux)
/// # }
/// # fn main() {
/// # do_test(); // so that we can use the ? operator in the test
/// # }
/// ```
/// This is useful for passing static strings to C from Rust FFI code. To do so
/// so, use the `.as_ptr()` method on the resulting `&'static CStr` to convert
/// it to the Rust equivalent of a C `const char*`:
///
/// ```
/// #[macro_use]
/// extern crate tor_util;
///
/// use std::ffi::CStr;
/// use std::os::raw::c_char;
///
/// pub extern "C" fn give_static_borrowed_string_to_c() -> *const c_char {
/// let hello: &'static CStr = cstr!("Hello, language my parents wrote.");
///
/// hello.as_ptr()
/// }
/// # fn main() {
/// # let greetings = give_static_borrowed_string_to_c();
/// # }
/// ```
/// Note that the C code this static borrowed string is passed to *MUST NOT*
/// attempt to free the memory for the string.
///
/// # Note
///
/// An unfortunate limitation of the rustc compiler (as of 1.25.0-nightly), is
/// that the first example above compiles, but if we were to change the
/// assignment of `tuesday` as follows, it will fail to compile, because Rust
/// macros are expanded at parse time, and at parse time there is no symbol
/// table available.
///
/// ```ignore
/// tuesday = cstr!(message);
/// ```
/// with the error message `error: expected a literal`.
///
/// # Returns
///
/// If the string literals passed as arguments contain no NUL bytes anywhere,
/// then an `&'static CStr` containing the (concatenated) bytes of the string
/// literal(s) passed as arguments, with a NUL byte appended, is returned.
/// Otherwise, an `&'static CStr` containing a single NUL byte is returned (an
/// "empty" string in C).
#[macro_export]
macro_rules! cstr {
($($bytes:expr),*) => (
::std::ffi::CStr::from_bytes_with_nul(
concat!($($bytes),*, "\0").as_bytes()
).unwrap_or_default()
)
}
#[cfg(test)]
mod test {
use std::ffi::CStr;
#[test]
fn cstr_macro() {
let _: &'static CStr = cstr!("boo");
}
#[test]
fn cstr_macro_multi_input() {
let quux: &'static CStr = cstr!("foo", "bar", "baz");
assert!(quux.to_str().unwrap() == "foobarbaz");
}
#[test]
fn cstr_macro_bad_input() {
let waving: &'static CStr = cstr!("waving not drowning o/");
let drowning: &'static CStr = cstr!("\0 drowning not waving");
assert!(waving.to_str().unwrap() == "waving not drowning o/");
assert!(drowning.to_str().unwrap() == "")
}
}

View File

@ -8,7 +8,6 @@ FUZZING_LDFLAG = \
@TOR_LDFLAGS_zlib@ $(TOR_LDFLAGS_CRYPTLIB) @TOR_LDFLAGS_libevent@
FUZZING_LIBS = \
src/test/libtor-testing.a \
$(rust_ldadd) \
@TOR_ZLIB_LIBS@ @TOR_LIB_MATH@ \
@TOR_LIBEVENT_LIBS@ $(TOR_LIBS_CRYPTLIB) \
@TOR_LIB_WS32@ @TOR_LIB_IPHLPAPI@ @TOR_LIB_SHLWAPI@ @TOR_LIB_GDI@ @TOR_LIB_USERENV@ @CURVE25519_LIBS@ \

View File

@ -8,11 +8,7 @@ TESTS_ENVIRONMENT = \
export abs_top_builddir="$(abs_top_builddir)"; \
export builddir="$(builddir)"; \
export TESTING_TOR_BINARY="$(TESTING_TOR_BINARY)"; \
export CARGO="$(CARGO)"; \
export EXTRA_CARGO_OPTIONS="$(EXTRA_CARGO_OPTIONS)"; \
export CARGO_ONLINE="$(CARGO_ONLINE)"; \
export CCLD="$(CCLD)"; \
export RUSTFLAGS="-C linker=`echo '$(CC)' | cut -d' ' -f 1` $(RUST_LINKER_OPTIONS)";
export CCLD="$(CCLD)";
TESTSCRIPTS = \
src/test/fuzz_static_testcases.sh \
@ -35,11 +31,6 @@ TESTSCRIPTS = \
src/test/unittest_part7.sh \
src/test/unittest_part8.sh
if USE_RUST
TESTSCRIPTS += \
src/test/test_rust.sh
endif
if USEPYTHON
TESTSCRIPTS += \
src/test/test_ntor.sh \
@ -306,7 +297,6 @@ src_test_test_switch_id_CFLAGS = $(AM_CFLAGS) $(TEST_CFLAGS)
src_test_test_switch_id_LDFLAGS = @TOR_LDFLAGS_zlib@
src_test_test_switch_id_LDADD = \
$(TOR_UTIL_TESTING_LIBS) \
$(rust_ldadd) \
@TOR_ZLIB_LIBS@ @TOR_LIB_MATH@ \
@TOR_LIB_WS32@ @TOR_LIB_IPHLPAPI@ @TOR_LIB_SHLWAPI@ @TOR_LIB_USERENV@ \
@TOR_LZMA_LIBS@ @TOR_ZSTD_LIBS@ @TOR_TRACE_LIBS@
@ -314,7 +304,6 @@ src_test_test_LDFLAGS = @TOR_LDFLAGS_zlib@ $(TOR_LDFLAGS_CRYPTLIB) \
@TOR_LDFLAGS_libevent@
src_test_test_LDADD = \
src/test/libtor-testing.a \
$(rust_ldadd) \
@TOR_ZLIB_LIBS@ @TOR_LIB_MATH@ @TOR_LIBEVENT_LIBS@ \
$(TOR_LIBS_CRYPTLIB) @TOR_LIB_WS32@ @TOR_LIB_IPHLPAPI@ @TOR_LIB_SHLWAPI@ @TOR_LIB_GDI@ @TOR_LIB_USERENV@ \
@CURVE25519_LIBS@ \
@ -343,7 +332,6 @@ src_test_bench_LDFLAGS = @TOR_LDFLAGS_zlib@ $(TOR_LDFLAGS_CRYPTLIB) \
@TOR_LDFLAGS_libevent@
src_test_bench_LDADD = \
libtor.a \
$(rust_ldadd) \
@TOR_ZLIB_LIBS@ @TOR_LIB_MATH@ @TOR_LIBEVENT_LIBS@ \
$(TOR_LIBS_CRYPTLIB) @TOR_LIB_WS32@ @TOR_LIB_IPHLPAPI@ @TOR_LIB_SHLWAPI@ @TOR_LIB_GDI@ @TOR_LIB_USERENV@ \
@CURVE25519_LIBS@ \
@ -353,7 +341,6 @@ src_test_test_workqueue_LDFLAGS = @TOR_LDFLAGS_zlib@ $(TOR_LDFLAGS_CRYPTLIB) \
@TOR_LDFLAGS_libevent@
src_test_test_workqueue_LDADD = \
src/test/libtor-testing.a \
$(rust_ldadd) \
@TOR_ZLIB_LIBS@ @TOR_LIB_MATH@ @TOR_LIBEVENT_LIBS@ \
$(TOR_LIBS_CRYPTLIB) @TOR_LIB_WS32@ @TOR_LIB_IPHLPAPI@ @TOR_LIB_SHLWAPI@ @TOR_LIB_GDI@ @TOR_LIB_USERENV@ \
@CURVE25519_LIBS@ \
@ -365,7 +352,6 @@ src_test_test_timers_LDADD = \
src/lib/libtor-evloop-testing.a \
$(TOR_CRYPTO_TESTING_LIBS) \
$(TOR_UTIL_TESTING_LIBS) \
$(rust_ldadd) \
@TOR_ZLIB_LIBS@ @TOR_LIB_MATH@ @TOR_LIBEVENT_LIBS@ \
$(TOR_LIBS_CRYPTLIB) @TOR_LIB_WS32@ @TOR_LIB_IPHLPAPI@ @TOR_LIB_SHLWAPI@ @TOR_LIB_GDI@ @TOR_LIB_USERENV@ \
@CURVE25519_LIBS@ \
@ -401,7 +387,6 @@ src_test_test_ntor_cl_SOURCES = src/test/test_ntor_cl.c
src_test_test_ntor_cl_LDFLAGS = @TOR_LDFLAGS_zlib@ $(TOR_LDFLAGS_CRYPTLIB)
src_test_test_ntor_cl_LDADD = \
libtor.a \
$(rust_ldadd) \
@TOR_ZLIB_LIBS@ @TOR_LIB_MATH@ \
$(TOR_LIBS_CRYPTLIB) @TOR_LIB_WS32@ @TOR_LIB_IPHLPAPI@ @TOR_LIB_SHLWAPI@ @TOR_LIB_GDI@ @TOR_LIB_USERENV@ \
@CURVE25519_LIBS@ @TOR_LZMA_LIBS@ @TOR_TRACE_LIBS@
@ -424,7 +409,6 @@ noinst_PROGRAMS += src/test/test-bt-cl
src_test_test_bt_cl_SOURCES = src/test/test_bt_cl.c
src_test_test_bt_cl_LDADD = \
$(TOR_UTIL_TESTING_LIBS) \
$(rust_ldadd) \
@TOR_LIB_MATH@ \
@TOR_LIB_WS32@ @TOR_LIB_IPHLPAPI@ @TOR_LIB_SHLWAPI@ @TOR_LIB_GDI@ @TOR_LIB_USERENV@ \
@TOR_TRACE_LIBS@
@ -446,13 +430,11 @@ EXTRA_DIST += \
src/test/test_include.py \
src/test/zero_length_keys.sh \
scripts/maint/run_check_subsystem_order.sh \
src/test/rust_supp.txt \
src/test/test_keygen.sh \
src/test/test_key_expiration.sh \
src/test/test_zero_length_keys.sh \
src/test/test_ntor.sh src/test/test_hs_ntor.sh src/test/test_bt.sh \
src/test/test-network.sh \
src/test/test_rust.sh \
src/test/test_switch_id.sh \
src/test/test_workqueue_cancel.sh \
src/test/test_workqueue_efd.sh \
@ -470,6 +452,3 @@ EXTRA_DIST += \
src/test/unittest_part6.sh \
src/test/unittest_part7.sh \
src/test/unittest_part8.sh
test-rust:
$(TESTS_ENVIRONMENT) "$(abs_top_srcdir)/src/test/test_rust.sh"

View File

@ -23,13 +23,6 @@ static void
test_protover_parse(void *arg)
{
(void) arg;
#ifdef HAVE_RUST
/** This test is disabled on rust builds, because it only exists to test
* internal C functions. */
tt_skip();
done:
;
#else /* !defined(HAVE_RUST) */
char *re_encoded = NULL;
const char *orig = "Foo=1,3 Bar=3 Baz= Quux=9-12,14,15-16";
@ -64,18 +57,12 @@ test_protover_parse(void *arg)
SMARTLIST_FOREACH(elts, proto_entry_t *, ent, proto_entry_free(ent));
smartlist_free(elts);
tor_free(re_encoded);
#endif /* defined(HAVE_RUST) */
}
static void
test_protover_parse_fail(void *arg)
{
(void)arg;
#ifdef HAVE_RUST
/** This test is disabled on rust builds, because it only exists to test
* internal C functions. */
tt_skip();
#else
smartlist_t *elts;
/* random junk */
@ -108,7 +95,6 @@ test_protover_parse_fail(void *arg)
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
tt_ptr_op(elts, OP_EQ, NULL);
#endif /* defined(HAVE_RUST) */
done:
;
}
@ -265,7 +251,7 @@ test_protover_all_supported(void *arg)
#endif /* !defined(ALL_BUGS_ARE_FATAL) */
/* Protocol name too long */
#if !defined(HAVE_RUST) && !defined(ALL_BUGS_ARE_FATAL)
#if !defined(ALL_BUGS_ARE_FATAL)
tor_capture_bugs_(1);
tt_assert(protover_all_supported(
"DoSaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
@ -273,7 +259,7 @@ test_protover_all_supported(void *arg)
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
"aaaaaaaaaaaa=1-65536", &msg));
tor_end_capture_bugs_();
#endif /* !defined(HAVE_RUST) && !defined(ALL_BUGS_ARE_FATAL) */
#endif /* !defined(ALL_BUGS_ARE_FATAL) */
done:
tor_end_capture_bugs_();

View File

@ -1,28 +0,0 @@
#!/bin/sh
# Test all Rust crates
set -e
export LSAN_OPTIONS=suppressions=${abs_top_srcdir:-../../..}/src/test/rust_supp.txt
# When testing Cargo we pass a number of very specific linker flags down
# through Cargo. We do not, however, want these flags to affect things like
# build scripts, only the tests that we're compiling. To ensure this happens
# we unconditionally pass `--target` into Cargo, ensuring that `RUSTFLAGS` in
# the environment won't make their way into build scripts.
rustc_host=$(rustc -vV | grep host | sed 's/host: //')
for cargo_toml_dir in "${abs_top_srcdir:-../../..}"/src/rust/*; do
if [ -e "${cargo_toml_dir}/Cargo.toml" ]; then
# shellcheck disable=SC2086
cd "${abs_top_builddir:-../../..}/src/rust" && \
CARGO_TARGET_DIR="${abs_top_builddir:-../../..}/src/rust/target" \
"${CARGO:-cargo}" test "${CARGO_ONLINE-'--frozen'}" \
--features "test_linking_hack" \
--target "$rustc_host" \
${EXTRA_CARGO_OPTIONS} \
--manifest-path "${cargo_toml_dir}/Cargo.toml" || exitcode=1
fi
done
exit $exitcode

View File

@ -10,7 +10,6 @@ src_tools_tor_resolve_LDADD = \
src/trunnel/libor-trunnel.a \
$(TOR_UTIL_LIBS) \
$(TOR_CRYPTO_LIBS) $(TOR_LIBS_CRYPTLIB)\
$(rust_ldadd) \
@TOR_LIB_MATH@ @TOR_LIB_WS32@ @TOR_LIB_IPHLPAPI@ @TOR_LIB_SHLWAPI@ @TOR_LIB_USERENV@
if COVERAGE_ENABLED
@ -34,7 +33,6 @@ src_tools_tor_gencert_LDFLAGS = @TOR_LDFLAGS_zlib@ $(TOR_LDFLAGS_CRYPTLIB)
src_tools_tor_gencert_LDADD = \
$(TOR_CRYPTO_LIBS) \
$(TOR_UTIL_LIBS) \
$(rust_ldadd) \
@TOR_LIB_MATH@ @TOR_ZLIB_LIBS@ $(TOR_LIBS_CRYPTLIB) \
@TOR_LIB_WS32@ @TOR_LIB_IPHLPAPI@ @TOR_LIB_SHLWAPI@ @TOR_LIB_GDI@ @TOR_LIB_USERENV@ @CURVE25519_LIBS@
endif