Compare commits
25 Commits
v20250625...main
| Author | SHA1 | Date |
|---|---|---|
| | ae2d98750c | |
| | 7d41c1219b | |
| | 65e1f1b3a9 | |
| | 437b823c84 | |
| | c9f21d5970 | |
| | 80c11e7eda | |
| | 0745cabc87 | |
| | 3e80669f4e | |
| | b81cd9ec61 | |
| | da6ed94443 | |
| | 96d41b3716 | |
| | 7dddc4d759 | |
| | a87690d817 | |
| | 18ef3da261 | |
| | f4698dd5b2 | |
| | d4322a2ed4 | |
| | 7260a9d5b4 | |
| | 12b4ceb4aa | |
| | fa1cd5c263 | |
| | f8da13912d | |
| | a3b3bf86ba | |
| | a99f7bb87d | |
| | d6f14d02dd | |
| | d18671eaf9 | |
| | 87c30d00e8 | |
```diff
@@ -12,6 +12,13 @@ jobs:
     container: ubuntu:22.04
     timeout-minutes: 20
 
+    services:
+      foundationdb:
+        # Note: this should generally match the version of the FoundationDB SERVER deployed in production; it's okay if
+        # it's a little behind the CLIENT version.
+        image: foundationdb/foundationdb:7.3.62
+        options: --name foundationdb
+
     steps:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - name: Set up JDK 21
@@ -26,6 +33,28 @@ jobs:
           HOME: /root
       - name: Install APT packages
-        # ca-certificates: required for AWS CRT client
-        run: apt update && apt install -y ca-certificates
+        run: |
+          # Add Docker's official GPG key:
+          apt update
+          apt install -y ca-certificates curl
+          install -m 0755 -d /etc/apt/keyrings
+          curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc
+          chmod a+r /etc/apt/keyrings/docker.asc
+
+          # Add Docker repository to apt sources:
+          echo \
+            "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \
+            $(. /etc/os-release && echo "${UBUNTU_CODENAME:-$VERSION_CODENAME}") stable" | \
+            tee /etc/apt/sources.list.d/docker.list > /dev/null
+
+          # ca-certificates: required for AWS CRT client
+          apt update && apt install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin ca-certificates
+      - name: Configure FoundationDB database
+        run: docker exec foundationdb /usr/bin/fdbcli --exec 'configure new single memory'
+      - name: Download and install FoundationDB client
+        run: |
+          ./mvnw -e -B -Pexclude-spam-filter clean prepare-package -DskipTests=true
+          cp service/target/jib-extra/usr/lib/libfdb_c.x86_64.so /usr/lib/libfdb_c.x86_64.so
+          ldconfig
       - name: Build with Maven
-        run: ./mvnw -e -B verify
+        run: ./mvnw -e -B clean verify -DfoundationDb.serviceContainerName=foundationdb
```
````diff
@@ -11,6 +11,8 @@ https://signal.org/docs/
 How to Build
 ------------
 
+This project uses [FoundationDB](https://www.foundationdb.org/) and requires the FoundationDB client library to be installed on the host system. With that in place, the server can be built and tested with:
+
 ```shell script
 $ ./mvnw clean test
 ```
````
pom.xml (56 changed lines)
```diff
@@ -37,47 +37,54 @@
   </modules>
 
   <properties>
-    <aws.sdk2.version>2.31.9</aws.sdk2.version>
-    <braintree.version>3.40.0</braintree.version>
+    <aws.sdk2.version>2.31.70</aws.sdk2.version>
+    <braintree.version>3.42.0</braintree.version>
     <commons-csv.version>1.14.0</commons-csv.version>
-    <commons-io.version>2.18.0</commons-io.version>
+    <commons-io.version>2.19.0</commons-io.version>
     <dropwizard.version>4.0.12</dropwizard.version>
     <dropwizard-metrics-datadog.version>1.1.14</dropwizard-metrics-datadog.version>
     <!-- can be updated to latest version with Dropwizard 5 (Jetty 12); will then need to disable telemetry -->
     <dynamodblocal.version>2.2.1</dynamodblocal.version>
-    <google-cloud-libraries.version>26.57.0</google-cloud-libraries.version>
-    <grpc.version>1.70.0</grpc.version> <!-- should be kept in sync with the value from Google libraries-bom -->
-    <gson.version>2.12.1</gson.version>
+    <!-- Note: when updating FoundationDB, also include a copy of `libfdb_c.so` from the FoundationDB release at
+         src/main/jib/usr/lib/libfdb_c.so. We use x86_64 builds without AVX instructions enabled (i.e. FoundationDB versions
+         with even-numbered patch versions). Also when updating FoundationDB, make sure to update the version of FoundationDB
+         used by GitHub Actions. -->
+    <foundationdb.version>7.3.62</foundationdb.version>
+    <foundationdb.api-version>730</foundationdb.api-version>
+    <foundationdb.client-library-sha256>bfed237b787fae3cde1222676e6bfbb0d218fc27bf9e903397a7a7aa96fb2d33</foundationdb.client-library-sha256>
+    <google-cloud-libraries.version>26.62.0</google-cloud-libraries.version>
+    <grpc.version>1.73.0</grpc.version> <!-- should be kept in sync with the value from Google libraries-bom -->
+    <gson.version>2.13.1</gson.version>
     <!-- several libraries (AWS, Google Cloud) use Apache http components transitively, and we need to align them -->
     <httpcore.version>4.4.16</httpcore.version>
     <httpclient.version>4.5.14</httpclient.version>
-    <jackson.version>2.18.3</jackson.version>
+    <jackson.version>2.19.1</jackson.version>
     <junit-pioneer.version>2.3.0</junit-pioneer.version>
     <jsr305.version>3.0.2</jsr305.version>
-    <kotlin.version>2.1.20</kotlin.version>
+    <kotlin.version>2.2.0</kotlin.version>
     <!-- Logback 1.5.14+ has a null pointer bug: https://github.com/qos-ch/logback/issues/929. -->
     <logback.version>1.5.13</logback.version>
-    <logback-access.version>2.0.5</logback-access.version>
-    <lettuce.version>6.5.5.RELEASE</lettuce.version>
-    <libphonenumber.version>9.0.2</libphonenumber.version>
-    <logstash.logback.version>7.3</logstash.logback.version>
-    <log4j-bom.version>2.24.3</log4j-bom.version>
+    <logback-access-common.version>2.0.5</logback-access-common.version>
+    <lettuce.version>6.7.1.RELEASE</lettuce.version>
+    <libphonenumber.version>9.0.8</libphonenumber.version>
+    <logstash.logback.version>8.1</logstash.logback.version>
+    <log4j-bom.version>2.25.0</log4j-bom.version>
     <luajava.version>3.5.0</luajava.version>
-    <micrometer.version>1.14.5</micrometer.version>
-    <netty.version>4.1.119.Final</netty.version>
+    <micrometer.version>1.15.1</micrometer.version>
+    <netty.version>4.1.122.Final</netty.version>
     <!-- Must be less than or equal to the value from Google libraries-bom which controls the protobuf runtime version.
          See https://protobuf.dev/support/cross-version-runtime-guarantee/. -->
     <protoc.version>4.29.4</protoc.version>
     <pushy.version>0.15.4</pushy.version>
     <reactive.grpc.version>1.2.4</reactive.grpc.version>
-    <reactor-bom.version>2024.0.4</reactor-bom.version> <!-- 3.7.4, see https://github.com/reactor/reactor#bom-versioning-scheme -->
+    <reactor-bom.version>2024.0.7</reactor-bom.version> <!-- 3.7.4, see https://github.com/reactor/reactor#bom-versioning-scheme -->
     <resilience4j.version>2.3.0</resilience4j.version>
     <semver4j.version>3.1.0</semver4j.version>
     <simple-grpc.version>0.1.0</simple-grpc.version>
     <slf4j.version>2.0.17</slf4j.version>
     <stripe.version>23.10.0</stripe.version>
-    <swagger.version>2.2.27</swagger.version>
-    <testcontainers.version>1.21.1</testcontainers.version>
+    <swagger.version>2.2.31</swagger.version>
+    <testcontainers.version>1.21.2</testcontainers.version>
 
     <!-- image to use in tests that run localstack via docker. -->
     <localstack.image>localstack/localstack:3.5.0</localstack.image>
```
```diff
@@ -214,6 +221,11 @@
        <artifactId>dropwizard-metrics-datadog</artifactId>
        <version>${dropwizard-metrics-datadog.version}</version>
      </dependency>
+     <dependency>
+       <groupId>org.foundationdb</groupId>
+       <artifactId>fdb-java</artifactId>
+       <version>${foundationdb.version}</version>
+     </dependency>
      <dependency>
        <groupId>org.slf4j</groupId>
        <artifactId>slf4j-api</artifactId>
```
```diff
@@ -313,7 +325,7 @@
      <dependency>
        <groupId>ch.qos.logback.access</groupId>
        <artifactId>logback-access-common</artifactId>
-       <version>${logback-access.version}</version>
+       <version>${logback-access-common.version}</version>
      </dependency>
      <dependency>
        <groupId>org.testcontainers</groupId>
```
```diff
@@ -322,6 +334,12 @@
        <type>pom</type>
        <scope>import</scope>
      </dependency>
+     <dependency>
+       <groupId>earth.adi</groupId>
+       <artifactId>testcontainers-foundationdb</artifactId>
+       <version>1.1.0</version>
+       <scope>test</scope>
+     </dependency>
    </dependencies>
  </dependencyManagement>
 
```
```diff
@@ -16,6 +16,9 @@ directoryV2.client.userIdTokenSharedSecret: bbcdefghijklmnopqrstuvwxyz0123456789
 svr2.userAuthenticationTokenSharedSecret: abcdefghijklmnopqrstuvwxyz0123456789ABCDEFG= # base64-encoded secret shared with SVR2 to generate auth tokens for Signal users
 svr2.userIdTokenSharedSecret: bbcdefghijklmnopqrstuvwxyz0123456789ABCDEFG= # base64-encoded secret shared with SVR2 to generate auth identity tokens for Signal users
 
+svrb.userAuthenticationTokenSharedSecret: abcdefghijklmnopqrstuvwxyz0123456789ABCDEFG= # base64-encoded secret shared with SVRB to generate auth tokens for Signal users
+svrb.userIdTokenSharedSecret: bbcdefghijklmnopqrstuvwxyz0123456789ABCDEFG= # base64-encoded secret shared with SVRB to generate auth identity tokens for Signal users
+
 tus.userAuthenticationTokenSharedSecret: abcdefghijklmnopqrstuvwxyz0123456789ABCDEFG=
 
 gcpAttachments.rsaSigningKey: |
```
```diff
@@ -225,6 +225,34 @@ svr2:
       AAAAAAAAAAAAAAAAAAAA
       -----END CERTIFICATE-----
 
+svrb:
+  uri: svrb.example.com
+  userAuthenticationTokenSharedSecret: secret://svrb.userAuthenticationTokenSharedSecret
+  userIdTokenSharedSecret: secret://svrb.userIdTokenSharedSecret
+  svrCaCertificates:
+    - |
+      -----BEGIN CERTIFICATE-----
+      ABCDEFGHIJKLMNOPQRSTUVWXYZ/0123456789+abcdefghijklmnopqrstuvwxyz
+      ABCDEFGHIJKLMNOPQRSTUVWXYZ/0123456789+abcdefghijklmnopqrstuvwxyz
+      ABCDEFGHIJKLMNOPQRSTUVWXYZ/0123456789+abcdefghijklmnopqrstuvwxyz
+      ABCDEFGHIJKLMNOPQRSTUVWXYZ/0123456789+abcdefghijklmnopqrstuvwxyz
+      ABCDEFGHIJKLMNOPQRSTUVWXYZ/0123456789+abcdefghijklmnopqrstuvwxyz
+      ABCDEFGHIJKLMNOPQRSTUVWXYZ/0123456789+abcdefghijklmnopqrstuvwxyz
+      ABCDEFGHIJKLMNOPQRSTUVWXYZ/0123456789+abcdefghijklmnopqrstuvwxyz
+      ABCDEFGHIJKLMNOPQRSTUVWXYZ/0123456789+abcdefghijklmnopqrstuvwxyz
+      ABCDEFGHIJKLMNOPQRSTUVWXYZ/0123456789+abcdefghijklmnopqrstuvwxyz
+      ABCDEFGHIJKLMNOPQRSTUVWXYZ/0123456789+abcdefghijklmnopqrstuvwxyz
+      ABCDEFGHIJKLMNOPQRSTUVWXYZ/0123456789+abcdefghijklmnopqrstuvwxyz
+      ABCDEFGHIJKLMNOPQRSTUVWXYZ/0123456789+abcdefghijklmnopqrstuvwxyz
+      ABCDEFGHIJKLMNOPQRSTUVWXYZ/0123456789+abcdefghijklmnopqrstuvwxyz
+      ABCDEFGHIJKLMNOPQRSTUVWXYZ/0123456789+abcdefghijklmnopqrstuvwxyz
+      ABCDEFGHIJKLMNOPQRSTUVWXYZ/0123456789+abcdefghijklmnopqrstuvwxyz
+      ABCDEFGHIJKLMNOPQRSTUVWXYZ/0123456789+abcdefghijklmnopqrstuvwxyz
+      ABCDEFGHIJKLMNOPQRSTUVWXYZ/0123456789+abcdefghijklmnopqrstuvwxyz
+      ABCDEFGHIJKLMNOPQRSTUVWXYZ/0123456789+abcdefghijklmnopqrstuvwxyz
+      AAAAAAAAAAAAAAAAAAAA
+      -----END CERTIFICATE-----
+
 messageCache: # Redis server configuration for message store cache
   persistDelayMinutes: 1
   cluster:
```
```diff
@@ -51,6 +51,13 @@
      <groupId>io.swagger.core.v3</groupId>
      <artifactId>swagger-jaxrs2-jakarta</artifactId>
      <version>${swagger.version}</version>
+     <exclusions>
+       <!-- conflicts with jackson-dataformat-yaml -->
+       <exclusion>
+         <groupId>org.yaml</groupId>
+         <artifactId>snakeyaml</artifactId>
+       </exclusion>
+     </exclusions>
    </dependency>
    <dependency>
      <groupId>jakarta.servlet</groupId>
```
```diff
@@ -351,6 +358,11 @@
      <artifactId>reactor-grpc-stub</artifactId>
    </dependency>
 
+   <dependency>
+     <groupId>org.foundationdb</groupId>
+     <artifactId>fdb-java</artifactId>
+   </dependency>
+
    <dependency>
      <groupId>software.amazon.awssdk</groupId>
      <artifactId>apache-client</artifactId>
```
```diff
@@ -497,6 +509,12 @@
      <scope>test</scope>
    </dependency>
 
+   <dependency>
+     <groupId>earth.adi</groupId>
+     <artifactId>testcontainers-foundationdb</artifactId>
+     <scope>test</scope>
+   </dependency>
+
    <dependency>
      <groupId>com.google.auth</groupId>
      <artifactId>google-auth-library-oauth2-http</artifactId>
```
```diff
@@ -532,6 +550,28 @@
      <id>exclude-spam-filter</id>
      <build>
        <plugins>
+         <plugin>
+           <groupId>io.github.download-maven-plugin</groupId>
+           <artifactId>download-maven-plugin</artifactId>
+           <version>2.0.0</version>
+
+           <executions>
+             <execution>
+               <id>install-foundationdb-client-library</id>
+               <phase>prepare-package</phase>
+               <goals>
+                 <goal>wget</goal>
+               </goals>
+             </execution>
+           </executions>
+
+           <configuration>
+             <url>https://github.com/apple/foundationdb/releases/download/${foundationdb.version}/libfdb_c.x86_64.so</url>
+             <outputDirectory>${project.build.directory}/jib-extra/usr/lib</outputDirectory>
+             <sha256>${foundationdb.client-library-sha256}</sha256>
+           </configuration>
+         </plugin>
+
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-shade-plugin</artifactId>
```
```diff
@@ -653,6 +693,9 @@
            <includes>*.yml</includes>
            <into>/usr/share/signal/</into>
          </path>
+         <path>
+           <from>${project.build.directory}/jib-extra</from>
+         </path>
        </paths>
      </extraDirectories>
    </configuration>
```
@@ -0,0 +1,20 @@ (new file)
```java
/*
 * Copyright 2025 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

package org.whispersystems.textsecuregcm.storage;

public class FoundationDbVersion {

  private static final String VERSION = "${foundationdb.version}";
  private static final int API_VERSION = ${foundationdb.api-version};

  public static String getFoundationDbVersion() {
    return VERSION;
  }

  public static int getFoundationDbApiVersion() {
    return API_VERSION;
  }
}
```
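For orientation, here is a minimal sketch (not part of the change) of how a version/API constant like the one generated above is typically consumed with the fdb-java client that the build now pulls in; the class name, the sample key, and the default cluster-file location are assumptions for illustration only.

```java
import com.apple.foundationdb.Database;
import com.apple.foundationdb.FDB;
import org.whispersystems.textsecuregcm.storage.FoundationDbVersion;

// Hypothetical usage sketch: select the API version exposed by the generated class,
// open a database via the default cluster file, and run a single read transaction.
public class FoundationDbSmokeTest {

  public static void main(final String[] args) {
    // FDB.selectAPIVersion must be called once before any other FoundationDB API call
    final FDB fdb = FDB.selectAPIVersion(FoundationDbVersion.getFoundationDbApiVersion());

    // fdb.open() uses the default cluster file (e.g. /etc/foundationdb/fdb.cluster) -- an assumption here
    try (Database db = fdb.open()) {
      final byte[] value = db.read(transaction -> transaction.get("hello".getBytes()).join());
      System.out.println(value == null ? "key not present" : new String(value));
    }
  }
}
```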
```diff
@@ -52,7 +52,7 @@ import org.whispersystems.textsecuregcm.configuration.RemoteConfigConfiguration;
 import org.whispersystems.textsecuregcm.configuration.ReportMessageConfiguration;
 import org.whispersystems.textsecuregcm.configuration.S3ObjectMonitorFactory;
 import org.whispersystems.textsecuregcm.configuration.SecureStorageServiceConfiguration;
-import org.whispersystems.textsecuregcm.configuration.SecureValueRecovery2Configuration;
+import org.whispersystems.textsecuregcm.configuration.SecureValueRecoveryConfiguration;
 import org.whispersystems.textsecuregcm.configuration.ShortCodeExpanderConfiguration;
 import org.whispersystems.textsecuregcm.configuration.SpamFilterConfiguration;
 import org.whispersystems.textsecuregcm.configuration.StripeConfiguration;
@@ -156,7 +156,12 @@ public class WhisperServerConfiguration extends Configuration {
   @NotNull
   @Valid
   @JsonProperty
-  private SecureValueRecovery2Configuration svr2;
+  private SecureValueRecoveryConfiguration svr2;
+
+  @NotNull
+  @Valid
+  @JsonProperty
+  private SecureValueRecoveryConfiguration svrb;
 
   @NotNull
   @Valid
@@ -389,10 +394,14 @@ public class WhisperServerConfiguration extends Configuration {
     return pubsub;
   }
 
-  public SecureValueRecovery2Configuration getSvr2Configuration() {
+  public SecureValueRecoveryConfiguration getSvr2Configuration() {
     return svr2;
   }
 
+  public SecureValueRecoveryConfiguration getSvrbConfiguration() {
+    return svrb;
+  }
+
   public DirectoryV2Configuration getDirectoryV2Configuration() {
     return directoryV2;
   }
```
```diff
@@ -124,6 +124,7 @@ import org.whispersystems.textsecuregcm.controllers.RegistrationController;
 import org.whispersystems.textsecuregcm.controllers.RemoteConfigController;
 import org.whispersystems.textsecuregcm.controllers.SecureStorageController;
 import org.whispersystems.textsecuregcm.controllers.SecureValueRecovery2Controller;
+import org.whispersystems.textsecuregcm.controllers.SecureValueRecoveryBController;
 import org.whispersystems.textsecuregcm.controllers.StickerController;
 import org.whispersystems.textsecuregcm.controllers.SubscriptionController;
 import org.whispersystems.textsecuregcm.controllers.VerificationController;
@@ -279,6 +280,7 @@ import org.whispersystems.textsecuregcm.workers.RemoveExpiredAccountsCommand;
 import org.whispersystems.textsecuregcm.workers.RemoveExpiredBackupsCommand;
 import org.whispersystems.textsecuregcm.workers.RemoveExpiredLinkedDevicesCommand;
 import org.whispersystems.textsecuregcm.workers.RemoveExpiredUsernameHoldsCommand;
+import org.whispersystems.textsecuregcm.workers.RemoveOrphanedPreKeyPagesCommand;
 import org.whispersystems.textsecuregcm.workers.ScheduledApnPushNotificationSenderServiceCommand;
 import org.whispersystems.textsecuregcm.workers.ServerVersionCommand;
 import org.whispersystems.textsecuregcm.workers.SetRequestLoggingEnabledTask;
@@ -332,6 +334,7 @@ public class WhisperServerService extends Application<WhisperServerConfiguration
     bootstrap.addCommand(new RemoveExpiredAccountsCommand(Clock.systemUTC()));
     bootstrap.addCommand(new RemoveExpiredUsernameHoldsCommand(Clock.systemUTC()));
     bootstrap.addCommand(new RemoveExpiredBackupsCommand(Clock.systemUTC()));
+    bootstrap.addCommand(new RemoveOrphanedPreKeyPagesCommand(Clock.systemUTC()));
     bootstrap.addCommand(new BackupMetricsCommand(Clock.systemUTC()));
     bootstrap.addCommand(new BackupUsageRecalculationCommand());
     bootstrap.addCommand(new RemoveExpiredLinkedDevicesCommand());
@@ -368,6 +371,8 @@ public class WhisperServerService extends Application<WhisperServerConfiguration
 
     MetricsUtil.configureRegistries(config, environment, dynamicConfigurationManager);
 
+    ExperimentEnrollmentManager experimentEnrollmentManager = new ExperimentEnrollmentManager(dynamicConfigurationManager);
+
     if (config.getServerFactory() instanceof DefaultServerFactory defaultServerFactory) {
       defaultServerFactory.getApplicationConnectors()
           .forEach(connectorFactory -> {
@@ -444,7 +449,8 @@ public class WhisperServerService extends Application<WhisperServerConfiguration
             config.getDynamoDbTables().getPagedKemKeys().getTableName(),
             config.getPagedSingleUseKEMPreKeyStore().bucket()),
         new RepeatedUseECSignedPreKeyStore(dynamoDbAsyncClient, config.getDynamoDbTables().getEcSignedPreKeys().getTableName()),
-        new RepeatedUseKEMSignedPreKeyStore(dynamoDbAsyncClient, config.getDynamoDbTables().getKemLastResortKeys().getTableName()));
+        new RepeatedUseKEMSignedPreKeyStore(dynamoDbAsyncClient, config.getDynamoDbTables().getKemLastResortKeys().getTableName()),
+        experimentEnrollmentManager);
     MessagesDynamoDb messagesDynamoDb = new MessagesDynamoDb(dynamoDbClient, dynamoDbAsyncClient,
         config.getDynamoDbTables().getMessages().getTableName(),
         config.getDynamoDbTables().getMessages().getExpiration(),
@@ -603,9 +609,9 @@ public class WhisperServerService extends Application<WhisperServerConfiguration
         config.getPaymentsServiceConfiguration());
     ExternalServiceCredentialsGenerator svr2CredentialsGenerator = SecureValueRecovery2Controller.credentialsGenerator(
         config.getSvr2Configuration());
+    ExternalServiceCredentialsGenerator svrbCredentialsGenerator = SecureValueRecoveryBController.credentialsGenerator(
+        config.getSvrbConfiguration());
 
-    ExperimentEnrollmentManager experimentEnrollmentManager = new ExperimentEnrollmentManager(
-        dynamicConfigurationManager);
     RegistrationRecoveryPasswordsManager registrationRecoveryPasswordsManager =
         new RegistrationRecoveryPasswordsManager(registrationRecoveryPasswords);
     UsernameHashZkProofVerifier usernameHashZkProofVerifier = new UsernameHashZkProofVerifier();
@@ -1118,6 +1124,7 @@ public class WhisperServerService extends Application<WhisperServerConfiguration
         new RemoteConfigController(remoteConfigsManager, config.getRemoteConfigConfiguration().globalConfig(), clock),
         new SecureStorageController(storageCredentialsGenerator),
         new SecureValueRecovery2Controller(svr2CredentialsGenerator, accountsManager),
+        new SecureValueRecoveryBController(svrbCredentialsGenerator),
         new StickerController(rateLimiters, config.getCdnConfiguration().credentials().accessKeyId().value(),
             config.getCdnConfiguration().credentials().secretAccessKey().value(), config.getCdnConfiguration().region(),
             config.getCdnConfiguration().bucket()),
```
```diff
@@ -297,7 +297,7 @@ public class BackupsDb {
         .tags(tags)
         .publishPercentileHistogram()
         .register(Metrics.globalRegistry)
-        .record(mediaCount);
+        .record(bytesUsed);
 
     // Report that the backup is out of quota if it cannot store a max size media object
     final boolean quotaExhausted = bytesUsed >=
```

@@ -1,34 +0,0 @@ (file removed)
```java
/*
 * Copyright 2024 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

package org.whispersystems.textsecuregcm.calls.routing;

import jakarta.validation.constraints.NotNull;
import java.net.InetAddress;
import java.util.List;
import java.util.Map;

public record CallDnsRecords(
    @NotNull
    Map<String, List<InetAddress>> aByRegion,
    @NotNull
    Map<String, List<InetAddress>> aaaaByRegion
) {
  public String getSummary() {
    int numARecords = aByRegion.values().stream().mapToInt(List::size).sum();
    int numAAAARecords = aaaaByRegion.values().stream().mapToInt(List::size).sum();
    return String.format(
        "(A records, %s regions, %s records), (AAAA records, %s regions, %s records)",
        aByRegion.size(),
        numARecords,
        aaaaByRegion.size(),
        numAAAARecords
    );
  }

  public static CallDnsRecords empty() {
    return new CallDnsRecords(Map.of(), Map.of());
  }
}
```
@@ -1,80 +0,0 @@ (file removed)
```java
/*
 * Copyright 2023 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

package org.whispersystems.textsecuregcm.calls.routing;

import com.fasterxml.jackson.core.StreamReadFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.json.JsonMapper;
import io.dropwizard.lifecycle.Managed;
import io.micrometer.core.instrument.Metrics;
import io.micrometer.core.instrument.Timer;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.whispersystems.textsecuregcm.configuration.S3ObjectMonitorFactory;
import org.whispersystems.textsecuregcm.metrics.MetricsUtil;
import org.whispersystems.textsecuregcm.s3.S3ObjectMonitor;
import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;

public class CallDnsRecordsManager implements Supplier<CallDnsRecords>, Managed {

  private final S3ObjectMonitor objectMonitor;

  private final AtomicReference<CallDnsRecords> callDnsRecords = new AtomicReference<>();

  private final Timer refreshTimer;

  private static final Logger log = LoggerFactory.getLogger(CallDnsRecordsManager.class);

  private static final ObjectMapper objectMapper = JsonMapper.builder()
      .enable(StreamReadFeature.INCLUDE_SOURCE_IN_LOCATION)
      .build();

  public CallDnsRecordsManager(final ScheduledExecutorService executorService,
      final AwsCredentialsProvider awsCredentialsProvider, final S3ObjectMonitorFactory configuration) {

    this.objectMonitor = configuration.build(awsCredentialsProvider, executorService);
    this.callDnsRecords.set(CallDnsRecords.empty());
    this.refreshTimer = Metrics.timer(MetricsUtil.name(CallDnsRecordsManager.class, "refresh"));
  }

  private void handleDatabaseChanged(final InputStream inputStream) {
    refreshTimer.record(() -> {
      try (final InputStream bufferedInputStream = new BufferedInputStream(inputStream)) {
        final CallDnsRecords newRecords = parseRecords(bufferedInputStream);
        final CallDnsRecords oldRecords = callDnsRecords.getAndSet(newRecords);
        log.info("Replaced dns records, old summary=[{}], new summary=[{}]", oldRecords != null ? oldRecords.getSummary() : "null", newRecords);
      } catch (final IOException e) {
        log.error("Failed to load Call DNS Records");
      }
    });
  }

  static CallDnsRecords parseRecords(InputStream inputStream) throws IOException {
    return objectMapper.readValue(inputStream, CallDnsRecords.class);
  }

  @Override
  public void start() throws Exception {
    objectMonitor.start(this::handleDatabaseChanged);
  }

  @Override
  public void stop() throws Exception {
    objectMonitor.stop();
    callDnsRecords.getAndSet(null);
  }

  @Override
  public CallDnsRecords get() {
    return this.callDnsRecords.get();
  }
}
```
@@ -1,193 +0,0 @@ (file removed)
```java
/*
 * Copyright 2024 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

package org.whispersystems.textsecuregcm.calls.routing;

import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import java.math.BigInteger;
import java.net.Inet4Address;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.TreeMap;
import java.util.function.Function;
import java.util.stream.Stream;

public class CallRoutingTable {
  private final TreeMap<Integer, Map<Integer, List<String>>> ipv4Map;
  private final TreeMap<Integer, Map<BigInteger, List<String>>> ipv6Map;
  private final Map<GeoKey, List<String>> geoToDatacenter;

  public CallRoutingTable(
      Map<CidrBlock.IpV4CidrBlock, List<String>> ipv4SubnetToDatacenter,
      Map<CidrBlock.IpV6CidrBlock, List<String>> ipv6SubnetToDatacenter,
      Map<GeoKey, List<String>> geoToDatacenter
  ) {
    this.ipv4Map = new TreeMap<>();
    for (Map.Entry<CidrBlock.IpV4CidrBlock, List<String>> t : ipv4SubnetToDatacenter.entrySet()) {
      if (!this.ipv4Map.containsKey(t.getKey().cidrBlockSize())) {
        this.ipv4Map.put(t.getKey().cidrBlockSize(), new HashMap<>());
      }
      this.ipv4Map
          .get(t.getKey().cidrBlockSize())
          .put(t.getKey().subnet(), t.getValue());
    }

    this.ipv6Map = new TreeMap<>();
    for (Map.Entry<CidrBlock.IpV6CidrBlock, List<String>> t : ipv6SubnetToDatacenter.entrySet()) {
      if (!this.ipv6Map.containsKey(t.getKey().cidrBlockSize())) {
        this.ipv6Map.put(t.getKey().cidrBlockSize(), new HashMap<>());
      }
      this.ipv6Map
          .get(t.getKey().cidrBlockSize())
          .put(t.getKey().subnet(), t.getValue());
    }

    this.geoToDatacenter = geoToDatacenter;
  }

  public static CallRoutingTable empty() {
    return new CallRoutingTable(Map.of(), Map.of(), Map.of());
  }

  public enum Protocol {
    v4,
    v6
  }

  public record GeoKey(
      @NotBlank String continent,
      @NotBlank String country,
      @NotNull Optional<String> subdivision,
      @NotBlank Protocol protocol
  ) {}

  /**
   * Returns ordered list of fastest datacenters based on IP & Geo info. Prioritize the results based on subnet.
   * Returns at most three, 2 by subnet and 1 by geo. Takes more from either bucket to hit 3.
   */
  public List<String> getDatacentersFor(
      InetAddress address,
      String continent,
      String country,
      Optional<String> subdivision
  ) {
    final int NUM_DATACENTERS = 3;

    if(this.isEmpty()) {
      return Collections.emptyList();
    }

    List<String> dcsBySubnet = getDatacentersBySubnet(address);
    List<String> dcsByGeo = getDatacentersByGeo(continent, country, subdivision).stream()
        .limit(NUM_DATACENTERS)
        .filter(dc ->
            (dcsBySubnet.isEmpty() || !dc.equals(dcsBySubnet.getFirst()))
                && (dcsBySubnet.size() < 2 || !dc.equals(dcsBySubnet.get(1)))
        ).toList();

    return Stream.concat(
            dcsBySubnet.stream().limit(dcsByGeo.isEmpty() ? NUM_DATACENTERS : NUM_DATACENTERS - 1),
            dcsByGeo.stream())
        .limit(NUM_DATACENTERS)
        .toList();
  }

  public boolean isEmpty() {
    return this.ipv4Map.isEmpty() && this.ipv6Map.isEmpty() && this.geoToDatacenter.isEmpty();
  }

  /**
   * Returns ordered list of fastest datacenters based on ip info. Prioritizes V4 connections.
   */
  public List<String> getDatacentersBySubnet(InetAddress address) throws IllegalArgumentException {
    if(address instanceof Inet4Address) {
      for(Map.Entry<Integer, Map<Integer, List<String>>> t: this.ipv4Map.descendingMap().entrySet()) {
        int maskedIp = CidrBlock.IpV4CidrBlock.maskToSize((Inet4Address) address, t.getKey());
        if(t.getValue().containsKey(maskedIp)) {
          return t.getValue().get(maskedIp);
        }
      }
    } else if (address instanceof Inet6Address) {
      for(Map.Entry<Integer, Map<BigInteger, List<String>>> t: this.ipv6Map.descendingMap().entrySet()) {
        BigInteger maskedIp = CidrBlock.IpV6CidrBlock.maskToSize((Inet6Address) address, t.getKey());
        if(t.getValue().containsKey(maskedIp)) {
          return t.getValue().get(maskedIp);
        }
      }
    } else {
      throw new IllegalArgumentException("Expected either an Inet4Address or Inet6Address");
    }

    return Collections.emptyList();
  }

  /**
   * Returns ordered list of fastest datacenters based on geo info. Attempts to match based on subdivision, falls back
   * to country based lookup. Does not attempt to look for nearby subdivisions. Prioritizes V4 connections.
   */
  public List<String> getDatacentersByGeo(
      String continent,
      String country,
      Optional<String> subdivision
  ) {
    GeoKey v4Key = new GeoKey(continent, country, subdivision, Protocol.v4);
    List<String> v4Options = this.geoToDatacenter.getOrDefault(v4Key, Collections.emptyList());
    List<String> v4OptionsBackup = v4Options.isEmpty() && subdivision.isPresent() ?
        this.geoToDatacenter.getOrDefault(
            new GeoKey(continent, country, Optional.empty(), Protocol.v4),
            Collections.emptyList())
        : Collections.emptyList();

    GeoKey v6Key = new GeoKey(continent, country, subdivision, Protocol.v6);
    List<String> v6Options = this.geoToDatacenter.getOrDefault(v6Key, Collections.emptyList());
    List<String> v6OptionsBackup = v6Options.isEmpty() && subdivision.isPresent() ?
        this.geoToDatacenter.getOrDefault(
            new GeoKey(continent, country, Optional.empty(), Protocol.v6),
            Collections.emptyList())
        : Collections.emptyList();

    return Stream.of(
            v4Options.stream(),
            v6Options.stream(),
            v4OptionsBackup.stream(),
            v6OptionsBackup.stream()
        )
        .flatMap(Function.identity())
        .distinct()
        .toList();
  }

  public String toSummaryString() {
    return String.format(
        "[Ipv4Table=%s rows, Ipv6Table=%s rows, GeoTable=%s rows]",
        ipv4Map.size(),
        ipv6Map.size(),
        geoToDatacenter.size()
    );
  }

  @Override
  public boolean equals(final Object o) {
    if (this == o)
      return true;
    if (o == null || getClass() != o.getClass())
      return false;
    CallRoutingTable that = (CallRoutingTable) o;
    return Objects.equals(ipv4Map, that.ipv4Map) && Objects.equals(ipv6Map, that.ipv6Map) && Objects.equals(
        geoToDatacenter, that.geoToDatacenter);
  }

  @Override
  public int hashCode() {
    return Objects.hash(ipv4Map, ipv6Map, geoToDatacenter);
  }
}
```
@@ -1,73 +0,0 @@ (file removed)
```java
/*
 * Copyright 2023 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

package org.whispersystems.textsecuregcm.calls.routing;

import io.dropwizard.lifecycle.Managed;
import io.micrometer.core.instrument.Metrics;
import io.micrometer.core.instrument.Timer;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.whispersystems.textsecuregcm.configuration.S3ObjectMonitorFactory;
import org.whispersystems.textsecuregcm.metrics.MetricsUtil;
import org.whispersystems.textsecuregcm.s3.S3ObjectMonitor;
import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;

public class CallRoutingTableManager implements Supplier<CallRoutingTable>, Managed {

  private final S3ObjectMonitor objectMonitor;

  private final AtomicReference<CallRoutingTable> routingTable = new AtomicReference<>();

  private final String tableTag;

  private final Timer refreshTimer;

  private static final Logger log = LoggerFactory.getLogger(CallRoutingTableManager.class);

  public CallRoutingTableManager(final ScheduledExecutorService executorService,
      final AwsCredentialsProvider awsCredentialsProvider, final S3ObjectMonitorFactory configuration,
      final String tableTag) {

    this.objectMonitor = configuration.build(awsCredentialsProvider, executorService);
    this.tableTag = tableTag;
    this.routingTable.set(CallRoutingTable.empty());
    this.refreshTimer = Metrics.timer(MetricsUtil.name(CallRoutingTableManager.class, tableTag));
  }

  private void handleDatabaseChanged(final InputStream inputStream) {
    refreshTimer.record(() -> {
      try(InputStreamReader reader = new InputStreamReader(inputStream)) {
        CallRoutingTable newTable = CallRoutingTableParser.fromJson(reader);
        this.routingTable.set(newTable);
        log.info("Replaced {} call routing table: {}", tableTag, newTable.toSummaryString());
      } catch (final IOException e) {
        log.error("Failed to parse and update {} call routing table", tableTag);
      }
    });
  }

  @Override
  public void start() throws Exception {
    objectMonitor.start(this::handleDatabaseChanged);
  }

  @Override
  public void stop() throws Exception {
    objectMonitor.stop();
    routingTable.getAndSet(null);
  }

  @Override
  public CallRoutingTable get() {
    return this.routingTable.get();
  }
}
```

@@ -1,185 +0,0 @@ (file removed)
```java
/*
 * Copyright 2024 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

package org.whispersystems.textsecuregcm.calls.routing;

import com.fasterxml.jackson.core.StreamReadFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.json.JsonMapper;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;


final class CallRoutingTableParser {

  private final static int IPV4_DEFAULT_BLOCK_SIZE = 24;
  private final static int IPV6_DEFAULT_BLOCK_SIZE = 48;
  private static final ObjectMapper objectMapper = JsonMapper.builder()
      .enable(StreamReadFeature.INCLUDE_SOURCE_IN_LOCATION)
      .build();

  /** Used for parsing JSON */
  private static class RawCallRoutingTable {
    public Map<String, List<String>> ipv4GeoToDataCenters = Map.of();
    public Map<String, List<String>> ipv6GeoToDataCenters = Map.of();
    public Map<String, List<String>> ipv4SubnetsToDatacenters = Map.of();
    public Map<String, List<String>> ipv6SubnetsToDatacenters = Map.of();
  }

  private final static String WHITESPACE_REGEX = "\\s+";

  public static CallRoutingTable fromJson(final Reader inputReader) throws IOException {
    try (final BufferedReader reader = new BufferedReader(inputReader)) {
      RawCallRoutingTable rawTable = objectMapper.readValue(reader, RawCallRoutingTable.class);

      Map<CidrBlock.IpV4CidrBlock, List<String>> ipv4SubnetToDatacenter = rawTable.ipv4SubnetsToDatacenters
          .entrySet()
          .stream()
          .collect(Collectors.toUnmodifiableMap(
              e -> (CidrBlock.IpV4CidrBlock) CidrBlock.parseCidrBlock(e.getKey(), IPV4_DEFAULT_BLOCK_SIZE),
              Map.Entry::getValue
          ));

      Map<CidrBlock.IpV6CidrBlock, List<String>> ipv6SubnetToDatacenter = rawTable.ipv6SubnetsToDatacenters
          .entrySet()
          .stream()
          .collect(Collectors.toUnmodifiableMap(
              e -> (CidrBlock.IpV6CidrBlock) CidrBlock.parseCidrBlock(e.getKey(), IPV6_DEFAULT_BLOCK_SIZE),
              Map.Entry::getValue
          ));

      Map<CallRoutingTable.GeoKey, List<String>> geoToDatacenter = Stream.concat(
          rawTable.ipv4GeoToDataCenters
              .entrySet()
              .stream()
              .map(e -> Map.entry(parseRawGeoKey(e.getKey(), CallRoutingTable.Protocol.v4), e.getValue())),
          rawTable.ipv6GeoToDataCenters
              .entrySet()
              .stream()
              .map(e -> Map.entry(parseRawGeoKey(e.getKey(), CallRoutingTable.Protocol.v6), e.getValue()))
      ).collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue));

      return new CallRoutingTable(
          ipv4SubnetToDatacenter,
          ipv6SubnetToDatacenter,
          geoToDatacenter
      );
    }
  }

  private static CallRoutingTable.GeoKey parseRawGeoKey(String rawKey, CallRoutingTable.Protocol protocol) {
    String[] splits = rawKey.split("-");
    if (splits.length < 2 || splits.length > 3) {
      throw new IllegalArgumentException("Invalid raw key");
    }

    Optional<String> subdivision = splits.length < 3 ? Optional.empty() : Optional.of(splits[2]);
    return new CallRoutingTable.GeoKey(splits[0], splits[1], subdivision, protocol);
  }

  /**
   * Parses a call routing table in TSV format. Example below - see tests for more examples:
   192.0.2.0/24 northamerica-northeast1
   198.51.100.0/24 us-south1
   203.0.113.0/24 asia-southeast1

   2001:db8:b0a9::/48 us-east4
   2001:db8:b0f5::/48 us-central1 northamerica-northeast1 us-east4
   2001:db8:9406::/48 us-east1 us-central1

   SA-SR-v4 us-east1 us-east4
   SA-SR-v6 us-east1 us-south1
   SA-UY-v4 southamerica-west1 southamerica-east1 europe-west3
   SA-UY-v6 southamerica-west1 europe-west4
   SA-VE-v4 us-east1 us-east4 us-south1
   SA-VE-v6 us-east1 northamerica-northeast1 us-east4
   ZZ-ZZ-v4 asia-south1 europe-southwest1 australia-southeast1
   */
  public static CallRoutingTable fromTsv(final Reader inputReader) throws IOException {
    try (final BufferedReader reader = new BufferedReader(inputReader)) {
      // use maps to silently dedupe CidrBlocks
      Map<CidrBlock.IpV4CidrBlock, List<String>> ipv4Map = new HashMap<>();
      Map<CidrBlock.IpV6CidrBlock, List<String>> ipv6Map = new HashMap<>();
      Map<CallRoutingTable.GeoKey, List<String>> ipGeoTable = new HashMap<>();
      String line;
      while((line = reader.readLine()) != null) {
        if(line.isBlank()) {
          continue;
        }

        List<String> splits = Arrays.stream(line.split(WHITESPACE_REGEX)).filter(s -> !s.isBlank()).toList();
        if (splits.size() < 2) {
          throw new IllegalStateException("Invalid row, expected some key and list of values");
        }

        List<String> datacenters = splits.subList(1, splits.size());
        switch (guessLineType(splits)) {
          case v4 -> {
            CidrBlock cidrBlock = CidrBlock.parseCidrBlock(splits.getFirst());
            if(!(cidrBlock instanceof CidrBlock.IpV4CidrBlock)) {
              throw new IllegalArgumentException("Expected an ipv4 cidr block");
            }
            ipv4Map.put((CidrBlock.IpV4CidrBlock) cidrBlock, datacenters);
          }
          case v6 -> {
            CidrBlock cidrBlock = CidrBlock.parseCidrBlock(splits.getFirst());
            if(!(cidrBlock instanceof CidrBlock.IpV6CidrBlock)) {
              throw new IllegalArgumentException("Expected an ipv6 cidr block");
            }
            ipv6Map.put((CidrBlock.IpV6CidrBlock) cidrBlock, datacenters);
          }
          case Geo -> {
            String[] geo = splits.getFirst().split("-");
            if(geo.length < 3) {
              throw new IllegalStateException("Geo row key invalid, expected atleast continent, country, and protocol");
            }
            String continent = geo[0];
            String country = geo[1];
            Optional<String> subdivision = geo.length > 3 ? Optional.of(geo[2]) : Optional.empty();
            CallRoutingTable.Protocol protocol = CallRoutingTable.Protocol.valueOf(geo[geo.length - 1].toLowerCase());
            CallRoutingTable.GeoKey tableKey = new CallRoutingTable.GeoKey(
                continent,
                country,
                subdivision,
                protocol
            );
            ipGeoTable.put(tableKey, datacenters);
          }
        }
      }

      return new CallRoutingTable(
          ipv4Map,
          ipv6Map,
          ipGeoTable
      );
    }
  }

  private static LineType guessLineType(List<String> splits) {
    String first = splits.getFirst();
    if (first.contains("-")) {
      return LineType.Geo;
    } else if(first.contains(":")) {
      return LineType.v6;
    } else if (first.contains(".")) {
      return LineType.v4;
    }

    throw new IllegalArgumentException(String.format("Invalid line, could not determine type from '%s'", first));
  }

  private enum LineType {
    v4, v6, Geo
  }
}
```
@@ -1,137 +0,0 @@ (file removed)
```java
/*
 * Copyright 2024 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

package org.whispersystems.textsecuregcm.calls.routing;

import java.math.BigInteger;
import java.net.Inet4Address;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.UnknownHostException;

/**
 * Can be used to check if an IP is in the CIDR block
 */
public interface CidrBlock {

  boolean ipInBlock(InetAddress address);

  static CidrBlock parseCidrBlock(String cidrBlock, int defaultBlockSize) {
    String[] splits = cidrBlock.split("/");
    if(splits.length > 2) {
      throw new IllegalArgumentException("Invalid cidr block format, expected {address}/{blocksize}");
    }

    try {
      int blockSize = splits.length == 2 ? Integer.parseInt(splits[1]) : defaultBlockSize;
      return parseCidrBlockInner(splits[0], blockSize);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException(String.format("Invalid block size specified: '%s'", splits[1]));
    }
  }

  static CidrBlock parseCidrBlock(String cidrBlock) {
    String[] splits = cidrBlock.split("/");
    if (splits.length != 2) {
      throw new IllegalArgumentException("Invalid cidr block format, expected {address}/{blocksize}");
    }

    try {
      int blockSize = Integer.parseInt(splits[1]);
      return parseCidrBlockInner(splits[0], blockSize);
    } catch (NumberFormatException e) {
      throw new IllegalArgumentException(String.format("Invalid block size specified: '%s'", splits[1]));
    }
  }

  private static CidrBlock parseCidrBlockInner(String rawAddress, int blockSize) {
    try {
      InetAddress address = InetAddress.getByName(rawAddress);
      if(address instanceof Inet4Address) {
        return IpV4CidrBlock.of((Inet4Address) address, blockSize);
      } else if (address instanceof Inet6Address) {
        return IpV6CidrBlock.of((Inet6Address) address, blockSize);
      } else {
        throw new IllegalArgumentException("Must be an ipv4 or ipv6 string");
      }
    } catch (UnknownHostException e) {
      throw new IllegalArgumentException(e);
    }
  }

  record IpV4CidrBlock(int subnet, int subnetMask, int cidrBlockSize) implements CidrBlock {
    public static IpV4CidrBlock of(Inet4Address subnet, int cidrBlockSize) {
      if(cidrBlockSize > 32 || cidrBlockSize < 0) {
        throw new IllegalArgumentException("Invalid cidrBlockSize");
      }

      int subnetMask = mask(cidrBlockSize);
      int maskedIp = ipToInt(subnet) & subnetMask;
      return new IpV4CidrBlock(maskedIp, subnetMask, cidrBlockSize);
    }

    public boolean ipInBlock(InetAddress address) {
      if(!(address instanceof Inet4Address)) {
        return false;
      }
      int ip = ipToInt((Inet4Address) address);
      return (ip & subnetMask) == subnet;
    }

    private static int ipToInt(Inet4Address address) {
      byte[] octets = address.getAddress();
      return (octets[0] & 0xff) << 24 |
          (octets[1] & 0xff) << 16 |
          (octets[2] & 0xff) << 8 |
          octets[3] & 0xff;
    }

    private static int mask(int cidrBlockSize) {
      return (int) (-1L << (32 - cidrBlockSize));
    }

    public static int maskToSize(Inet4Address address, int cidrBlockSize) {
      return ipToInt(address) & mask(cidrBlockSize);
    }
  }

  record IpV6CidrBlock(BigInteger subnet, BigInteger subnetMask, int cidrBlockSize) implements CidrBlock {

    private static final BigInteger MINUS_ONE = BigInteger.valueOf(-1);

    public static IpV6CidrBlock of(Inet6Address subnet, int cidrBlockSize) {
      if(cidrBlockSize > 128 || cidrBlockSize < 0) {
        throw new IllegalArgumentException("Invalid cidrBlockSize");
      }

      BigInteger subnetMask = mask(cidrBlockSize);
      BigInteger maskedIp = ipToInt(subnet).and(subnetMask);
      return new IpV6CidrBlock(maskedIp, subnetMask, cidrBlockSize);
    }

    public boolean ipInBlock(InetAddress address) {
      if(!(address instanceof Inet6Address)) {
        return false;
      }
      BigInteger ip = ipToInt((Inet6Address) address);
      return ip.and(subnetMask).equals(subnet);
    }

    private static BigInteger ipToInt(Inet6Address ipAddress) {
      byte[] octets = ipAddress.getAddress();
      assert octets.length == 16;

      return new BigInteger(octets);
    }

    private static BigInteger mask(int cidrBlockSize) {
      return MINUS_ONE.shiftLeft(128 - cidrBlockSize);
    }

    public static BigInteger maskToSize(Inet6Address address, int cidrBlockSize) {
      return ipToInt(address).and(mask(cidrBlockSize));
    }
  }
}
```
@@ -1,16 +0,0 @@ (file removed)
```java
/*
 * Copyright 2024 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

package org.whispersystems.textsecuregcm.calls.routing;

import java.util.List;
import java.util.Optional;

public record TurnServerOptions(
    String hostname,
    Optional<List<String>> urlsWithIps,
    Optional<List<String>> urlsWithHostname
) {
}
```
```diff
@@ -12,7 +12,7 @@ import java.util.List;
 import org.whispersystems.textsecuregcm.configuration.secrets.SecretBytes;
 import org.whispersystems.textsecuregcm.util.ExactlySize;
 
-public record SecureValueRecovery2Configuration(
+public record SecureValueRecoveryConfiguration(
     @NotBlank String uri,
     @ExactlySize(32) SecretBytes userAuthenticationTokenSharedSecret,
     @ExactlySize(32) SecretBytes userIdTokenSharedSecret,
@@ -20,7 +20,7 @@ public record SecureValueRecovery2Configuration(
     @NotNull @Valid CircuitBreakerConfiguration circuitBreaker,
     @NotNull @Valid RetryConfiguration retry) {
 
-  public SecureValueRecovery2Configuration {
+  public SecureValueRecoveryConfiguration {
    if (circuitBreaker == null) {
      circuitBreaker = new CircuitBreakerConfiguration();
    }
```
```diff
@@ -8,6 +8,7 @@ package org.whispersystems.textsecuregcm.controllers;
 import com.google.common.annotations.VisibleForTesting;
 import io.dropwizard.auth.Auth;
 import io.swagger.v3.oas.annotations.Operation;
+import io.swagger.v3.oas.annotations.media.Schema;
 import io.swagger.v3.oas.annotations.responses.ApiResponse;
 import io.swagger.v3.oas.annotations.tags.Tag;
 import jakarta.validation.Valid;
@@ -27,7 +28,7 @@ import org.whispersystems.textsecuregcm.auth.AuthenticatedDevice;
 import org.whispersystems.textsecuregcm.auth.ExternalServiceCredentials;
 import org.whispersystems.textsecuregcm.auth.ExternalServiceCredentialsGenerator;
 import org.whispersystems.textsecuregcm.auth.ExternalServiceCredentialsSelector;
-import org.whispersystems.textsecuregcm.configuration.SecureValueRecovery2Configuration;
+import org.whispersystems.textsecuregcm.configuration.SecureValueRecoveryConfiguration;
 import org.whispersystems.textsecuregcm.entities.AuthCheckRequest;
 import org.whispersystems.textsecuregcm.entities.AuthCheckResponseV2;
 import org.whispersystems.textsecuregcm.limits.RateLimitedByIp;
@@ -35,18 +36,19 @@ import org.whispersystems.textsecuregcm.limits.RateLimiters;
 import org.whispersystems.textsecuregcm.storage.Account;
 import org.whispersystems.textsecuregcm.storage.AccountsManager;
 
-@Path("/v2/backup")
+@Path("/v2/{name: backup|svr}")
 @Tag(name = "Secure Value Recovery")
+@Schema(description = "Note: /v2/backup is deprecated. Use /v2/svr instead.")
 public class SecureValueRecovery2Controller {
 
   private static final long MAX_AGE_SECONDS = TimeUnit.DAYS.toSeconds(30);
 
-  public static ExternalServiceCredentialsGenerator credentialsGenerator(final SecureValueRecovery2Configuration cfg) {
+  public static ExternalServiceCredentialsGenerator credentialsGenerator(final SecureValueRecoveryConfiguration cfg) {
     return credentialsGenerator(cfg, Clock.systemUTC());
   }
 
   @VisibleForTesting
-  public static ExternalServiceCredentialsGenerator credentialsGenerator(final SecureValueRecovery2Configuration cfg, final Clock clock) {
+  public static ExternalServiceCredentialsGenerator credentialsGenerator(final SecureValueRecoveryConfiguration cfg, final Clock clock) {
     return ExternalServiceCredentialsGenerator
         .builder(cfg.userAuthenticationTokenSharedSecret())
         .withUserDerivationKey(cfg.userIdTokenSharedSecret().value())
```
@@ -0,0 +1,63 @@ (new file)
```java
/*
 * Copyright 2025 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

package org.whispersystems.textsecuregcm.controllers;

import com.google.common.annotations.VisibleForTesting;
import io.dropwizard.auth.Auth;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.Produces;
import jakarta.ws.rs.core.MediaType;
import java.time.Clock;
import org.whispersystems.textsecuregcm.auth.AuthenticatedDevice;
import org.whispersystems.textsecuregcm.auth.ExternalServiceCredentials;
import org.whispersystems.textsecuregcm.auth.ExternalServiceCredentialsGenerator;
import org.whispersystems.textsecuregcm.configuration.SecureValueRecoveryConfiguration;

@Path("/v1/svrb")
@Tag(name = "Secure Value Recovery B")
public class SecureValueRecoveryBController {

  public static ExternalServiceCredentialsGenerator credentialsGenerator(final SecureValueRecoveryConfiguration cfg) {
    return credentialsGenerator(cfg, Clock.systemUTC());
  }

  @VisibleForTesting
  public static ExternalServiceCredentialsGenerator credentialsGenerator(final SecureValueRecoveryConfiguration cfg,
      final Clock clock) {
    return ExternalServiceCredentialsGenerator
        .builder(cfg.userAuthenticationTokenSharedSecret())
        .withUserDerivationKey(cfg.userIdTokenSharedSecret().value())
        .prependUsername(false)
        .withDerivedUsernameTruncateLength(16)
        .withClock(clock)
        .build();
  }

  private final ExternalServiceCredentialsGenerator svrbCredentialGenerator;

  public SecureValueRecoveryBController(final ExternalServiceCredentialsGenerator svrbCredentialGenerator) {
    this.svrbCredentialGenerator = svrbCredentialGenerator;
  }

  @GET
  @Path("/auth")
  @Produces(MediaType.APPLICATION_JSON)
  @Operation(
      summary = "Generate credentials for SVRB",
      description = """
          Generate SVRB service credentials. Generated credentials have an expiration time of 1 day (subject to change)
          """
  )
  @ApiResponse(responseCode = "200", description = "`JSON` with generated credentials.", useReturnTypeSchema = true)
  @ApiResponse(responseCode = "401", description = "Account authentication check failed.")
  public ExternalServiceCredentials getAuth(@Auth final AuthenticatedDevice auth) {
    return svrbCredentialGenerator.generateFor(auth.accountIdentifier().toString());
  }
}
```
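As a rough illustration (not part of the change), the sketch below shows how the new controller's credential generator could be wired from the `svrb` block of the server configuration and used to derive per-account SVRB credentials; the wrapper class and method names here are hypothetical, and only calls that appear in the diff above are used.

```java
import java.util.UUID;
import org.whispersystems.textsecuregcm.auth.ExternalServiceCredentials;
import org.whispersystems.textsecuregcm.auth.ExternalServiceCredentialsGenerator;
import org.whispersystems.textsecuregcm.configuration.SecureValueRecoveryConfiguration;
import org.whispersystems.textsecuregcm.controllers.SecureValueRecoveryBController;

// Hypothetical helper: derives SVRB credentials for an account identifier from the
// SecureValueRecoveryConfiguration loaded from the server YAML (the `svrb` block above).
final class SvrbCredentialsExample {

  static ExternalServiceCredentials credentialsFor(final SecureValueRecoveryConfiguration svrbConfiguration,
      final UUID accountIdentifier) {

    // Same factory method the server uses when constructing SecureValueRecoveryBController
    final ExternalServiceCredentialsGenerator generator =
        SecureValueRecoveryBController.credentialsGenerator(svrbConfiguration);

    // Produces a username/password pair derived from the shared secrets for this account
    return generator.generateFor(accountIdentifier.toString());
  }
}
```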
@ -15,6 +15,7 @@ import io.micrometer.core.instrument.Tag;
|
|||
import io.micrometer.core.instrument.Tags;
|
||||
import io.swagger.v3.oas.annotations.ExternalDocumentation;
|
||||
import io.swagger.v3.oas.annotations.Operation;
|
||||
import io.swagger.v3.oas.annotations.headers.Header;
|
||||
import io.swagger.v3.oas.annotations.media.Content;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import io.swagger.v3.oas.annotations.responses.ApiResponse;
|
||||
|
@ -66,6 +67,7 @@ import org.whispersystems.textsecuregcm.configuration.SubscriptionConfiguration;
|
|||
import org.whispersystems.textsecuregcm.configuration.SubscriptionLevelConfiguration;
|
||||
import org.whispersystems.textsecuregcm.entities.Badge;
|
||||
import org.whispersystems.textsecuregcm.entities.PurchasableBadge;
|
||||
import org.whispersystems.textsecuregcm.mappers.SubscriptionExceptionMapper;
|
||||
import org.whispersystems.textsecuregcm.metrics.MetricsUtil;
|
||||
import org.whispersystems.textsecuregcm.metrics.UserAgentTagUtil;
|
||||
import org.whispersystems.textsecuregcm.storage.PaymentTime;
|
||||
|
@ -218,6 +220,19 @@ public class SubscriptionController {
|
|||
@DELETE
|
||||
@Path("/{subscriberId}")
|
||||
@Produces(MediaType.APPLICATION_JSON)
|
||||
@Operation(summary = "Cancel a subscription", description = """
|
||||
Cancels any current subscription at the end of the current subscription period.
|
||||
|
||||
Note: Apple IAP subscriptions do not support server-side cancellation, so this method should only be called after
|
||||
cancelling a subscription from storekit to keep server data up to date.
|
||||
""")
|
||||
@ApiResponse(responseCode = "200", description = "All subscriptions cancelled")
|
||||
@ApiResponse(responseCode = "403", description = "Account authentication is present")
|
||||
@ApiResponse(responseCode = "404", description = "subscriberId is not found or malformed")
|
||||
@ApiResponse(responseCode = "400", description = "The associated subscription is not a type that can be cancelled")
|
||||
@ApiResponse(responseCode = "429", description = "Too many attempts", headers = @Header(
|
||||
name = "Retry-After",
|
||||
description = "If present, a positive integer indicating the number of seconds before a subsequent attempt could succeed"))
|
||||
public CompletableFuture<Response> deleteSubscriber(
|
||||
@Auth Optional<AuthenticatedDevice> authenticatedAccount,
|
||||
@PathParam("subscriberId") String subscriberId) throws SubscriptionException {
|
||||
|
@ -230,6 +245,16 @@ public class SubscriptionController {
|
|||
@Path("/{subscriberId}")
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Produces(MediaType.APPLICATION_JSON)
|
||||
@Operation(summary = "Create/refresh a subscriber", description = """
|
||||
Creates a subscriber record if it does not exist, otherwise refreshes its last access time.
|
||||
|
||||
Subscribers MUST periodically hit this endpoint to update the access time on the subscription record. Subscribers
|
||||
SHOULD attempt to make an update call approximately every 3 days. Not accessing this endpoint for an extended
|
||||
period of time will result in the subscription being canceled.
|
||||
""")
|
||||
@ApiResponse(responseCode = "200", description = "The subscriber was successfully created or refreshed")
|
||||
@ApiResponse(responseCode = "403", description = "subscriberId authentication failure OR account authentication is present")
|
||||
@ApiResponse(responseCode = "404", description = "subscriberId is malformed")
|
||||
public CompletableFuture<Response> updateSubscriber(
|
||||
@Auth Optional<AuthenticatedDevice> authenticatedAccount,
|
||||
@PathParam("subscriberId") String subscriberId) throws SubscriptionException {
|
||||
|
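A minimal client-side keep-alive sketch for the create/refresh contract documented above (not part of the diff). The HTTP verb, route prefix, and server URL are assumptions; only the `/{subscriberId}` path segment and the roughly-three-day cadence come from the hunk itself.

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class SubscriberKeepAlive {

  public static void main(String[] args) {
    final String subscriberId = "base64-subscriber-id"; // placeholder
    final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();

    // Refresh the subscriber record about every three days so it is not canceled for inactivity.
    scheduler.scheduleAtFixedRate(() -> {
      try {
        final HttpRequest refresh = HttpRequest.newBuilder(
                URI.create("https://chat.example.org/v1/subscription/" + subscriberId)) // hypothetical URL
            .PUT(HttpRequest.BodyPublishers.noBody()) // assumed verb
            .build();
        HttpClient.newHttpClient().send(refresh, HttpResponse.BodyHandlers.discarding());
      } catch (Exception e) {
        // A single missed refresh is tolerable; cancellation only happens after an extended gap.
        e.printStackTrace();
      }
    }, 0, 3, TimeUnit.DAYS);
  }
}
```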
@ -429,7 +454,9 @@ public class SubscriptionController {
|
|||
@ApiResponse(responseCode = "403", description = "subscriberId authentication failure OR account authentication is present")
|
||||
@ApiResponse(responseCode = "404", description = "No such subscriberId exists or subscriberId is malformed or the specified transaction does not exist")
|
||||
@ApiResponse(responseCode = "409", description = "subscriberId is already linked to a processor that does not support appstore payments. Delete this subscriberId and use a new one.")
|
||||
@ApiResponse(responseCode = "429", description = "Rate limit exceeded.")
|
||||
@ApiResponse(responseCode = "429", description = "Too many attempts", headers = @Header(
|
||||
name = "Retry-After",
|
||||
description = "If present, a positive integer indicating the number of seconds before a subsequent attempt could succeed"))
|
||||
public CompletableFuture<SetSubscriptionLevelSuccessResponse> setAppStoreSubscription(
|
||||
@Auth Optional<AuthenticatedDevice> authenticatedAccount,
|
||||
@PathParam("subscriberId") String subscriberId,
|
||||
|
@ -471,6 +498,9 @@ public class SubscriptionController {
|
|||
@ApiResponse(responseCode = "403", description = "subscriberId authentication failure OR account authentication is present")
|
||||
@ApiResponse(responseCode = "404", description = "No such subscriberId exists or subscriberId is malformed or the purchaseToken does not exist")
|
||||
@ApiResponse(responseCode = "409", description = "subscriberId is already linked to a processor that does not support Play Billing. Delete this subscriberId and use a new one.")
|
||||
@ApiResponse(responseCode = "429", description = "Too many attempts", headers = @Header(
|
||||
name = "Retry-After",
|
||||
description = "If present, a positive integer indicating the number of seconds before a subsequent attempt could succeed"))
|
||||
public CompletableFuture<SetSubscriptionLevelSuccessResponse> setPlayStoreSubscription(
|
||||
@Auth Optional<AuthenticatedDevice> authenticatedAccount,
|
||||
@PathParam("subscriberId") String subscriberId,
|
||||
|
@ -625,6 +655,9 @@ public class SubscriptionController {
|
|||
@ApiResponse(responseCode = "200", description = "The subscriberId exists", content = @Content(schema = @Schema(implementation = GetSubscriptionInformationResponse.class)))
|
||||
@ApiResponse(responseCode = "403", description = "subscriberId authentication failure OR account authentication is present")
|
||||
@ApiResponse(responseCode = "404", description = "No such subscriberId exists or subscriberId is malformed")
|
||||
@ApiResponse(responseCode = "429", description = "Too many attempts", headers = @Header(
|
||||
name = "Retry-After",
|
||||
description = "If present, a positive integer indicating the number of seconds before a subsequent attempt could succeed"))
|
||||
public CompletableFuture<Response> getSubscriptionInformation(
|
||||
@Auth Optional<AuthenticatedDevice> authenticatedAccount,
|
||||
@PathParam("subscriberId") String subscriberId) throws SubscriptionException {
|
||||
|
@ -650,16 +683,64 @@ public class SubscriptionController {
|
|||
.orElseGet(() -> Response.ok(new GetSubscriptionInformationResponse(null, null)).build()));
|
||||
}
|
||||
|
||||
public record GetReceiptCredentialsRequest(@NotEmpty byte[] receiptCredentialRequest) {
|
||||
public record GetReceiptCredentialsRequest(
|
||||
@Schema(description = "A ReceiptCredentialRequest encoded in standard base64 with padding")
|
||||
@NotEmpty byte[] receiptCredentialRequest) {
|
||||
}
|
||||
|
||||
public record GetReceiptCredentialsResponse(@NotEmpty byte[] receiptCredentialResponse) {
|
||||
public record GetReceiptCredentialsResponse(
|
||||
@Schema(description = "A ReceiptCredentialResponse encoded in standard base64 with padding")
|
||||
@NotEmpty byte[] receiptCredentialResponse) {
|
||||
}
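Since both records carry raw byte arrays, the wire format is simply standard padded base64 inside a small JSON object. An illustrative request body (the value is a placeholder, not a real ReceiptCredentialRequest):

```json
{
  "receiptCredentialRequest": "AAECAwQFBgcICQ=="
}
```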
|
||||
|
||||
@POST
|
||||
@Path("/{subscriberId}/receipt_credentials")
|
||||
@Consumes(MediaType.APPLICATION_JSON)
|
||||
@Produces(MediaType.APPLICATION_JSON)
|
||||
@Operation(summary = "Create receipt credentials", description = """
|
||||
Create a receipt from a valid payment invoice that can be used to obtain an entitlement
|
||||
|
||||
This request is repeatable so long as the ReceiptCredentialRequest remains the same. Clients should use the same
|
||||
ReceiptCredentialRequest value until they attempt to redeem the resulting ReceiptCredentialPresentation. After
|
||||
this point, the ReceiptCredentialRequest MUST NOT be reused or you may not be able to redeem a valid payment
|
||||
invoice. Clients SHOULD retry requests at this endpoint with the same ReceiptCredentialRequest value until
|
||||
receiving a response. After receiving a response, clients should then compute the ReceiptCredentialPresentation
|
||||
and redeem it at the receipt redemption endpoint. Once the first attempt is made there, the same
|
||||
ReceiptCredentialRequest MUST NOT be used again to request receipt credentials.
|
||||
|
||||
Note that you may in fact redeem TWO or more invoices for the same ReceiptCredentialRequest while retrying this
|
||||
operation if a later invoice gets paid while you are retrying. However, the returned receipt is always for the
|
||||
latest invoice, so it will have the latest expiration possible and no entitlement time will be lost. The important
|
||||
thing is not to reuse ReceiptCredentialRequest after you have started attempting to redeem the associated
|
||||
ReceiptCredentialPresentation. Otherwise, you may produce a ReceiptCredentialPresentation for a later invoice that
|
||||
cannot be redeemed.
|
||||
|
||||
Clients MUST validate that the generated receipt credential's level and expiration matches their expectations.
|
||||
""")
|
||||
@ApiResponse(responseCode = "200", description = "Successfully created receipt", content = @Content(schema = @Schema(implementation = GetReceiptCredentialsResponse.class)))
|
||||
@ApiResponse(responseCode = "204", description = "No invoice has been issued for this subscription OR invoice is in 'open' state")
|
||||
@ApiResponse(responseCode = "400", description = "Bad ReceiptCredentialRequest")
|
||||
@ApiResponse(responseCode = "402", description = "Invoice is in any state other than 'open' or 'paid'. May include chargeFailure details in body.",
|
||||
content = @Content(schema = @Schema(
|
||||
nullable = true,
|
||||
example = """
|
||||
{
|
||||
"chargeFailure": {
|
||||
"code": "incorrect_account_holder_name",
|
||||
"message": "The transaction can't be processed because your customer's account information is missing [...]",
|
||||
"outcomeNetworkStatus": "declined_by_network",
|
||||
"outcomeReason": "generic_decline",
|
||||
"outcomeType": "issuer_declined"
|
||||
}
|
||||
}
|
||||
""",
|
||||
implementation = SubscriptionExceptionMapper.ChargeFailureResponse.class)))
|
||||
@ApiResponse(responseCode = "403", description = "subscriberId authentication failure OR account authentication is present")
|
||||
@ApiResponse(responseCode = "404", description = "subscriberId is not found OR malformed OR no subscription setup on the subscriber id")
|
||||
@ApiResponse(responseCode = "409", description = "latest paid receipt on subscription was already redeemed for a receipt credential but with a different receipt credential request")
|
||||
@ApiResponse(responseCode = "429", description = "Too many attempts", headers = @Header(
|
||||
name = "Retry-After",
|
||||
description = "If present, a positive integer indicating the number of seconds before a subsequent attempt could succeed"))
|
||||
public CompletableFuture<Response> createSubscriptionReceiptCredentials(
|
||||
@Auth Optional<AuthenticatedDevice> authenticatedAccount,
|
||||
@HeaderParam(HttpHeaders.USER_AGENT) final String userAgent,
|
||||
|
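A compact sketch of the retry discipline described in the operation above (not part of the diff; the `Svr` interface and the presentation/redemption helpers are hypothetical stand-ins for client-side code):

```java
import java.io.IOException;
import java.util.function.Consumer;
import java.util.function.Function;

/** Illustrative client-side flow: one ReceiptCredentialRequest per redemption attempt. */
final class ReceiptCredentialFlow {

  interface Svr {
    byte[] requestReceiptCredential(String subscriberId, byte[] receiptCredentialRequest) throws IOException;
  }

  static void run(final Svr server, final String subscriberId, final byte[] receiptCredentialRequest,
      final Function<byte[], byte[]> computePresentation, final Consumer<byte[]> redeem)
      throws InterruptedException {

    byte[] response = null;
    while (response == null) {
      try {
        // Safe to retry with the SAME ReceiptCredentialRequest until a response arrives.
        response = server.requestReceiptCredential(subscriberId, receiptCredentialRequest);
      } catch (IOException transientFailure) {
        Thread.sleep(1_000);
      }
    }

    // Once redemption is attempted, this ReceiptCredentialRequest must never be reused;
    // any future invoice starts over with a fresh request.
    redeem.accept(computePresentation.apply(response));
  }
}
```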
|
|
@ -1,64 +0,0 @@
|
|||
/*
|
||||
* Copyright 2024 Signal Messenger, LLC
|
||||
* SPDX-License-Identifier: AGPL-3.0-only
|
||||
*/
|
||||
|
||||
package org.whispersystems.textsecuregcm.entities;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonValue;
|
||||
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
|
||||
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
|
||||
import io.swagger.v3.oas.annotations.media.Schema;
|
||||
import java.util.Map;
|
||||
import javax.annotation.Nullable;
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
import org.whispersystems.textsecuregcm.util.ByteArrayAdapter;
|
||||
|
||||
public record AuthCheckResponseV3(
|
||||
@Schema(description = """
|
||||
A dictionary with the auth check results, keyed by the token corresponding token provided in the request.
|
||||
""")
|
||||
@NotNull Map<String, Result> matches) {
|
||||
|
||||
public record Result(
|
||||
@Schema(description = "The status of the credential. Either match, no-match, or invalid")
|
||||
CredentialStatus status,
|
||||
|
||||
@Schema(description = """
|
||||
If the credential was a match, the stored shareSet that can be used to restore a value from SVR. Encoded in
|
||||
standard un-padded base64.
|
||||
""", implementation = String.class)
|
||||
@JsonSerialize(using = ByteArrayAdapter.Serializing.class)
|
||||
@JsonDeserialize(using = ByteArrayAdapter.Deserializing.class)
|
||||
@Nullable byte[] shareSet) {
|
||||
|
||||
public static Result invalid() {
|
||||
return new Result(CredentialStatus.INVALID, null);
|
||||
}
|
||||
|
||||
public static Result noMatch() {
|
||||
return new Result(CredentialStatus.NO_MATCH, null);
|
||||
}
|
||||
|
||||
public static Result match(@Nullable final byte[] shareSet) {
|
||||
return new Result(CredentialStatus.MATCH, shareSet);
|
||||
}
|
||||
}
|
||||
|
||||
public enum CredentialStatus {
|
||||
MATCH("match"),
|
||||
NO_MATCH("no-match"),
|
||||
INVALID("invalid");
|
||||
|
||||
private final String clientCode;
|
||||
|
||||
CredentialStatus(final String clientCode) {
|
||||
this.clientCode = clientCode;
|
||||
}
|
||||
|
||||
@JsonValue
|
||||
public String clientCode() {
|
||||
return clientCode;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -18,7 +18,7 @@ import org.whispersystems.textsecuregcm.WhisperServerConfiguration;
import org.whispersystems.textsecuregcm.auth.ExternalServiceCredentialsGenerator;
import org.whispersystems.textsecuregcm.configuration.DirectoryV2ClientConfiguration;
import org.whispersystems.textsecuregcm.configuration.PaymentsServiceConfiguration;
import org.whispersystems.textsecuregcm.configuration.SecureValueRecovery2Configuration;
import org.whispersystems.textsecuregcm.configuration.SecureValueRecoveryConfiguration;

enum ExternalServiceDefinitions {
  DIRECTORY(ExternalServiceType.EXTERNAL_SERVICE_TYPE_DIRECTORY, (chatConfig, clock) -> {
@ -38,7 +38,17 @@ enum ExternalServiceDefinitions {
        .build();
  }),
  SVR(ExternalServiceType.EXTERNAL_SERVICE_TYPE_SVR, (chatConfig, clock) -> {
    final SecureValueRecovery2Configuration cfg = chatConfig.getSvr2Configuration();
    final SecureValueRecoveryConfiguration cfg = chatConfig.getSvr2Configuration();
    return ExternalServiceCredentialsGenerator
        .builder(cfg.userAuthenticationTokenSharedSecret())
        .withUserDerivationKey(cfg.userIdTokenSharedSecret().value())
        .prependUsername(false)
        .withDerivedUsernameTruncateLength(16)
        .withClock(clock)
        .build();
  }),
  SVRB(ExternalServiceType.EXTERNAL_SERVICE_TYPE_SVRB, (chatConfig, clock) -> {
    final SecureValueRecoveryConfiguration cfg = chatConfig.getSvrbConfiguration();
    return ExternalServiceCredentialsGenerator
        .builder(cfg.userAuthenticationTokenSharedSecret())
        .withUserDerivationKey(cfg.userIdTokenSharedSecret().value())
|
||||
|
|
|
@ -13,11 +13,14 @@ import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.ext.ExceptionMapper;
import java.util.Map;
import org.whispersystems.textsecuregcm.storage.SubscriptionException;
import org.whispersystems.textsecuregcm.subscriptions.ChargeFailure;

public class SubscriptionExceptionMapper implements ExceptionMapper<SubscriptionException> {
  @VisibleForTesting
  public static final int PROCESSOR_ERROR_STATUS_CODE = 440;

  public record ChargeFailureResponse(String processor, ChargeFailure chargeFailure) {}

  @Override
  public Response toResponse(final SubscriptionException exception) {

@ -31,17 +34,14 @@ public class SubscriptionExceptionMapper implements ExceptionMapper<Subscription
    }
    if (exception instanceof SubscriptionException.ProcessorException e) {
      return Response.status(PROCESSOR_ERROR_STATUS_CODE)
          .entity(Map.of(
              "processor", e.getProcessor().name(),
              "chargeFailure", e.getChargeFailure()
          ))
          .entity(new ChargeFailureResponse(e.getProcessor().name(), e.getChargeFailure()))
          .type(MediaType.APPLICATION_JSON_TYPE)
          .build();
    }
    if (exception instanceof SubscriptionException.ChargeFailurePaymentRequired e) {
      return Response
          .status(Response.Status.PAYMENT_REQUIRED)
          .entity(Map.of("chargeFailure", e.getChargeFailure()))
          .entity(new ChargeFailureResponse(e.getProcessor().name(), e.getChargeFailure()))
          .type(MediaType.APPLICATION_JSON_TYPE)
          .build();
    }
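With the typed `ChargeFailureResponse` record, the 402 and 440 error bodies now share one JSON shape. Roughly (illustrative values; the processor name is an assumption about the `PaymentProvider` enum, and the charge-failure fields mirror the 402 example earlier in this diff):

```json
{
  "processor": "STRIPE",
  "chargeFailure": {
    "code": "generic_decline",
    "message": "Your card was declined.",
    "outcomeNetworkStatus": "declined_by_network",
    "outcomeReason": "generic_decline",
    "outcomeType": "issuer_declined"
  }
}
```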
|
||||
|
|
|
@ -248,7 +248,7 @@ public class LettuceShardCircuitBreaker implements NettyCustomizer {
        // RedisNoScriptException doesn’t indicate a fault the breaker can protect against
        if (throwable != null && !(throwable instanceof RedisNoScriptException)) {
          breaker.onError(durationNanos, TimeUnit.NANOSECONDS, throwable);
          logger.warn("Command completed with error", throwable);
          logger.warn("Command completed with error for: {}/{}", clusterName, shardAddress, throwable);
        } else {
          breaker.onSuccess(durationNanos, TimeUnit.NANOSECONDS);
        }
|
||||
|
|
|
@ -21,7 +21,7 @@ import java.util.concurrent.Executor;
import java.util.concurrent.ScheduledExecutorService;
import org.whispersystems.textsecuregcm.auth.ExternalServiceCredentials;
import org.whispersystems.textsecuregcm.auth.ExternalServiceCredentialsGenerator;
import org.whispersystems.textsecuregcm.configuration.SecureValueRecovery2Configuration;
import org.whispersystems.textsecuregcm.configuration.SecureValueRecoveryConfiguration;
import org.whispersystems.textsecuregcm.http.FaultTolerantHttpClient;
import org.whispersystems.textsecuregcm.util.HttpUtils;

@ -39,7 +39,7 @@ public class SecureValueRecovery2Client {

  public SecureValueRecovery2Client(final ExternalServiceCredentialsGenerator secureValueRecoveryCredentialsGenerator,
      final Executor executor, final ScheduledExecutorService retryExecutor,
      final SecureValueRecovery2Configuration configuration)
      final SecureValueRecoveryConfiguration configuration)
      throws CertificateException {
    this.secureValueRecoveryCredentialsGenerator = secureValueRecoveryCredentialsGenerator;
    this.deleteUri = URI.create(configuration.uri()).resolve(DELETE_PATH);
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
package org.whispersystems.textsecuregcm.storage;
|
||||
|
||||
import java.time.Instant;
|
||||
import io.micrometer.core.instrument.Metrics;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
|
@ -13,6 +13,8 @@ import java.util.concurrent.CompletableFuture;
|
|||
import org.whispersystems.textsecuregcm.entities.ECPreKey;
|
||||
import org.whispersystems.textsecuregcm.entities.ECSignedPreKey;
|
||||
import org.whispersystems.textsecuregcm.entities.KEMSignedPreKey;
|
||||
import org.whispersystems.textsecuregcm.experiment.ExperimentEnrollmentManager;
|
||||
import org.whispersystems.textsecuregcm.metrics.MetricsUtil;
|
||||
import reactor.core.publisher.Flux;
|
||||
import software.amazon.awssdk.services.dynamodb.model.TransactWriteItem;
|
||||
|
||||
|
@ -23,18 +25,25 @@ public class KeysManager {
|
|||
private final PagedSingleUseKEMPreKeyStore pagedPqPreKeys;
|
||||
private final RepeatedUseECSignedPreKeyStore ecSignedPreKeys;
|
||||
private final RepeatedUseKEMSignedPreKeyStore pqLastResortKeys;
|
||||
private final ExperimentEnrollmentManager experimentEnrollmentManager;
|
||||
|
||||
public static String PAGED_KEYS_EXPERIMENT_NAME = "pagedPreKeys";
|
||||
|
||||
private static final String TAKE_PQ_NAME = MetricsUtil.name(KeysManager.class, "takePq");
|
||||
|
||||
public KeysManager(
|
||||
final SingleUseECPreKeyStore ecPreKeys,
|
||||
final SingleUseKEMPreKeyStore pqPreKeys,
|
||||
final PagedSingleUseKEMPreKeyStore pagedPqPreKeys,
|
||||
final RepeatedUseECSignedPreKeyStore ecSignedPreKeys,
|
||||
final RepeatedUseKEMSignedPreKeyStore pqLastResortKeys) {
|
||||
final RepeatedUseKEMSignedPreKeyStore pqLastResortKeys,
|
||||
final ExperimentEnrollmentManager experimentEnrollmentManager) {
|
||||
this.ecPreKeys = ecPreKeys;
|
||||
this.pqPreKeys = pqPreKeys;
|
||||
this.pagedPqPreKeys = pagedPqPreKeys;
|
||||
this.ecSignedPreKeys = ecSignedPreKeys;
|
||||
this.pqLastResortKeys = pqLastResortKeys;
|
||||
this.experimentEnrollmentManager = experimentEnrollmentManager;
|
||||
}
|
||||
|
||||
public TransactWriteItem buildWriteItemForEcSignedPreKey(final UUID identifier,
|
||||
|
@ -79,22 +88,31 @@ public class KeysManager {
|
|||
);
|
||||
}
|
||||
|
||||
public CompletableFuture<Void> storeEcSignedPreKeys(final UUID identifier, final byte deviceId, final ECSignedPreKey ecSignedPreKey) {
|
||||
public CompletableFuture<Void> storeEcSignedPreKeys(final UUID identifier, final byte deviceId,
|
||||
final ECSignedPreKey ecSignedPreKey) {
|
||||
return ecSignedPreKeys.store(identifier, deviceId, ecSignedPreKey);
|
||||
}
|
||||
|
||||
public CompletableFuture<Void> storePqLastResort(final UUID identifier, final byte deviceId, final KEMSignedPreKey lastResortKey) {
|
||||
public CompletableFuture<Void> storePqLastResort(final UUID identifier, final byte deviceId,
|
||||
final KEMSignedPreKey lastResortKey) {
|
||||
return pqLastResortKeys.store(identifier, deviceId, lastResortKey);
|
||||
}
|
||||
|
||||
public CompletableFuture<Void> storeEcOneTimePreKeys(final UUID identifier, final byte deviceId,
|
||||
final List<ECPreKey> preKeys) {
|
||||
final List<ECPreKey> preKeys) {
|
||||
return ecPreKeys.store(identifier, deviceId, preKeys);
|
||||
}
|
||||
|
||||
public CompletableFuture<Void> storeKemOneTimePreKeys(final UUID identifier, final byte deviceId,
|
||||
final List<KEMSignedPreKey> preKeys) {
|
||||
return pqPreKeys.store(identifier, deviceId, preKeys);
|
||||
final List<KEMSignedPreKey> preKeys) {
|
||||
final boolean enrolledInPagedKeys = experimentEnrollmentManager.isEnrolled(identifier, PAGED_KEYS_EXPERIMENT_NAME);
|
||||
final CompletableFuture<Void> deleteOtherKeys = enrolledInPagedKeys
|
||||
? pqPreKeys.delete(identifier, deviceId)
|
||||
: pagedPqPreKeys.delete(identifier, deviceId);
|
||||
return deleteOtherKeys.thenCompose(ignored -> enrolledInPagedKeys
|
||||
? pagedPqPreKeys.store(identifier, deviceId, preKeys)
|
||||
: pqPreKeys.store(identifier, deviceId, preKeys));
|
||||
|
||||
}
|
||||
|
||||
public CompletableFuture<Optional<ECPreKey>> takeEC(final UUID identifier, final byte deviceId) {
|
||||
|
@ -102,10 +120,36 @@ public class KeysManager {
|
|||
}
|
||||
|
||||
public CompletableFuture<Optional<KEMSignedPreKey>> takePQ(final UUID identifier, final byte deviceId) {
|
||||
return pqPreKeys.take(identifier, deviceId)
|
||||
final boolean enrolledInPagedKeys = experimentEnrollmentManager.isEnrolled(identifier, PAGED_KEYS_EXPERIMENT_NAME);
|
||||
return tagTakePQ(pagedPqPreKeys.take(identifier, deviceId), PQSource.PAGE, enrolledInPagedKeys)
|
||||
.thenCompose(maybeSingleUsePreKey -> maybeSingleUsePreKey
|
||||
.map(ignored -> CompletableFuture.completedFuture(maybeSingleUsePreKey))
|
||||
.orElseGet(() -> tagTakePQ(pqPreKeys.take(identifier, deviceId), PQSource.ROW, enrolledInPagedKeys)))
|
||||
.thenCompose(maybeSingleUsePreKey -> maybeSingleUsePreKey
|
||||
.map(singleUsePreKey -> CompletableFuture.completedFuture(maybeSingleUsePreKey))
|
||||
.orElseGet(() -> pqLastResortKeys.find(identifier, deviceId)));
|
||||
.orElseGet(() -> tagTakePQ(pqLastResortKeys.find(identifier, deviceId), PQSource.LAST_RESORT, enrolledInPagedKeys)));
|
||||
}
|
||||
|
||||
private enum PQSource {
|
||||
PAGE,
|
||||
ROW,
|
||||
LAST_RESORT
|
||||
}
|
||||
private CompletableFuture<Optional<KEMSignedPreKey>> tagTakePQ(CompletableFuture<Optional<KEMSignedPreKey>> prekey, final PQSource source, final boolean enrolledInPagedKeys) {
|
||||
return prekey.thenApply(maybeSingleUsePreKey -> {
|
||||
final Optional<String> maybeSourceTag = maybeSingleUsePreKey
|
||||
// If we found a PK, use this source tag
|
||||
.map(ignore -> source.name())
|
||||
// If we didn't and this is our last resort, we didn't find a PK
|
||||
.or(() -> source == PQSource.LAST_RESORT ? Optional.of("absent") : Optional.empty());
|
||||
maybeSourceTag.ifPresent(sourceTag -> {
|
||||
Metrics.counter(TAKE_PQ_NAME,
|
||||
"source", sourceTag,
|
||||
"enrolled", Boolean.toString(enrolledInPagedKeys))
|
||||
.increment();
|
||||
});
|
||||
return maybeSingleUsePreKey;
|
||||
});
|
||||
}
|
||||
|
||||
public CompletableFuture<Optional<KEMSignedPreKey>> getLastResort(final UUID identifier, final byte deviceId) {
|
||||
|
@ -121,20 +165,24 @@ public class KeysManager {
|
|||
}
|
||||
|
||||
public CompletableFuture<Integer> getPqCount(final UUID identifier, final byte deviceId) {
|
||||
return pqPreKeys.getCount(identifier, deviceId);
|
||||
return pagedPqPreKeys.getCount(identifier, deviceId).thenCompose(count -> count == 0
|
||||
? pqPreKeys.getCount(identifier, deviceId)
|
||||
: CompletableFuture.completedFuture(count));
|
||||
}
|
||||
|
||||
public CompletableFuture<Void> deleteSingleUsePreKeys(final UUID identifier) {
|
||||
return CompletableFuture.allOf(
|
||||
ecPreKeys.delete(identifier),
|
||||
pqPreKeys.delete(identifier)
|
||||
pqPreKeys.delete(identifier),
|
||||
pagedPqPreKeys.delete(identifier)
|
||||
);
|
||||
}
|
||||
|
||||
public CompletableFuture<Void> deleteSingleUsePreKeys(final UUID accountUuid, final byte deviceId) {
|
||||
return CompletableFuture.allOf(
|
||||
ecPreKeys.delete(accountUuid, deviceId),
|
||||
pqPreKeys.delete(accountUuid, deviceId)
|
||||
pqPreKeys.delete(accountUuid, deviceId),
|
||||
pagedPqPreKeys.delete(accountUuid, deviceId)
|
||||
);
|
||||
}
|
||||
|
||||
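For readability, the lookup order that the new `takePQ` implements above can be summarized as a three-stage fallback (metrics tagging omitted). A self-contained sketch, not part of the diff; the store suppliers are placeholders:

```java
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.function.Supplier;

/** Illustrative fallback order: paged store, then per-row store, then the last-resort key. */
final class TakeOrderSketch<K> {

  CompletableFuture<Optional<K>> take(
      final Supplier<CompletableFuture<Optional<K>>> pagedStore,
      final Supplier<CompletableFuture<Optional<K>>> rowStore,
      final Supplier<CompletableFuture<Optional<K>>> lastResort) {

    return pagedStore.get()
        .thenCompose(fromPage -> fromPage.isPresent()
            ? CompletableFuture.completedFuture(fromPage)
            : rowStore.get())
        .thenCompose(found -> found.isPresent()
            ? CompletableFuture.completedFuture(found)
            : lastResort.get());
  }
}
```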
|
|
|
@ -57,7 +57,7 @@ class MessagesCacheInsertScript {
    );

    final List<byte[]> args = new ArrayList<>(Arrays.asList(
        envelope.toByteArray(), // message
        EnvelopeUtil.compress(envelope).toByteArray(), // message
        String.valueOf(envelope.getServerTimestamp()).getBytes(StandardCharsets.UTF_8), // currentTime
        envelope.getServerGuid().getBytes(StandardCharsets.UTF_8), // guid
        NEW_MESSAGE_EVENT_BYTES // eventPayload
|
||||
|
|
|
@ -105,7 +105,7 @@ public class MessagesDynamoDb extends AbstractDynamoDbStore {
        .put(KEY_SORT, convertSortKey(message.getServerTimestamp(), messageUuid))
        .put(LOCAL_INDEX_MESSAGE_UUID_KEY_SORT, convertLocalIndexMessageUuidSortKey(messageUuid))
        .put(KEY_TTL, AttributeValues.fromLong(getTtlForMessage(message)))
        .put(KEY_ENVELOPE_BYTES, AttributeValue.builder().b(SdkBytes.fromByteArray(message.toByteArray())).build());
        .put(KEY_ENVELOPE_BYTES, AttributeValue.builder().b(SdkBytes.fromByteArray(EnvelopeUtil.compress(message).toByteArray())).build());

    writeItems.add(WriteRequest.builder().putRequest(PutRequest.builder()
        .item(item.build())
|
||||
|
|
|
@ -22,7 +22,6 @@ import java.util.concurrent.CompletableFuture;
|
|||
import java.util.concurrent.CompletionException;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.signal.libsignal.protocol.InvalidKeyException;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
@ -44,7 +43,12 @@ import software.amazon.awssdk.services.dynamodb.model.QueryRequest;
|
|||
import software.amazon.awssdk.services.dynamodb.model.ReturnValue;
|
||||
import software.amazon.awssdk.services.dynamodb.model.UpdateItemRequest;
|
||||
import software.amazon.awssdk.services.s3.S3AsyncClient;
|
||||
import software.amazon.awssdk.services.s3.model.*;
|
||||
import software.amazon.awssdk.services.s3.model.DeleteObjectRequest;
|
||||
import software.amazon.awssdk.services.s3.model.GetObjectRequest;
|
||||
import software.amazon.awssdk.services.s3.model.ListObjectsV2Request;
|
||||
import software.amazon.awssdk.services.s3.model.ListObjectsV2Response;
|
||||
import software.amazon.awssdk.services.s3.model.PutObjectRequest;
|
||||
import software.amazon.awssdk.services.s3.model.S3Object;
|
||||
|
||||
/**
|
||||
* @implNote This version of a {@link SingleUsePreKeyStore} store bundles prekeys into "pages", which are stored in on
|
||||
|
|
|
@ -102,16 +102,23 @@ public class SubscriptionException extends Exception {

  public static class ChargeFailurePaymentRequired extends SubscriptionException {

    private final PaymentProvider processor;
    private final ChargeFailure chargeFailure;

    public ChargeFailurePaymentRequired(final ChargeFailure chargeFailure) {
    public ChargeFailurePaymentRequired(final PaymentProvider processor, final ChargeFailure chargeFailure) {
      super(null, null);
      this.processor = processor;
      this.chargeFailure = chargeFailure;
    }

    public PaymentProvider getProcessor() {
      return processor;
    }

    public ChargeFailure getChargeFailure() {
      return chargeFailure;
    }

  }

  public static class ProcessorException extends SubscriptionException {
|
||||
|
|
|
@ -633,7 +633,7 @@ public class BraintreeManager implements CustomerAwareSubscriptionPaymentProcess
    if (subscriptionStatus.equals(SubscriptionStatus.ACTIVE) || subscriptionStatus.equals(SubscriptionStatus.PAST_DUE)) {
      throw ExceptionUtils.wrap(new SubscriptionException.ReceiptRequestedForOpenPayment());
    }
    throw ExceptionUtils.wrap(new SubscriptionException.ChargeFailurePaymentRequired(createChargeFailure(transaction)));
    throw ExceptionUtils.wrap(new SubscriptionException.ChargeFailurePaymentRequired(getProvider(), createChargeFailure(transaction)));
  }

  final Instant paidAt = transaction.getSubscriptionDetails().getBillingPeriodStartDate().toInstant();
|
||||
|
|
|
@ -43,6 +43,7 @@ import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.whispersystems.textsecuregcm.controllers.RateLimitExceededException;
import org.whispersystems.textsecuregcm.metrics.MetricsUtil;
import org.whispersystems.textsecuregcm.storage.PaymentTime;
import org.whispersystems.textsecuregcm.storage.SubscriptionException;
@ -362,10 +363,14 @@ public class GooglePlayBillingManager implements SubscriptionPaymentProcessor {
            || e.getStatusCode() == Response.Status.GONE.getStatusCode()) {
          throw ExceptionUtils.wrap(new SubscriptionException.NotFound());
        }
        if (e.getStatusCode() == Response.Status.TOO_MANY_REQUESTS.getStatusCode()) {
          throw ExceptionUtils.wrap(new RateLimitExceededException(null));
        }
        final String details = e instanceof GoogleJsonResponseException
            ? ((GoogleJsonResponseException) e).getDetails().toString()
            : "";
        logger.warn("Unexpected HTTP status code {} from androidpublisher: {}", e.getStatusCode(), details, e);

        logger.warn("Unexpected HTTP status code {} from androidpublisher: {}", e.getStatusCode(), details);
        throw ExceptionUtils.wrap(e);
      }));
  }
|
||||
|
|
|
@ -645,7 +645,8 @@ public class StripeManager implements CustomerAwareSubscriptionPaymentProcessor

            // If the charge object has a failure reason we can present to the user, create a detailed exception
            .filter(charge -> charge.getFailureCode() != null || charge.getFailureMessage() != null)
            .<SubscriptionException> map(charge -> new SubscriptionException.ChargeFailurePaymentRequired(createChargeFailure(charge)))
            .<SubscriptionException> map(charge ->
                new SubscriptionException.ChargeFailurePaymentRequired(getProvider(), createChargeFailure(charge)))

            // Otherwise, return a generic payment required error
            .orElseGet(() -> new SubscriptionException.PaymentRequired())));
|
||||
|
|
|
@ -33,6 +33,7 @@ import org.whispersystems.textsecuregcm.backup.Cdn3RemoteStorageManager;
|
|||
import org.whispersystems.textsecuregcm.configuration.dynamic.DynamicConfiguration;
|
||||
import org.whispersystems.textsecuregcm.controllers.SecureStorageController;
|
||||
import org.whispersystems.textsecuregcm.controllers.SecureValueRecovery2Controller;
|
||||
import org.whispersystems.textsecuregcm.experiment.ExperimentEnrollmentManager;
|
||||
import org.whispersystems.textsecuregcm.experiment.PushNotificationExperimentSamples;
|
||||
import org.whispersystems.textsecuregcm.limits.RateLimiters;
|
||||
import org.whispersystems.textsecuregcm.metrics.MicrometerAwsSdkMetricPublisher;
|
||||
|
@ -122,6 +123,9 @@ record CommandDependencies(
|
|||
new DynamicConfigurationManager<>(
|
||||
configuration.getDynamicConfig().build(awsCredentialsProvider, dynamicConfigurationExecutor), DynamicConfiguration.class);
|
||||
dynamicConfigurationManager.start();
|
||||
ExperimentEnrollmentManager experimentEnrollmentManager =
|
||||
new ExperimentEnrollmentManager(dynamicConfigurationManager);
|
||||
|
||||
final ClientResources.Builder redisClientResourcesBuilder = ClientResources.builder();
|
||||
|
||||
FaultTolerantRedisClusterClient cacheCluster = configuration.getCacheClusterConfiguration()
|
||||
|
@ -224,7 +228,8 @@ record CommandDependencies(
|
|||
new RepeatedUseECSignedPreKeyStore(dynamoDbAsyncClient,
|
||||
configuration.getDynamoDbTables().getEcSignedPreKeys().getTableName()),
|
||||
new RepeatedUseKEMSignedPreKeyStore(dynamoDbAsyncClient,
|
||||
configuration.getDynamoDbTables().getKemLastResortKeys().getTableName()));
|
||||
configuration.getDynamoDbTables().getKemLastResortKeys().getTableName()),
|
||||
experimentEnrollmentManager);
|
||||
MessagesDynamoDb messagesDynamoDb = new MessagesDynamoDb(dynamoDbClient, dynamoDbAsyncClient,
|
||||
configuration.getDynamoDbTables().getMessages().getTableName(),
|
||||
configuration.getDynamoDbTables().getMessages().getExpiration(),
|
||||
|
|
|
@ -53,6 +53,7 @@ enum ExternalServiceType {
  EXTERNAL_SERVICE_TYPE_PAYMENTS = 2;
  EXTERNAL_SERVICE_TYPE_STORAGE = 3;
  EXTERNAL_SERVICE_TYPE_SVR = 4;
  EXTERNAL_SERVICE_TYPE_SVRB = 5;
}

message GetExternalServiceCredentialsRequest {
|
||||
|
|
|
@ -33,9 +33,8 @@ public class LocalFaultTolerantRedisClientFactory implements FaultTolerantRedisC
    if (shutdownHookConfigured.compareAndSet(false, true)) {
      Runtime.getRuntime().addShutdownHook(new Thread(() -> {
        try {
          REDIS_SERVER_EXTENSION.afterEach(null);
          REDIS_SERVER_EXTENSION.afterAll(null);
        } catch (Exception e) {
          REDIS_SERVER_EXTENSION.close();
        } catch (Throwable e) {
          throw new RuntimeException(e);
        }
      }));
@ -34,8 +34,8 @@ public class LocalFaultTolerantRedisClusterFactory implements FaultTolerantRedis
      Runtime.getRuntime().addShutdownHook(new Thread(() -> {
        try {
          redisClusterExtension.afterEach(null);
          redisClusterExtension.afterAll(null);
        } catch (Exception e) {
          redisClusterExtension.close();
        } catch (Throwable e) {
          throw new RuntimeException(e);
        }
      }));
|
||||
|
|
|
@ -6,28 +6,43 @@
|
|||
package org.whispersystems.textsecuregcm.controllers;
|
||||
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.whispersystems.textsecuregcm.util.MockUtils.randomSecretBytes;
|
||||
|
||||
import io.dropwizard.testing.junit5.DropwizardExtensionsSupport;
|
||||
import io.dropwizard.testing.junit5.ResourceExtension;
|
||||
import jakarta.ws.rs.client.Entity;
|
||||
import jakarta.ws.rs.core.MediaType;
|
||||
import jakarta.ws.rs.core.Response;
|
||||
import java.util.Map;
|
||||
import java.util.stream.Collectors;
|
||||
import org.glassfish.jersey.test.grizzly.GrizzlyWebTestContainerFactory;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Nested;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.Mockito;
|
||||
import org.whispersystems.textsecuregcm.auth.ExternalServiceCredentials;
|
||||
import org.whispersystems.textsecuregcm.auth.ExternalServiceCredentialsGenerator;
|
||||
import org.whispersystems.textsecuregcm.configuration.SecureValueRecovery2Configuration;
|
||||
import org.whispersystems.textsecuregcm.configuration.SecureValueRecoveryConfiguration;
|
||||
import org.whispersystems.textsecuregcm.entities.AuthCheckRequest;
|
||||
import org.whispersystems.textsecuregcm.entities.AuthCheckResponseV2;
|
||||
import org.whispersystems.textsecuregcm.storage.Account;
|
||||
import org.whispersystems.textsecuregcm.storage.AccountsManager;
|
||||
import org.whispersystems.textsecuregcm.tests.util.AuthHelper;
|
||||
import org.whispersystems.textsecuregcm.util.MutableClock;
|
||||
import org.whispersystems.textsecuregcm.util.SystemMapper;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@ExtendWith(DropwizardExtensionsSupport.class)
|
||||
public class SecureValueRecovery2ControllerTest extends SecureValueRecoveryControllerBaseTest {
|
||||
public class SecureValueRecovery2ControllerTest {
|
||||
|
||||
private static final SecureValueRecovery2Configuration CFG = new SecureValueRecovery2Configuration(
|
||||
private static final SecureValueRecoveryConfiguration CFG = new SecureValueRecoveryConfiguration(
|
||||
"",
|
||||
randomSecretBytes(32),
|
||||
randomSecretBytes(32),
|
||||
|
@ -52,19 +67,305 @@ public class SecureValueRecovery2ControllerTest extends SecureValueRecoveryContr
|
|||
.addResource(CONTROLLER)
|
||||
.build();
|
||||
|
||||
protected SecureValueRecovery2ControllerTest() {
|
||||
super("/v2", ACCOUNTS_MANAGER, CLOCK, RESOURCES, CREDENTIAL_GENERATOR);
|
||||
@Nested
|
||||
class WithBackupsPrefix extends SecureValueRecoveryControllerBaseTest {
|
||||
protected WithBackupsPrefix() {
|
||||
super("/v2/backup");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
Map<String, CheckStatus> parseCheckResponse(final Response response) {
|
||||
final AuthCheckResponseV2 authCheckResponseV2 = response.readEntity(AuthCheckResponseV2.class);
|
||||
return authCheckResponseV2.matches().entrySet().stream().collect(Collectors.toMap(
|
||||
Map.Entry::getKey, e -> switch (e.getValue()) {
|
||||
case MATCH -> CheckStatus.MATCH;
|
||||
case INVALID -> CheckStatus.INVALID;
|
||||
case NO_MATCH -> CheckStatus.NO_MATCH;
|
||||
}
|
||||
));
|
||||
@Nested
|
||||
class WithSvr2Prefix extends SecureValueRecoveryControllerBaseTest {
|
||||
protected WithSvr2Prefix() {
|
||||
super("/v2/svr");
|
||||
}
|
||||
}
|
||||
|
||||
static abstract class SecureValueRecoveryControllerBaseTest {
|
||||
private static final UUID USER_1 = UUID.randomUUID();
|
||||
private static final UUID USER_2 = UUID.randomUUID();
|
||||
private static final UUID USER_3 = UUID.randomUUID();
|
||||
private static final String E164_VALID = "+18005550123";
|
||||
private static final String E164_INVALID = "1(800)555-0123";
|
||||
|
||||
private final String pathPrefix;
|
||||
|
||||
@BeforeEach
|
||||
public void before() throws Exception {
|
||||
Mockito.reset(ACCOUNTS_MANAGER);
|
||||
Mockito.when(ACCOUNTS_MANAGER.getByE164(E164_VALID)).thenReturn(Optional.of(account(USER_1)));
|
||||
}
|
||||
|
||||
protected SecureValueRecoveryControllerBaseTest(final String pathPrefix) {
|
||||
this.pathPrefix = pathPrefix;
|
||||
}
|
||||
|
||||
enum CheckStatus {
|
||||
MATCH,
|
||||
NO_MATCH,
|
||||
INVALID
|
||||
}
|
||||
|
||||
private Map<String, CheckStatus> parseCheckResponse(final Response response) {
|
||||
final AuthCheckResponseV2 authCheckResponseV2 = response.readEntity(AuthCheckResponseV2.class);
|
||||
return authCheckResponseV2.matches().entrySet().stream().collect(Collectors.toMap(
|
||||
Map.Entry::getKey, e -> switch (e.getValue()) {
|
||||
case MATCH -> CheckStatus.MATCH;
|
||||
case INVALID -> CheckStatus.INVALID;
|
||||
case NO_MATCH -> CheckStatus.NO_MATCH;
|
||||
}
|
||||
));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testOneMatch() {
|
||||
validate(Map.of(
|
||||
token(USER_1, day(1)), CheckStatus.MATCH,
|
||||
token(USER_2, day(1)), CheckStatus.NO_MATCH,
|
||||
token(USER_3, day(1)), CheckStatus.NO_MATCH
|
||||
), day(2));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNoMatch() {
|
||||
validate(Map.of(
|
||||
token(USER_2, day(1)), CheckStatus.NO_MATCH,
|
||||
token(USER_3, day(1)), CheckStatus.NO_MATCH
|
||||
), day(2));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSomeInvalid() {
|
||||
final ExternalServiceCredentials user1Cred = credentials(USER_1, day(1));
|
||||
final ExternalServiceCredentials user2Cred = credentials(USER_2, day(1));
|
||||
final ExternalServiceCredentials user3Cred = credentials(USER_3, day(1));
|
||||
|
||||
final String fakeToken = token(new ExternalServiceCredentials(user2Cred.username(), user3Cred.password()));
|
||||
validate(Map.of(
|
||||
token(user1Cred), CheckStatus.MATCH,
|
||||
token(user2Cred), CheckStatus.NO_MATCH,
|
||||
fakeToken, CheckStatus.INVALID
|
||||
), day(2));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSomeExpired() {
|
||||
validate(Map.of(
|
||||
token(USER_1, day(100)), CheckStatus.MATCH,
|
||||
token(USER_2, day(100)), CheckStatus.NO_MATCH,
|
||||
token(USER_3, day(10)), CheckStatus.INVALID,
|
||||
token(USER_3, day(20)), CheckStatus.INVALID
|
||||
), day(110));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSomeHaveNewerVersions() {
|
||||
validate(Map.of(
|
||||
token(USER_1, day(10)), CheckStatus.INVALID,
|
||||
token(USER_1, day(20)), CheckStatus.MATCH,
|
||||
token(USER_2, day(10)), CheckStatus.NO_MATCH,
|
||||
token(USER_3, day(20)), CheckStatus.NO_MATCH,
|
||||
token(USER_3, day(10)), CheckStatus.INVALID
|
||||
), day(25));
|
||||
}
|
||||
|
||||
private void validate(
|
||||
final Map<String, CheckStatus> expected,
|
||||
final long nowMillis) {
|
||||
CLOCK.setTimeMillis(nowMillis);
|
||||
final AuthCheckRequest request = new AuthCheckRequest(E164_VALID, List.copyOf(expected.keySet()));
|
||||
final Response response = RESOURCES.getJerseyTest().target(pathPrefix + "/auth/check")
|
||||
.request()
|
||||
.post(Entity.entity(request, MediaType.APPLICATION_JSON));
|
||||
try (response) {
|
||||
assertEquals(200, response.getStatus());
|
||||
final Map<String, CheckStatus> res = parseCheckResponse(response);
|
||||
assertEquals(expected, res);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testHttpResponseCodeSuccess() {
|
||||
final Map<String, CheckStatus> expected = Map.of(
|
||||
token(USER_1, day(10)), CheckStatus.INVALID,
|
||||
token(USER_1, day(20)), CheckStatus.MATCH,
|
||||
token(USER_2, day(10)), CheckStatus.NO_MATCH,
|
||||
token(USER_3, day(20)), CheckStatus.NO_MATCH,
|
||||
token(USER_3, day(10)), CheckStatus.INVALID
|
||||
);
|
||||
|
||||
CLOCK.setTimeMillis(day(25));
|
||||
|
||||
final AuthCheckRequest in = new AuthCheckRequest(E164_VALID, List.copyOf(expected.keySet()));
|
||||
|
||||
final Response response = RESOURCES.getJerseyTest()
|
||||
.target(pathPrefix + "/auth/check")
|
||||
.request()
|
||||
.post(Entity.entity(in, MediaType.APPLICATION_JSON));
|
||||
|
||||
try (response) {
|
||||
assertEquals(200, response.getStatus());
|
||||
assertEquals(expected, parseCheckResponse(response));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testHttpResponseCodeWhenInvalidNumber() {
|
||||
final AuthCheckRequest in = new AuthCheckRequest(E164_INVALID, Collections.singletonList("1"));
|
||||
final Response response = RESOURCES.getJerseyTest()
|
||||
.target(pathPrefix + "/auth/check")
|
||||
.request()
|
||||
.post(Entity.entity(in, MediaType.APPLICATION_JSON));
|
||||
|
||||
try (response) {
|
||||
assertEquals(422, response.getStatus());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testHttpResponseCodeWhenTooManyTokens() {
|
||||
final AuthCheckRequest inOkay = new AuthCheckRequest(E164_VALID, List.of(
|
||||
"1", "2", "3", "4", "5", "6", "7", "8", "9", "10"
|
||||
));
|
||||
final AuthCheckRequest inTooMany = new AuthCheckRequest(E164_VALID, List.of(
|
||||
"1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11"
|
||||
));
|
||||
final AuthCheckRequest inNoTokens = new AuthCheckRequest(E164_VALID, Collections.emptyList());
|
||||
|
||||
final Response responseOkay = RESOURCES.getJerseyTest()
|
||||
.target(pathPrefix + "/auth/check")
|
||||
.request()
|
||||
.post(Entity.entity(inOkay, MediaType.APPLICATION_JSON));
|
||||
|
||||
final Response responseError1 = RESOURCES.getJerseyTest()
|
||||
.target(pathPrefix + "/auth/check")
|
||||
.request()
|
||||
.post(Entity.entity(inTooMany, MediaType.APPLICATION_JSON));
|
||||
|
||||
final Response responseError2 = RESOURCES.getJerseyTest()
|
||||
.target(pathPrefix + "/auth/check")
|
||||
.request()
|
||||
.post(Entity.entity(inNoTokens, MediaType.APPLICATION_JSON));
|
||||
|
||||
try (responseOkay; responseError1; responseError2) {
|
||||
assertEquals(200, responseOkay.getStatus());
|
||||
assertEquals(422, responseError1.getStatus());
|
||||
assertEquals(422, responseError2.getStatus());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testHttpResponseCodeWhenPasswordsMissing() {
|
||||
final Response response = RESOURCES.getJerseyTest()
|
||||
.target(pathPrefix + "/auth/check")
|
||||
.request()
|
||||
.post(Entity.entity("""
|
||||
{
|
||||
"number": "123"
|
||||
}
|
||||
""", MediaType.APPLICATION_JSON));
|
||||
|
||||
try (response) {
|
||||
assertEquals(422, response.getStatus());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testHttpResponseCodeWhenNumberMissing() {
|
||||
final Response response = RESOURCES.getJerseyTest()
|
||||
.target(pathPrefix + "/auth/check")
|
||||
.request()
|
||||
.post(Entity.entity("""
|
||||
{
|
||||
"passwords": ["aaa:bbb"]
|
||||
}
|
||||
""", MediaType.APPLICATION_JSON));
|
||||
|
||||
try (response) {
|
||||
assertEquals(422, response.getStatus());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testHttpResponseCodeWhenExtraFields() {
|
||||
final Response response = RESOURCES.getJerseyTest()
|
||||
.target(pathPrefix + "/auth/check")
|
||||
.request()
|
||||
.post(Entity.entity("""
|
||||
{
|
||||
"number": "+18005550123",
|
||||
"passwords": ["aaa:bbb"],
|
||||
"unexpected": "value"
|
||||
}
|
||||
""", MediaType.APPLICATION_JSON));
|
||||
|
||||
try (response) {
|
||||
assertEquals(200, response.getStatus());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAcceptsPasswordsOrTokens() {
|
||||
final Response passwordsResponse = RESOURCES.getJerseyTest()
|
||||
.target(pathPrefix + "/auth/check")
|
||||
.request()
|
||||
.post(Entity.entity("""
|
||||
{
|
||||
"number": "+18005550123",
|
||||
"passwords": ["aaa:bbb"]
|
||||
}
|
||||
""", MediaType.APPLICATION_JSON));
|
||||
try (passwordsResponse) {
|
||||
assertEquals(200, passwordsResponse.getStatus());
|
||||
}
|
||||
|
||||
final Response tokensResponse = RESOURCES.getJerseyTest()
|
||||
.target(pathPrefix + "/auth/check")
|
||||
.request()
|
||||
.post(Entity.entity("""
|
||||
{
|
||||
"number": "+18005550123",
|
||||
"tokens": ["aaa:bbb"]
|
||||
}
|
||||
""", MediaType.APPLICATION_JSON));
|
||||
try (tokensResponse) {
|
||||
assertEquals(200, tokensResponse.getStatus());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testHttpResponseCodeWhenNotAJson() {
|
||||
final Response response = RESOURCES.getJerseyTest()
|
||||
.target(pathPrefix + "/auth/check")
|
||||
.request()
|
||||
.post(Entity.entity("random text", MediaType.APPLICATION_JSON));
|
||||
|
||||
try (response) {
|
||||
assertEquals(400, response.getStatus());
|
||||
}
|
||||
}
|
||||
|
||||
private String token(final UUID uuid, final long timeMillis) {
|
||||
return token(credentials(uuid, timeMillis));
|
||||
}
|
||||
|
||||
private static String token(final ExternalServiceCredentials credentials) {
|
||||
return credentials.username() + ":" + credentials.password();
|
||||
}
|
||||
|
||||
private ExternalServiceCredentials credentials(final UUID uuid, final long timeMillis) {
|
||||
CLOCK.setTimeMillis(timeMillis);
|
||||
return CREDENTIAL_GENERATOR.generateForUuid(uuid);
|
||||
}
|
||||
|
||||
private static long day(final int n) {
|
||||
return TimeUnit.DAYS.toMillis(n);
|
||||
}
|
||||
|
||||
private static Account account(final UUID uuid) {
|
||||
final Account a = new Account();
|
||||
a.setUuid(uuid);
|
||||
return a;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,70 @@
|
|||
/*
|
||||
* Copyright 2023 Signal Messenger, LLC
|
||||
* SPDX-License-Identifier: AGPL-3.0-only
|
||||
*/
|
||||
|
||||
package org.whispersystems.textsecuregcm.controllers;
|
||||
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
import static org.whispersystems.textsecuregcm.util.MockUtils.randomSecretBytes;
|
||||
|
||||
import io.dropwizard.auth.AuthValueFactoryProvider;
|
||||
import io.dropwizard.testing.junit5.DropwizardExtensionsSupport;
|
||||
import io.dropwizard.testing.junit5.ResourceExtension;
|
||||
import org.glassfish.jersey.test.grizzly.GrizzlyWebTestContainerFactory;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.whispersystems.textsecuregcm.auth.AuthenticatedDevice;
|
||||
import org.whispersystems.textsecuregcm.auth.ExternalServiceCredentials;
|
||||
import org.whispersystems.textsecuregcm.auth.ExternalServiceCredentialsGenerator;
|
||||
import org.whispersystems.textsecuregcm.configuration.SecureValueRecoveryConfiguration;
|
||||
import org.whispersystems.textsecuregcm.tests.util.AuthHelper;
|
||||
import org.whispersystems.textsecuregcm.util.MutableClock;
|
||||
import org.whispersystems.textsecuregcm.util.SystemMapper;
|
||||
import java.time.Instant;
|
||||
import java.util.HexFormat;
|
||||
|
||||
@ExtendWith(DropwizardExtensionsSupport.class)
|
||||
public class SecureValueRecoveryBControllerTest {
|
||||
|
||||
private static final SecureValueRecoveryConfiguration CFG = new SecureValueRecoveryConfiguration(
|
||||
"",
|
||||
randomSecretBytes(32),
|
||||
randomSecretBytes(32),
|
||||
null,
|
||||
null,
|
||||
null
|
||||
);
|
||||
|
||||
private static final MutableClock CLOCK = new MutableClock();
|
||||
|
||||
private static final ExternalServiceCredentialsGenerator CREDENTIAL_GENERATOR =
|
||||
SecureValueRecoveryBController.credentialsGenerator(CFG, CLOCK);
|
||||
|
||||
private static final SecureValueRecoveryBController CONTROLLER =
|
||||
new SecureValueRecoveryBController(CREDENTIAL_GENERATOR);
|
||||
|
||||
private static final ResourceExtension RESOURCES = ResourceExtension.builder()
|
||||
.addProvider(AuthHelper.getAuthFilter())
|
||||
.addProvider(new AuthValueFactoryProvider.Binder<>(AuthenticatedDevice.class))
|
||||
.setMapper(SystemMapper.jsonMapper())
|
||||
.setTestContainerFactory(new GrizzlyWebTestContainerFactory())
|
||||
.addResource(CONTROLLER)
|
||||
.build();
|
||||
|
||||
@Test
|
||||
public void testGetCredentials() {
|
||||
CLOCK.setTimeInstant(Instant.ofEpochSecond(123));
|
||||
final ExternalServiceCredentials creds = RESOURCES.getJerseyTest()
|
||||
.target("/v1/svrb/auth")
|
||||
.request()
|
||||
.header("Authorization", AuthHelper.getAuthHeader(AuthHelper.VALID_UUID, AuthHelper.VALID_PASSWORD))
|
||||
.get(ExternalServiceCredentials.class);
|
||||
|
||||
assertThat(HexFormat.of().parseHex(creds.username())).hasSize(16);
final String[] split = creds.password().split(":", 2);
assertThat(Long.parseLong(split[0])).isEqualTo(123);
|
||||
}
|
||||
}
|
|
@ -1,324 +0,0 @@
|
|||
/*
|
||||
* Copyright 2023 Signal Messenger, LLC
|
||||
* SPDX-License-Identifier: AGPL-3.0-only
|
||||
*/
|
||||
|
||||
package org.whispersystems.textsecuregcm.controllers;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
|
||||
import io.dropwizard.testing.junit5.ResourceExtension;
|
||||
import jakarta.ws.rs.client.Entity;
|
||||
import jakarta.ws.rs.core.MediaType;
|
||||
import jakarta.ws.rs.core.Response;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.mockito.Mockito;
|
||||
import org.whispersystems.textsecuregcm.auth.ExternalServiceCredentials;
|
||||
import org.whispersystems.textsecuregcm.auth.ExternalServiceCredentialsGenerator;
|
||||
import org.whispersystems.textsecuregcm.entities.AuthCheckRequest;
|
||||
import org.whispersystems.textsecuregcm.storage.Account;
|
||||
import org.whispersystems.textsecuregcm.storage.AccountsManager;
|
||||
import org.whispersystems.textsecuregcm.util.MutableClock;
|
||||
import org.whispersystems.textsecuregcm.util.TestRandomUtil;
|
||||
|
||||
abstract class SecureValueRecoveryControllerBaseTest {
|
||||
|
||||
private static final UUID USER_1 = UUID.randomUUID();
|
||||
|
||||
private static final UUID USER_2 = UUID.randomUUID();
|
||||
|
||||
private static final UUID USER_3 = UUID.randomUUID();
|
||||
|
||||
private static final String E164_VALID = "+18005550123";
|
||||
|
||||
private static final String E164_INVALID = "1(800)555-0123";
|
||||
|
||||
  private final String pathPrefix;
  private final ResourceExtension resourceExtension;
  private final AccountsManager mockAccountsManager;
  private final ExternalServiceCredentialsGenerator credentialsGenerator;
  private final MutableClock clock;

  @BeforeEach
  public void before() throws Exception {
    Mockito.when(mockAccountsManager.getByE164(E164_VALID)).thenReturn(Optional.of(account(USER_1)));
  }

  protected SecureValueRecoveryControllerBaseTest(
      final String pathPrefix,
      final AccountsManager mockAccountsManager,
      final MutableClock mutableClock,
      final ResourceExtension resourceExtension,
      final ExternalServiceCredentialsGenerator credentialsGenerator) {
    this.pathPrefix = pathPrefix;
    this.resourceExtension = resourceExtension;
    this.mockAccountsManager = mockAccountsManager;
    this.credentialsGenerator = credentialsGenerator;
    this.clock = mutableClock;
  }

  enum CheckStatus {
    MATCH,
    NO_MATCH,
    INVALID
  }
  abstract Map<String, CheckStatus> parseCheckResponse(Response response);

  @Test
  public void testOneMatch() throws Exception {
    validate(Map.of(
        token(USER_1, day(1)), CheckStatus.MATCH,
        token(USER_2, day(1)), CheckStatus.NO_MATCH,
        token(USER_3, day(1)), CheckStatus.NO_MATCH
    ), day(2));
  }

  @Test
  public void testNoMatch() throws Exception {
    validate(Map.of(
        token(USER_2, day(1)), CheckStatus.NO_MATCH,
        token(USER_3, day(1)), CheckStatus.NO_MATCH
    ), day(2));
  }

  @Test
  public void testSomeInvalid() throws Exception {
    final ExternalServiceCredentials user1Cred = credentials(USER_1, day(1));
    final ExternalServiceCredentials user2Cred = credentials(USER_2, day(1));
    final ExternalServiceCredentials user3Cred = credentials(USER_3, day(1));

    final String fakeToken = token(new ExternalServiceCredentials(user2Cred.username(), user3Cred.password()));
    validate(Map.of(
        token(user1Cred), CheckStatus.MATCH,
        token(user2Cred), CheckStatus.NO_MATCH,
        fakeToken, CheckStatus.INVALID
    ), day(2));
  }

  @Test
  public void testSomeExpired() throws Exception {
    validate(Map.of(
        token(USER_1, day(100)), CheckStatus.MATCH,
        token(USER_2, day(100)), CheckStatus.NO_MATCH,
        token(USER_3, day(10)), CheckStatus.INVALID,
        token(USER_3, day(20)), CheckStatus.INVALID
    ), day(110));
  }

  @Test
  public void testSomeHaveNewerVersions() throws Exception {
    validate(Map.of(
        token(USER_1, day(10)), CheckStatus.INVALID,
        token(USER_1, day(20)), CheckStatus.MATCH,
        token(USER_2, day(10)), CheckStatus.NO_MATCH,
        token(USER_3, day(20)), CheckStatus.NO_MATCH,
        token(USER_3, day(10)), CheckStatus.INVALID
    ), day(25));
  }

  private void validate(
      final Map<String, CheckStatus> expected,
      final long nowMillis) throws Exception {
    clock.setTimeMillis(nowMillis);
    final AuthCheckRequest request = new AuthCheckRequest(E164_VALID, List.copyOf(expected.keySet()));
    final Response response = resourceExtension.getJerseyTest().target(pathPrefix + "/backup/auth/check")
        .request()
        .post(Entity.entity(request, MediaType.APPLICATION_JSON));
    try (response) {
      assertEquals(200, response.getStatus());
      final Map<String, CheckStatus> res = parseCheckResponse(response);
      assertEquals(expected, res);
    }
  }

  @Test
  public void testHttpResponseCodeSuccess() throws Exception {
    final Map<String, CheckStatus> expected = Map.of(
        token(USER_1, day(10)), CheckStatus.INVALID,
        token(USER_1, day(20)), CheckStatus.MATCH,
        token(USER_2, day(10)), CheckStatus.NO_MATCH,
        token(USER_3, day(20)), CheckStatus.NO_MATCH,
        token(USER_3, day(10)), CheckStatus.INVALID
    );

    clock.setTimeMillis(day(25));

    final AuthCheckRequest in = new AuthCheckRequest(E164_VALID, List.copyOf(expected.keySet()));

    final Response response = resourceExtension.getJerseyTest()
        .target(pathPrefix + "/backup/auth/check")
        .request()
        .post(Entity.entity(in, MediaType.APPLICATION_JSON));

    try (response) {
      assertEquals(200, response.getStatus());
      assertEquals(expected, parseCheckResponse(response));
    }
  }

  @Test
  public void testHttpResponseCodeWhenInvalidNumber() throws Exception {
    final AuthCheckRequest in = new AuthCheckRequest(E164_INVALID, Collections.singletonList("1"));
    final Response response = resourceExtension.getJerseyTest()
        .target(pathPrefix + "/backup/auth/check")
        .request()
        .post(Entity.entity(in, MediaType.APPLICATION_JSON));

    try (response) {
      assertEquals(422, response.getStatus());
    }
  }

  @Test
  public void testHttpResponseCodeWhenTooManyTokens() throws Exception {
    final AuthCheckRequest inOkay = new AuthCheckRequest(E164_VALID, List.of(
        "1", "2", "3", "4", "5", "6", "7", "8", "9", "10"
    ));
    final AuthCheckRequest inTooMany = new AuthCheckRequest(E164_VALID, List.of(
        "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11"
    ));
    final AuthCheckRequest inNoTokens = new AuthCheckRequest(E164_VALID, Collections.emptyList());

    final Response responseOkay = resourceExtension.getJerseyTest()
        .target(pathPrefix + "/backup/auth/check")
        .request()
        .post(Entity.entity(inOkay, MediaType.APPLICATION_JSON));

    final Response responseError1 = resourceExtension.getJerseyTest()
        .target(pathPrefix + "/backup/auth/check")
        .request()
        .post(Entity.entity(inTooMany, MediaType.APPLICATION_JSON));

    final Response responseError2 = resourceExtension.getJerseyTest()
        .target(pathPrefix + "/backup/auth/check")
        .request()
        .post(Entity.entity(inNoTokens, MediaType.APPLICATION_JSON));

    try (responseOkay; responseError1; responseError2) {
      assertEquals(200, responseOkay.getStatus());
      assertEquals(422, responseError1.getStatus());
      assertEquals(422, responseError2.getStatus());
    }
  }

  @Test
  public void testHttpResponseCodeWhenPasswordsMissing() throws Exception {
    final Response response = resourceExtension.getJerseyTest()
        .target(pathPrefix + "/backup/auth/check")
        .request()
        .post(Entity.entity("""
            {
              "number": "123"
            }
            """, MediaType.APPLICATION_JSON));

    try (response) {
      assertEquals(422, response.getStatus());
    }
  }

  @Test
  public void testHttpResponseCodeWhenNumberMissing() throws Exception {
    final Response response = resourceExtension.getJerseyTest()
        .target(pathPrefix + "/backup/auth/check")
        .request()
        .post(Entity.entity("""
            {
              "passwords": ["aaa:bbb"]
            }
            """, MediaType.APPLICATION_JSON));

    try (response) {
      assertEquals(422, response.getStatus());
    }
  }

  @Test
  public void testHttpResponseCodeWhenExtraFields() throws Exception {
    final Response response = resourceExtension.getJerseyTest()
        .target(pathPrefix + "/backup/auth/check")
        .request()
        .post(Entity.entity("""
            {
              "number": "+18005550123",
              "passwords": ["aaa:bbb"],
              "unexpected": "value"
            }
            """, MediaType.APPLICATION_JSON));

    try (response) {
      assertEquals(200, response.getStatus());
    }
  }

  @Test
  public void testAcceptsPasswordsOrTokens() {
    final Response passwordsResponse = resourceExtension.getJerseyTest()
        .target(pathPrefix + "/backup/auth/check")
        .request()
        .post(Entity.entity("""
            {
              "number": "+18005550123",
              "passwords": ["aaa:bbb"]
            }
            """, MediaType.APPLICATION_JSON));
    try (passwordsResponse) {
      assertEquals(200, passwordsResponse.getStatus());
    }

    final Response tokensResponse = resourceExtension.getJerseyTest()
        .target(pathPrefix + "/backup/auth/check")
        .request()
        .post(Entity.entity("""
            {
              "number": "+18005550123",
              "tokens": ["aaa:bbb"]
            }
            """, MediaType.APPLICATION_JSON));
    try (tokensResponse) {
      assertEquals(200, tokensResponse.getStatus());
    }
  }

  @Test
  public void testHttpResponseCodeWhenNotAJson() throws Exception {
    final Response response = resourceExtension.getJerseyTest()
        .target(pathPrefix + "/backup/auth/check")
        .request()
        .post(Entity.entity("random text", MediaType.APPLICATION_JSON));

    try (response) {
      assertEquals(400, response.getStatus());
    }
  }

  String token(final UUID uuid, final long timeMillis) {
    return token(credentials(uuid, timeMillis));
  }

  static String token(final ExternalServiceCredentials credentials) {
    return credentials.username() + ":" + credentials.password();
  }

  private ExternalServiceCredentials credentials(final UUID uuid, final long timeMillis) {
    clock.setTimeMillis(timeMillis);
    return credentialsGenerator.generateForUuid(uuid);
  }

  static long day(final int n) {
    return TimeUnit.DAYS.toMillis(n);
  }

  private static Account account(final UUID uuid) {
    final Account a = new Account();
    a.setUuid(uuid);
    return a;
  }
}
@@ -992,6 +992,44 @@ class SubscriptionControllerTest {
    verify(PLAY_MANAGER, times(1)).cancelAllActiveSubscriptions(oldPurchaseToken);
  }

  @Test
  void createReceiptChargeFailure() throws InvalidInputException, VerificationFailedException {
    final byte[] subscriberUserAndKey = new byte[32];
    Arrays.fill(subscriberUserAndKey, (byte) 1);
    final String subscriberId = Base64.getEncoder().encodeToString(subscriberUserAndKey);

    when(CLOCK.instant()).thenReturn(Instant.now());
    when(SUBSCRIPTIONS.get(any(), any()))
        .thenReturn(CompletableFuture.completedFuture(Subscriptions.GetResult.found(Subscriptions.Record.from(
            Arrays.copyOfRange(subscriberUserAndKey, 0, 16),
            Map.of(Subscriptions.KEY_PASSWORD, b(new byte[16]),
                Subscriptions.KEY_CREATED_AT, n(Instant.now().getEpochSecond()),
                Subscriptions.KEY_ACCESSED_AT, n(Instant.now().getEpochSecond()),
                Subscriptions.KEY_PROCESSOR_ID_CUSTOMER_ID,
                b(new ProcessorCustomer("customer", PaymentProvider.STRIPE).toDynamoBytes()),
                Subscriptions.KEY_SUBSCRIPTION_ID, s("subscriptionId"))))));
    when(STRIPE_MANAGER.getReceiptItem(any()))
        .thenReturn(CompletableFuture.failedFuture(new SubscriptionException.ChargeFailurePaymentRequired(
            PaymentProvider.STRIPE,
            new ChargeFailure("card_declined", "Insufficient funds", null, null, null))));

    final ReceiptCredentialRequest receiptRequest = new ClientZkReceiptOperations(
        ServerSecretParams.generate().getPublicParams()).createReceiptCredentialRequestContext(
        new ReceiptSerial(new byte[ReceiptSerial.SIZE])).getRequest();
    final Response response = RESOURCE_EXTENSION
        .target(String.format("/v1/subscription/%s/receipt_credentials", subscriberId))
        .request()
        .post(Entity.json(new SubscriptionController.GetReceiptCredentialsRequest(receiptRequest.serialize())));

    assertThat(response.getStatus()).isEqualTo(402);
    final Map responseMap = response.readEntity(Map.class);
    assertThat(responseMap.get("processor")).isEqualTo("STRIPE");
    assertThat(responseMap.get("chargeFailure")).asInstanceOf(
            InstanceOfAssertFactories.map(String.class, Object.class))
        .extracting("code")
        .isEqualTo("card_declined");
  }

  @ParameterizedTest
  @CsvSource({"5, P45D", "201, P13D"})
  public void createReceiptCredential(long level, Duration expectedExpirationWindow)
@@ -8,6 +8,7 @@ package org.whispersystems.textsecuregcm.redis;
import static org.junit.jupiter.api.Assumptions.assumeFalse;

import io.github.resilience4j.circuitbreaker.CallNotPermittedException;
import io.lettuce.core.FlushMode;
import io.lettuce.core.RedisClient;
import io.lettuce.core.RedisException;
import io.lettuce.core.RedisURI;
@@ -21,7 +22,6 @@ import java.net.ServerSocket;
import java.time.Duration;
import java.util.Arrays;
import java.util.List;
import org.junit.jupiter.api.extension.AfterAllCallback;
import org.junit.jupiter.api.extension.AfterEachCallback;
import org.junit.jupiter.api.extension.BeforeAllCallback;
import org.junit.jupiter.api.extension.BeforeEachCallback;
@@ -32,18 +32,18 @@ import org.whispersystems.textsecuregcm.util.RedisClusterUtil;
import redis.embedded.RedisServer;
import redis.embedded.exceptions.EmbeddedRedisException;

public class RedisClusterExtension implements BeforeAllCallback, BeforeEachCallback, AfterAllCallback,
    AfterEachCallback {
public class RedisClusterExtension implements BeforeAllCallback, BeforeEachCallback, AfterEachCallback,
    ExtensionContext.Store.CloseableResource {

  private static final Duration DEFAULT_TIMEOUT = Duration.ofSeconds(2);
  private static final int NODE_COUNT = 2;

  private static final RedisServer[] CLUSTER_NODES = new RedisServer[NODE_COUNT];
  private static ClientResources redisClientResources;

  private final Duration timeout;
  private final RetryConfiguration retryConfiguration;
  private FaultTolerantRedisClusterClient redisCluster;
  private ClientResources redisClientResources;

  public RedisClusterExtension(final Duration timeout, final RetryConfiguration retryConfiguration) {
    this.timeout = timeout;
@@ -56,35 +56,42 @@ public class RedisClusterExtension implements BeforeAllCallback, BeforeEachCallb
  }

  @Override
  public void afterAll(final ExtensionContext context) throws Exception {
    for (final RedisServer node : CLUSTER_NODES) {
      node.stop();
  public void close() throws Throwable {
    if (redisClientResources != null) {
      redisClientResources.shutdown().get();

      for (final RedisServer node : CLUSTER_NODES) {
        node.stop();
      }
    }

    redisClientResources = null;
  }

  @Override
  public void afterEach(final ExtensionContext context) throws Exception {
    redisCluster.shutdown();
    redisClientResources.shutdown().get();
  }

  @Override
  public void beforeAll(final ExtensionContext context) throws Exception {
    assumeFalse(System.getProperty("os.name").equalsIgnoreCase("windows"));

    for (int i = 0; i < NODE_COUNT; i++) {
      // We're occasionally seeing redis server startup failing due to the bind address being already in use.
      // To mitigate that, we're going to just retry a couple of times before failing the test.
      CLUSTER_NODES[i] = startWithRetries(3);
    }
    if (redisClientResources == null) {
      redisClientResources = ClientResources.builder().build();

    assembleCluster(CLUSTER_NODES);
      for (int i = 0; i < NODE_COUNT; i++) {
        // We're occasionally seeing redis server startup failing due to the bind address being already in use.
        // To mitigate that, we're going to just retry a couple of times before failing the test.
        CLUSTER_NODES[i] = startWithRetries(3);
      }

      assembleCluster(CLUSTER_NODES);
    }
  }

  @Override
  public void beforeEach(final ExtensionContext context) throws Exception {

    redisClientResources = ClientResources.builder().build();
    final CircuitBreakerConfiguration circuitBreakerConfig = new CircuitBreakerConfiguration();
    circuitBreakerConfig.setWaitDurationInOpenState(Duration.ofMillis(500));
    redisCluster = new FaultTolerantRedisClusterClient("test-cluster",
@@ -120,7 +127,7 @@ public class RedisClusterExtension implements BeforeAllCallback, BeforeEachCallb
      }
    });

    redisCluster.useCluster(connection -> connection.sync().flushall());
    redisCluster.useCluster(connection -> connection.sync().flushall(FlushMode.SYNC));
  }

  public static List<RedisURI> getRedisURIs() {
@@ -7,13 +7,12 @@ package org.whispersystems.textsecuregcm.redis;

import static org.junit.jupiter.api.Assumptions.assumeFalse;

import io.lettuce.core.FlushMode;
import io.lettuce.core.RedisURI;
import io.lettuce.core.resource.ClientResources;
import java.io.IOException;
import java.net.ServerSocket;
import java.time.Duration;
import io.lettuce.core.resource.ClientResources;
import org.junit.jupiter.api.extension.AfterAllCallback;
import org.junit.jupiter.api.extension.AfterEachCallback;
import org.junit.jupiter.api.extension.BeforeAllCallback;
import org.junit.jupiter.api.extension.BeforeEachCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
@@ -22,11 +21,12 @@ import org.whispersystems.textsecuregcm.configuration.RetryConfiguration;
import redis.embedded.RedisServer;
import redis.embedded.exceptions.EmbeddedRedisException;

public class RedisServerExtension implements BeforeAllCallback, BeforeEachCallback, AfterAllCallback, AfterEachCallback {
public class RedisServerExtension implements BeforeAllCallback, BeforeEachCallback, ExtensionContext.Store.CloseableResource {

  private static RedisServer redisServer;
  private static ClientResources redisClientResources;

  private FaultTolerantRedisClient faultTolerantRedisClient;
  private ClientResources redisClientResources;

  public static class RedisServerExtensionBuilder {

@@ -46,14 +46,18 @@ public class RedisServerExtension implements BeforeAllCallback, BeforeEachCallba
  public void beforeAll(final ExtensionContext context) throws Exception {
    assumeFalse(System.getProperty("os.name").equalsIgnoreCase("windows"));

    redisServer = RedisServer.builder()
        .setting("appendonly no")
        .setting("save \"\"")
        .setting("dir " + System.getProperty("java.io.tmpdir"))
        .port(getAvailablePort())
        .build();
    if (redisServer == null) {
      redisServer = RedisServer.builder()
          .setting("appendonly no")
          .setting("save \"\"")
          .setting("dir " + System.getProperty("java.io.tmpdir"))
          .port(getAvailablePort())
          .build();

    startWithRetries(3);
      redisClientResources = ClientResources.builder().build();

      startWithRetries(3);
    }
  }

  public static RedisURI getRedisURI() {
@@ -62,7 +66,6 @@ public class RedisServerExtension implements BeforeAllCallback, BeforeEachCallba

  @Override
  public void beforeEach(final ExtensionContext context) {
    redisClientResources = ClientResources.builder().build();
    final CircuitBreakerConfiguration circuitBreakerConfig = new CircuitBreakerConfiguration();
    circuitBreakerConfig.setWaitDurationInOpenState(Duration.ofMillis(500));
    faultTolerantRedisClient = new FaultTolerantRedisClient("test-redis-client",
@@ -72,19 +75,18 @@ public class RedisServerExtension implements BeforeAllCallback, BeforeEachCallba
        circuitBreakerConfig,
        new RetryConfiguration());

    faultTolerantRedisClient.useConnection(connection -> connection.sync().flushall());
    faultTolerantRedisClient.useConnection(connection -> connection.sync().flushall(FlushMode.SYNC));
  }

  @Override
  public void afterEach(final ExtensionContext context) throws InterruptedException {
    redisClientResources.shutdown().await();
  }

  @Override
  public void afterAll(final ExtensionContext context) {
  public void close() throws Throwable {
    if (redisServer != null) {
      redisClientResources.shutdown().await();
      redisServer.stop();
    }

    redisClientResources = null;
    redisServer = null;
  }

  public FaultTolerantRedisClient getRedisClient() {
@@ -32,7 +32,7 @@ import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.whispersystems.textsecuregcm.auth.ExternalServiceCredentials;
import org.whispersystems.textsecuregcm.auth.ExternalServiceCredentialsGenerator;
import org.whispersystems.textsecuregcm.configuration.SecureValueRecovery2Configuration;
import org.whispersystems.textsecuregcm.configuration.SecureValueRecoveryConfiguration;

class SecureValueRecovery2ClientTest {

@@ -55,7 +55,7 @@ class SecureValueRecovery2ClientTest {
    httpExecutor = Executors.newSingleThreadExecutor();
    retryExecutor = Executors.newSingleThreadScheduledExecutor();

    final SecureValueRecovery2Configuration config = new SecureValueRecovery2Configuration(
    final SecureValueRecoveryConfiguration config = new SecureValueRecoveryConfiguration(
        "http://localhost:" + wireMock.getPort(),
        randomSecretBytes(32),
        randomSecretBytes(32),
@@ -44,6 +44,7 @@ import org.whispersystems.textsecuregcm.entities.ApnRegistrationId;
import org.whispersystems.textsecuregcm.entities.ECSignedPreKey;
import org.whispersystems.textsecuregcm.entities.GcmRegistrationId;
import org.whispersystems.textsecuregcm.entities.KEMSignedPreKey;
import org.whispersystems.textsecuregcm.experiment.ExperimentEnrollmentManager;
import org.whispersystems.textsecuregcm.identity.IdentityType;
import org.whispersystems.textsecuregcm.redis.FaultTolerantRedisClient;
import org.whispersystems.textsecuregcm.redis.RedisClusterExtension;
@@ -66,6 +67,7 @@ public class AccountCreationDeletionIntegrationTest {
      DynamoDbExtensionSchema.Tables.USERNAMES,
      DynamoDbExtensionSchema.Tables.EC_KEYS,
      DynamoDbExtensionSchema.Tables.PQ_KEYS,
      DynamoDbExtensionSchema.Tables.PAGED_PQ_KEYS,
      DynamoDbExtensionSchema.Tables.REPEATED_USE_EC_SIGNED_PRE_KEYS,
      DynamoDbExtensionSchema.Tables.REPEATED_USE_KEM_SIGNED_PRE_KEYS);

@@ -105,7 +107,8 @@ public class AccountCreationDeletionIntegrationTest {
        new RepeatedUseECSignedPreKeyStore(dynamoDbAsyncClient,
            DynamoDbExtensionSchema.Tables.REPEATED_USE_EC_SIGNED_PRE_KEYS.tableName()),
        new RepeatedUseKEMSignedPreKeyStore(dynamoDbAsyncClient,
            DynamoDbExtensionSchema.Tables.REPEATED_USE_KEM_SIGNED_PRE_KEYS.tableName()));
            DynamoDbExtensionSchema.Tables.REPEATED_USE_KEM_SIGNED_PRE_KEYS.tableName()),
        mock(ExperimentEnrollmentManager.class));

    final ClientPublicKeys clientPublicKeys = new ClientPublicKeys(DYNAMO_DB_EXTENSION.getDynamoDbAsyncClient(),
        DynamoDbExtensionSchema.Tables.CLIENT_PUBLIC_KEYS.tableName());
@@ -36,6 +36,7 @@ import org.whispersystems.textsecuregcm.controllers.MismatchedDevicesException;
import org.whispersystems.textsecuregcm.entities.AccountAttributes;
import org.whispersystems.textsecuregcm.entities.ECSignedPreKey;
import org.whispersystems.textsecuregcm.entities.KEMSignedPreKey;
import org.whispersystems.textsecuregcm.experiment.ExperimentEnrollmentManager;
import org.whispersystems.textsecuregcm.identity.IdentityType;
import org.whispersystems.textsecuregcm.redis.FaultTolerantRedisClient;
import org.whispersystems.textsecuregcm.redis.RedisClusterExtension;
@@ -60,6 +61,7 @@ class AccountsManagerChangeNumberIntegrationTest {
      Tables.USERNAMES,
      Tables.EC_KEYS,
      Tables.PQ_KEYS,
      Tables.PAGED_PQ_KEYS,
      Tables.REPEATED_USE_EC_SIGNED_PRE_KEYS,
      Tables.REPEATED_USE_KEM_SIGNED_PRE_KEYS);

@@ -96,7 +98,8 @@ class AccountsManagerChangeNumberIntegrationTest {
        new RepeatedUseECSignedPreKeyStore(dynamoDbAsyncClient,
            DynamoDbExtensionSchema.Tables.REPEATED_USE_EC_SIGNED_PRE_KEYS.tableName()),
        new RepeatedUseKEMSignedPreKeyStore(dynamoDbAsyncClient,
            DynamoDbExtensionSchema.Tables.REPEATED_USE_KEM_SIGNED_PRE_KEYS.tableName()));
            DynamoDbExtensionSchema.Tables.REPEATED_USE_KEM_SIGNED_PRE_KEYS.tableName()),
        mock(ExperimentEnrollmentManager.class));

    final ClientPublicKeys clientPublicKeys = new ClientPublicKeys(DYNAMO_DB_EXTENSION.getDynamoDbAsyncClient(),
        DynamoDbExtensionSchema.Tables.CLIENT_PUBLIC_KEYS.tableName());
@@ -73,6 +73,7 @@ class AccountsManagerConcurrentModificationIntegrationTest {
      Tables.DELETED_ACCOUNTS,
      Tables.EC_KEYS,
      Tables.PQ_KEYS,
      Tables.PAGED_PQ_KEYS,
      Tables.REPEATED_USE_EC_SIGNED_PRE_KEYS,
      Tables.REPEATED_USE_KEM_SIGNED_PRE_KEYS);
@@ -38,6 +38,7 @@ import org.junit.jupiter.api.extension.RegisterExtension;
import org.mockito.Mockito;
import org.whispersystems.textsecuregcm.auth.DisconnectionRequestManager;
import org.whispersystems.textsecuregcm.configuration.dynamic.DynamicConfiguration;
import org.whispersystems.textsecuregcm.experiment.ExperimentEnrollmentManager;
import org.whispersystems.textsecuregcm.redis.FaultTolerantRedisClient;
import org.whispersystems.textsecuregcm.redis.RedisClusterExtension;
import org.whispersystems.textsecuregcm.securestorage.SecureStorageClient;
@@ -73,6 +74,7 @@ class AccountsManagerUsernameIntegrationTest {
      Tables.PNI_ASSIGNMENTS,
      Tables.EC_KEYS,
      Tables.PQ_KEYS,
      Tables.PAGED_PQ_KEYS,
      Tables.REPEATED_USE_EC_SIGNED_PRE_KEYS,
      Tables.REPEATED_USE_KEM_SIGNED_PRE_KEYS);

@@ -109,7 +111,8 @@ class AccountsManagerUsernameIntegrationTest {
        new RepeatedUseECSignedPreKeyStore(dynamoDbAsyncClient,
            DynamoDbExtensionSchema.Tables.REPEATED_USE_EC_SIGNED_PRE_KEYS.tableName()),
        new RepeatedUseKEMSignedPreKeyStore(dynamoDbAsyncClient,
            DynamoDbExtensionSchema.Tables.REPEATED_USE_KEM_SIGNED_PRE_KEYS.tableName()));
            DynamoDbExtensionSchema.Tables.REPEATED_USE_KEM_SIGNED_PRE_KEYS.tableName()),
        mock(ExperimentEnrollmentManager.class));

    accounts = Mockito.spy(new Accounts(
        Clock.systemUTC(),
@@ -36,6 +36,7 @@ import org.signal.libsignal.protocol.ecc.ECKeyPair;
import org.whispersystems.textsecuregcm.auth.DisconnectionRequestManager;
import org.whispersystems.textsecuregcm.configuration.dynamic.DynamicConfiguration;
import org.whispersystems.textsecuregcm.entities.DeviceInfo;
import org.whispersystems.textsecuregcm.experiment.ExperimentEnrollmentManager;
import org.whispersystems.textsecuregcm.identity.IdentityType;
import org.whispersystems.textsecuregcm.redis.RedisClusterExtension;
import org.whispersystems.textsecuregcm.redis.RedisServerExtension;
@@ -62,6 +63,7 @@ public class AddRemoveDeviceIntegrationTest {
      DynamoDbExtensionSchema.Tables.USERNAMES,
      DynamoDbExtensionSchema.Tables.EC_KEYS,
      DynamoDbExtensionSchema.Tables.PQ_KEYS,
      DynamoDbExtensionSchema.Tables.PAGED_PQ_KEYS,
      DynamoDbExtensionSchema.Tables.REPEATED_USE_EC_SIGNED_PRE_KEYS,
      DynamoDbExtensionSchema.Tables.REPEATED_USE_KEM_SIGNED_PRE_KEYS);

@@ -104,7 +106,8 @@ public class AddRemoveDeviceIntegrationTest {
        new RepeatedUseECSignedPreKeyStore(dynamoDbAsyncClient,
            DynamoDbExtensionSchema.Tables.REPEATED_USE_EC_SIGNED_PRE_KEYS.tableName()),
        new RepeatedUseKEMSignedPreKeyStore(dynamoDbAsyncClient,
            DynamoDbExtensionSchema.Tables.REPEATED_USE_KEM_SIGNED_PRE_KEYS.tableName()));
            DynamoDbExtensionSchema.Tables.REPEATED_USE_KEM_SIGNED_PRE_KEYS.tableName()),
        mock(ExperimentEnrollmentManager.class));

    final ClientPublicKeys clientPublicKeys = new ClientPublicKeys(DYNAMO_DB_EXTENSION.getDynamoDbAsyncClient(),
        DynamoDbExtensionSchema.Tables.CLIENT_PUBLIC_KEYS.tableName());
@@ -0,0 +1,19 @@
/*
 * Copyright 2025 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

package org.whispersystems.textsecuregcm.storage;

import com.apple.foundationdb.Database;
import com.apple.foundationdb.FDB;
import java.io.IOException;

interface FoundationDbDatabaseLifecycleManager {

  void initializeDatabase(final FDB fdb) throws IOException;

  Database getDatabase();

  void closeDatabase();
}
@@ -0,0 +1,43 @@
/*
 * Copyright 2025 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

package org.whispersystems.textsecuregcm.storage;

import com.apple.foundationdb.Database;
import com.apple.foundationdb.FDB;
import java.io.IOException;
import org.junit.jupiter.api.extension.BeforeAllCallback;
import org.junit.jupiter.api.extension.ExtensionContext;

class FoundationDbExtension implements BeforeAllCallback, ExtensionContext.Store.CloseableResource {

  private static FoundationDbDatabaseLifecycleManager databaseLifecycleManager;

  @Override
  public void beforeAll(final ExtensionContext context) throws IOException {
    if (databaseLifecycleManager == null) {
      final String serviceContainerName = System.getProperty("foundationDb.serviceContainerName");

      databaseLifecycleManager = serviceContainerName != null
          ? new ServiceContainerFoundationDbDatabaseLifecycleManager(serviceContainerName)
          : new TestcontainersFoundationDbDatabaseLifecycleManager();

      databaseLifecycleManager.initializeDatabase(FDB.selectAPIVersion(FoundationDbVersion.getFoundationDbApiVersion()));

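      // Registering this extension as a CloseableResource in the root context's store makes JUnit call close() once,
      // after the entire test run, so the shared database is torn down only when all tests have finished.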
      context.getRoot().getStore(ExtensionContext.Namespace.GLOBAL).put(getClass().getName(), this);
    }
  }

  public Database getDatabase() {
    return databaseLifecycleManager.getDatabase();
  }

  @Override
  public void close() throws Throwable {
    if (databaseLifecycleManager != null) {
      databaseLifecycleManager.closeDatabase();
    }
  }
}
@@ -0,0 +1,34 @@
/*
 * Copyright 2025 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

package org.whispersystems.textsecuregcm.storage;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.whispersystems.textsecuregcm.util.TestRandomUtil;
import java.nio.charset.StandardCharsets;

import static org.junit.jupiter.api.Assertions.assertArrayEquals;

public class FoundationDbTest {

  @RegisterExtension
  static FoundationDbExtension FOUNDATION_DB_EXTENSION = new FoundationDbExtension();

  @Test
  void setGetValue() {
    final byte[] key = "test".getBytes(StandardCharsets.UTF_8);
    final byte[] value = TestRandomUtil.nextBytes(16);

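    // Database#run executes the closure inside a transaction, retrying retryable errors and committing before it returns.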
    FOUNDATION_DB_EXTENSION.getDatabase().run(transaction -> {
      transaction.set(key, value);
      return null;
    });

    final byte[] retrievedValue = FOUNDATION_DB_EXTENSION.getDatabase().run(transaction -> transaction.get(key).join());

    assertArrayEquals(value, retrievedValue);
  }
}
@@ -8,18 +8,26 @@ package org.whispersystems.textsecuregcm.storage;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.when;

import java.util.List;
import java.util.Optional;
import java.util.UUID;
import org.jetbrains.annotations.NotNull;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.signal.libsignal.protocol.ecc.Curve;
import org.signal.libsignal.protocol.ecc.ECKeyPair;
import org.whispersystems.textsecuregcm.entities.ECPreKey;
import org.whispersystems.textsecuregcm.entities.ECSignedPreKey;
import org.whispersystems.textsecuregcm.entities.KEMSignedPreKey;
import org.whispersystems.textsecuregcm.experiment.ExperimentEnrollmentManager;
import org.whispersystems.textsecuregcm.storage.DynamoDbExtensionSchema.Tables;
import org.whispersystems.textsecuregcm.tests.util.KeysHelper;
import software.amazon.awssdk.services.dynamodb.DynamoDbAsyncClient;
@@ -27,10 +35,15 @@ import software.amazon.awssdk.services.dynamodb.DynamoDbAsyncClient;
class KeysManagerTest {

  private KeysManager keysManager;
  private ExperimentEnrollmentManager experimentEnrollmentManager;

  private SingleUseKEMPreKeyStore singleUseKEMPreKeyStore;
  private PagedSingleUseKEMPreKeyStore pagedSingleUseKEMPreKeyStore;

  @RegisterExtension
  static final DynamoDbExtension DYNAMO_DB_EXTENSION = new DynamoDbExtension(
      Tables.EC_KEYS, Tables.PQ_KEYS, Tables.REPEATED_USE_EC_SIGNED_PRE_KEYS, Tables.REPEATED_USE_KEM_SIGNED_PRE_KEYS);
      Tables.EC_KEYS, Tables.PQ_KEYS, Tables.PAGED_PQ_KEYS,
      Tables.REPEATED_USE_EC_SIGNED_PRE_KEYS, Tables.REPEATED_USE_KEM_SIGNED_PRE_KEYS);

  @RegisterExtension
  static final S3LocalStackExtension S3_EXTENSION = new S3LocalStackExtension("testbucket");
@@ -43,15 +56,20 @@ class KeysManagerTest {
  @BeforeEach
  void setup() {
    final DynamoDbAsyncClient dynamoDbAsyncClient = DYNAMO_DB_EXTENSION.getDynamoDbAsyncClient();
    experimentEnrollmentManager = mock(ExperimentEnrollmentManager.class);
    singleUseKEMPreKeyStore = new SingleUseKEMPreKeyStore(dynamoDbAsyncClient, Tables.PQ_KEYS.tableName());
    pagedSingleUseKEMPreKeyStore = new PagedSingleUseKEMPreKeyStore(dynamoDbAsyncClient,
        S3_EXTENSION.getS3Client(),
        DynamoDbExtensionSchema.Tables.PAGED_PQ_KEYS.tableName(),
        S3_EXTENSION.getBucketName());

    keysManager = new KeysManager(
        new SingleUseECPreKeyStore(dynamoDbAsyncClient, Tables.EC_KEYS.tableName()),
        new SingleUseKEMPreKeyStore(dynamoDbAsyncClient, Tables.PQ_KEYS.tableName()),
        new PagedSingleUseKEMPreKeyStore(dynamoDbAsyncClient,
            S3_EXTENSION.getS3Client(),
            DynamoDbExtensionSchema.Tables.PAGED_PQ_KEYS.tableName(),
            S3_EXTENSION.getBucketName()),
        singleUseKEMPreKeyStore,
        pagedSingleUseKEMPreKeyStore,
        new RepeatedUseECSignedPreKeyStore(dynamoDbAsyncClient, Tables.REPEATED_USE_EC_SIGNED_PRE_KEYS.tableName()),
        new RepeatedUseKEMSignedPreKeyStore(dynamoDbAsyncClient, Tables.REPEATED_USE_KEM_SIGNED_PRE_KEYS.tableName()));
        new RepeatedUseKEMSignedPreKeyStore(dynamoDbAsyncClient, Tables.REPEATED_USE_KEM_SIGNED_PRE_KEYS.tableName()),
        experimentEnrollmentManager);
  }

  @Test
@@ -67,18 +85,58 @@ class KeysManagerTest {
        "Repeatedly storing same key should have no effect");
  }

  @Test
  void storeKemOneTimePreKeys() {
  @ParameterizedTest
  @ValueSource(booleans = {true, false})
  void storeKemOneTimePreKeysClearsOld(boolean inPagedExperiment) {
    final List<KEMSignedPreKey> oldPreKeys = List.of(generateTestKEMSignedPreKey(1));

    // Leave a key in the 'other' key store
    (inPagedExperiment
        ? singleUseKEMPreKeyStore.store(ACCOUNT_UUID, DEVICE_ID, oldPreKeys)
        : pagedSingleUseKEMPreKeyStore.store(ACCOUNT_UUID, DEVICE_ID, oldPreKeys))
        .join();

    when(experimentEnrollmentManager.isEnrolled(ACCOUNT_UUID, KeysManager.PAGED_KEYS_EXPERIMENT_NAME))
        .thenReturn(inPagedExperiment);

    final List<KEMSignedPreKey> newPreKeys = List.of(generateTestKEMSignedPreKey(2));
    keysManager.storeKemOneTimePreKeys(ACCOUNT_UUID, DEVICE_ID, newPreKeys).join();

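    // Accounts enrolled in the paged-keys experiment write new KEM pre-keys to the paged store; all other accounts
    // still write to the legacy single-use store.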
    final int expectedPagedKeyCount = inPagedExperiment ? 1 : 0;
    final int expectedUnpagedKeyCount = 1 - expectedPagedKeyCount;
    assertEquals(1, keysManager.getPqCount(ACCOUNT_UUID, DEVICE_ID).join());
    assertEquals(expectedPagedKeyCount, pagedSingleUseKEMPreKeyStore.getCount(ACCOUNT_UUID, DEVICE_ID).join());
    assertEquals(expectedUnpagedKeyCount, singleUseKEMPreKeyStore.getCount(ACCOUNT_UUID, DEVICE_ID).join());

    final KEMSignedPreKey key = keysManager.takePQ(ACCOUNT_UUID, DEVICE_ID).join().orElseThrow();
    assertEquals(2, key.keyId());
  }

  @ParameterizedTest
  @ValueSource(booleans = {true, false})
  void storeKemOneTimePreKeys(boolean inPagedExperiment) {
    assertEquals(0, keysManager.getPqCount(ACCOUNT_UUID, DEVICE_ID).join(),
        "Initial pre-key count for an account should be zero");

    keysManager.storeKemOneTimePreKeys(ACCOUNT_UUID, DEVICE_ID, List.of(generateTestKEMSignedPreKey(1))).join();
    assertEquals(1, keysManager.getPqCount(ACCOUNT_UUID, DEVICE_ID).join());
    when(experimentEnrollmentManager.isEnrolled(ACCOUNT_UUID, KeysManager.PAGED_KEYS_EXPERIMENT_NAME))
        .thenReturn(inPagedExperiment);

    final int expectedPagedKeyCount = inPagedExperiment ? 1 : 0;
    final int expectedUnpagedKeyCount = 1 - expectedPagedKeyCount;

    keysManager.storeKemOneTimePreKeys(ACCOUNT_UUID, DEVICE_ID, List.of(generateTestKEMSignedPreKey(1))).join();
    assertEquals(1, keysManager.getPqCount(ACCOUNT_UUID, DEVICE_ID).join());
    assertEquals(expectedPagedKeyCount, pagedSingleUseKEMPreKeyStore.getCount(ACCOUNT_UUID, DEVICE_ID).join());
    assertEquals(expectedUnpagedKeyCount, singleUseKEMPreKeyStore.getCount(ACCOUNT_UUID, DEVICE_ID).join());

    keysManager.storeKemOneTimePreKeys(ACCOUNT_UUID, DEVICE_ID, List.of(generateTestKEMSignedPreKey(1))).join();
    assertEquals(1, keysManager.getPqCount(ACCOUNT_UUID, DEVICE_ID).join());
    assertEquals(expectedPagedKeyCount, pagedSingleUseKEMPreKeyStore.getCount(ACCOUNT_UUID, DEVICE_ID).join());
    assertEquals(expectedUnpagedKeyCount, singleUseKEMPreKeyStore.getCount(ACCOUNT_UUID, DEVICE_ID).join());
  }


  @Test
  void storeEcSignedPreKeys() {
    assertTrue(keysManager.getEcSignedPreKey(ACCOUNT_UUID, DEVICE_ID).join().isEmpty());
@@ -128,9 +186,24 @@ class KeysManagerTest {
  }

  @Test
  void testDeleteSingleUsePreKeysByAccount() {
  void takeWithExistingExperimentalKey() {
    // Put a key in the new store, even though we're not in the experiment. This simulates a take when operating
    // in mixed mode on experiment rollout
    pagedSingleUseKEMPreKeyStore.store(ACCOUNT_UUID, DEVICE_ID, List.of(generateTestKEMSignedPreKey(1))).join();

    assertEquals(1, keysManager.getPqCount(ACCOUNT_UUID, DEVICE_ID).join());
    assertEquals(1, keysManager.takePQ(ACCOUNT_UUID, DEVICE_ID).join().orElseThrow().keyId());
    assertEquals(0, keysManager.getPqCount(ACCOUNT_UUID, DEVICE_ID).join());
  }

  @ParameterizedTest
  @ValueSource(booleans = {true, false})
  void testDeleteSingleUsePreKeysByAccount(final boolean inPagedExperiment) {
    int keyId = 1;

    when(experimentEnrollmentManager.isEnrolled(ACCOUNT_UUID, KeysManager.PAGED_KEYS_EXPERIMENT_NAME))
        .thenReturn(inPagedExperiment);

    for (byte deviceId : new byte[] {DEVICE_ID, DEVICE_ID + 1}) {
      keysManager.storeEcOneTimePreKeys(ACCOUNT_UUID, deviceId, List.of(generateTestPreKey(keyId++))).join();
      keysManager.storeKemOneTimePreKeys(ACCOUNT_UUID, deviceId, List.of(generateTestKEMSignedPreKey(keyId++))).join();
@@ -155,10 +228,14 @@ class KeysManagerTest {
    }
  }

  @Test
  void testDeleteSingleUsePreKeysByAccountAndDevice() {
  @ParameterizedTest
  @ValueSource(booleans = {true, false})
  void testDeleteSingleUsePreKeysByAccountAndDevice(final boolean inPagedExperiment) {
    int keyId = 1;

    when(experimentEnrollmentManager.isEnrolled(ACCOUNT_UUID, KeysManager.PAGED_KEYS_EXPERIMENT_NAME))
        .thenReturn(inPagedExperiment);

    for (byte deviceId : new byte[] {DEVICE_ID, DEVICE_ID + 1}) {
      keysManager.storeEcOneTimePreKeys(ACCOUNT_UUID, deviceId, List.of(generateTestPreKey(keyId++))).join();
      keysManager.storeKemOneTimePreKeys(ACCOUNT_UUID, deviceId, List.of(generateTestKEMSignedPreKey(keyId++))).join();
@@ -50,8 +50,8 @@ class MessagesCacheGetItemsScriptTest {

    assertNotNull(messageAndScores);
    assertEquals(2, messageAndScores.size());
    final MessageProtos.Envelope resultEnvelope = MessageProtos.Envelope.parseFrom(
        messageAndScores.getFirst());
    final MessageProtos.Envelope resultEnvelope =
        EnvelopeUtil.expand(MessageProtos.Envelope.parseFrom(messageAndScores.getFirst()));

    assertEquals(serverGuid, resultEnvelope.getServerGuid());
  }
@@ -43,7 +43,7 @@ class MessagesCacheInsertScriptTest {

    insertScript.executeAsync(destinationUuid, deviceId, envelope1);

    assertEquals(List.of(envelope1), getStoredMessages(destinationUuid, deviceId));
    assertEquals(List.of(EnvelopeUtil.compress(envelope1)), getStoredMessages(destinationUuid, deviceId));

    final MessageProtos.Envelope envelope2 = MessageProtos.Envelope.newBuilder()
        .setServerTimestamp(Instant.now().getEpochSecond())
@@ -52,11 +52,13 @@ class MessagesCacheInsertScriptTest {

    insertScript.executeAsync(destinationUuid, deviceId, envelope2);

    assertEquals(List.of(envelope1, envelope2), getStoredMessages(destinationUuid, deviceId));
    assertEquals(List.of(EnvelopeUtil.compress(envelope1), EnvelopeUtil.compress(envelope2)),
        getStoredMessages(destinationUuid, deviceId));

    insertScript.executeAsync(destinationUuid, deviceId, envelope1);

    assertEquals(List.of(envelope1, envelope2), getStoredMessages(destinationUuid, deviceId),
    assertEquals(List.of(EnvelopeUtil.compress(envelope1), EnvelopeUtil.compress(envelope2)),
        getStoredMessages(destinationUuid, deviceId),
        "Messages with same GUID should be deduplicated");
  }

@@ -15,6 +15,7 @@ import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.whispersystems.textsecuregcm.entities.MessageProtos;
import org.whispersystems.textsecuregcm.redis.RedisClusterExtension;
import org.whispersystems.textsecuregcm.util.UUIDUtil;

class MessagesCacheRemoveByGuidScriptTest {

@@ -44,9 +45,8 @@ class MessagesCacheRemoveByGuidScriptTest {

    assertEquals(1, removedMessages.size());

    final MessageProtos.Envelope resultMessage = MessageProtos.Envelope.parseFrom(
        removedMessages.getFirst());
    final MessageProtos.Envelope resultMessage = MessageProtos.Envelope.parseFrom(removedMessages.getFirst());

    assertEquals(serverGuid, UUID.fromString(resultMessage.getServerGuid()));
    assertEquals(serverGuid, UUIDUtil.fromByteString(resultMessage.getServerGuidBinary()));
  }
}
@@ -0,0 +1,53 @@
/*
 * Copyright 2025 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

package org.whispersystems.textsecuregcm.storage;

import com.apple.foundationdb.Database;
import com.apple.foundationdb.FDB;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Manages the lifecycle of a database connected to a FoundationDB instance running as an external service container.
 */
class ServiceContainerFoundationDbDatabaseLifecycleManager implements FoundationDbDatabaseLifecycleManager {

  private final String foundationDbServiceContainerName;

  private Database database;

  private static final Logger log = LoggerFactory.getLogger(ServiceContainerFoundationDbDatabaseLifecycleManager.class);

  ServiceContainerFoundationDbDatabaseLifecycleManager(final String foundationDbServiceContainerName) {
    log.info("Using FoundationDB service container: {}", foundationDbServiceContainerName);
    this.foundationDbServiceContainerName = foundationDbServiceContainerName;
  }

  @Override
  public void initializeDatabase(final FDB fdb) throws IOException {
    final File clusterFile = File.createTempFile("fdb.cluster", "");
    clusterFile.deleteOnExit();

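    // FoundationDB cluster files use the format "description:id@host:port"; here the host is the linked service
    // container, addressed by its container name on FoundationDB's default port, 4500.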
    try (final FileWriter fileWriter = new FileWriter(clusterFile)) {
      fileWriter.write(String.format("docker:docker@%s:4500", foundationDbServiceContainerName));
    }

    database = fdb.open(clusterFile.getAbsolutePath());
  }

  @Override
  public Database getDatabase() {
    return database;
  }

  @Override
  public void closeDatabase() {
    database.close();
  }
}
@@ -0,0 +1,44 @@
/*
 * Copyright 2025 Signal Messenger, LLC
 * SPDX-License-Identifier: AGPL-3.0-only
 */

package org.whispersystems.textsecuregcm.storage;

import com.apple.foundationdb.Database;
import com.apple.foundationdb.FDB;
import earth.adi.testcontainers.containers.FoundationDBContainer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.utility.DockerImageName;

class TestcontainersFoundationDbDatabaseLifecycleManager implements FoundationDbDatabaseLifecycleManager {

  private FoundationDBContainer foundationDBContainer;
  private Database database;

  private static final String FOUNDATIONDB_IMAGE_NAME = "foundationdb/foundationdb:" + FoundationDbVersion.getFoundationDbVersion();

  private static final Logger log = LoggerFactory.getLogger(TestcontainersFoundationDbDatabaseLifecycleManager.class);

  @Override
  public void initializeDatabase(final FDB fdb) {
    log.info("Using Testcontainers FoundationDB container: {}", FOUNDATIONDB_IMAGE_NAME);

    foundationDBContainer = new FoundationDBContainer(DockerImageName.parse(FOUNDATIONDB_IMAGE_NAME));
    foundationDBContainer.start();

    database = fdb.open(foundationDBContainer.getClusterFilePath());
  }

  @Override
  public Database getDatabase() {
    return database;
  }

  @Override
  public void closeDatabase() {
    database.close();
    foundationDBContainer.close();
  }
}
@@ -42,6 +42,7 @@ import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.EnumSource;
import org.junit.jupiter.params.provider.MethodSource;
import org.whispersystems.textsecuregcm.controllers.RateLimitExceededException;
import org.whispersystems.textsecuregcm.storage.SubscriptionException;
import org.whispersystems.textsecuregcm.util.CompletableFutureTestUtil;
import org.whispersystems.textsecuregcm.util.MockUtils;
@@ -191,6 +192,15 @@ class GooglePlayBillingManagerTest {
    verifyNoInteractions(cancel);
  }

  @Test
  public void handle429() throws IOException {
    final HttpResponseException mockException = mock(HttpResponseException.class);
    when(mockException.getStatusCode()).thenReturn(429);
    when(subscriptionsv2Get.execute()).thenThrow(mockException);
    CompletableFutureTestUtil.assertFailsWithCause(
        RateLimitExceededException.class, googlePlayBillingManager.getSubscriptionInformation(PURCHASE_TOKEN));
  }

  @Test
  public void getReceiptUnacknowledged() throws IOException {
    when(subscriptionsv2Get.execute()).thenReturn(new SubscriptionPurchaseV2()
@@ -53,6 +53,9 @@ directoryV2.client.userIdTokenSharedSecret: bbcdefghijklmnopqrstuvwxyz0123456789
svr2.userAuthenticationTokenSharedSecret: abcdefghijklmnopqrstuvwxyz0123456789ABCDEFG= # base64-encoded secret shared with SVR2 to generate auth tokens for Signal users
svr2.userIdTokenSharedSecret: bbcdefghijklmnopqrstuvwxyz0123456789ABCDEFG= # base64-encoded secret shared with SVR2 to generate auth identity tokens for Signal users

svrb.userAuthenticationTokenSharedSecret: abcdefghijklmnopqrstuvwxyz0123456789ABCDEFG= # base64-encoded secret shared with SVRB to generate auth tokens for Signal users
svrb.userIdTokenSharedSecret: bbcdefghijklmnopqrstuvwxyz0123456789ABCDEFG= # base64-encoded secret shared with SVRB to generate auth identity tokens for Signal users

tus.userAuthenticationTokenSharedSecret: abcdefghijklmnopqrstuvwxyz0123456789ABCDEFG=

# The private key below was generated exclusively for testing purposes. Do not use it in any other context.
@@ -223,6 +223,35 @@ svr2:
      9Kxq0DY7RCEpdHMCKcOL
      -----END CERTIFICATE-----

svrb:
  uri: svrb.example.com
  userAuthenticationTokenSharedSecret: secret://svrb.userAuthenticationTokenSharedSecret
  userIdTokenSharedSecret: secret://svrb.userIdTokenSharedSecret
  svrCaCertificates:
    # this is a randomly generated test certificate
    - |
      -----BEGIN CERTIFICATE-----
      MIIDazCCAlOgAwIBAgIUW5lcNWkuynRVc8Rq5pO6mHQBuZAwDQYJKoZIhvcNAQEL
      BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM
      GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yNDAzMjUwMzE4MTNaFw0yOTAz
      MjQwMzE4MTNaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw
      HwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggEiMA0GCSqGSIb3DQEB
      AQUAA4IBDwAwggEKAoIBAQCfH4Um+fv2r4KudhD37/UXp8duRLTmp4XvpBTpDHpD
      2HF8p2yThVKlJnMkP/9Ey1Rb0vhxO7DCltLdW8IYcxJuHoyMvyhGUEtxxkOZbrk8
      ciUR9jTZ37x7vXRGj/RxcdlS6iD0MeF0D/LAkImt4T/kiKwDbENrVEnYWJmipCKP
      ribxWky7HqxDCoYMQr0zatxB3A9mx5stH+H3kbw3CZcm+ugF9ZIKDEVHb0lf28gq
      llmD120q/vs9YV3rzVL7sBGDqf6olkulvHQJKElZg2rdcHWFcngSlU2BjR04oyuH
      c/SSiLSB3YB0tdFGta5uorXyV1y7RElPeBfOfvEjsG3TAgMBAAGjUzBRMB0GA1Ud
      DgQWBBQX+xlgSWWbDjv0SrJ+h67xauJ80zAfBgNVHSMEGDAWgBQX+xlgSWWbDjv0
      SrJ+h67xauJ80zAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQAw
      ZG2MCCjscn6h/QOoJU+IDfa68OqLq0I37gMnLMde4yEhAmm//miePIq4Uz9GRJ+h
      rAmdEnspKgyQ93PjF7Xpk/JdJA4B1bIrsOl/cSwqx2sFhRt8Kt1DHGlGWXqOaHRP
      UkZ86MyRL3sXly6WkxEYxZJeQaOzMy2XmQh7grzrlTBuSI+0xf7vsRRDipxr6LVQ
      6qGWyGODLLc2JD1IXj/1HpRVT2LoGGlKMuyxACQAm4oak1vvJ9mGxgfd9AU+eo58
      O/esB2Eaf+QqMPELdFSZQfG2jvp+3WQTZK8fDKHyLr076G3UetEMy867F6fzTSZd
      9Kxq0DY7RCEpdHMCKcOL
      -----END CERTIFICATE-----

messageCache: # Redis server configuration for message store cache
  persistDelayMinutes: 1
  cluster:
@@ -1 +1 @@
Subproject commit 045ec042af4a17cfd564bdf66504d9a2faae81fa
Subproject commit 01c0bbfd6253e9532802e789dd801e9673ae4c2c