This repository was archived by the owner on Sep 21, 2024. It is now read-only.

Commit 6045a61

cdata authored and jsantell committed
feat: Periodic syndication checks to IPFS Kubo (#685)
1 parent 69c118b commit 6045a61

File tree: 14 files changed, +318 -95 lines

.github/workflows/report_test_flakes.yaml (new file, +125)

```yaml
on:
  workflow_call:

name: 'Workflow Analysis'

jobs:
  report-test-flakes:
    name: 'Report test flakes'
    runs-on: ubuntu-latest
    permissions:
      contents: read
      pull-requests: write
    steps:
      - uses: actions/download-artifact@v3
      - name: Parse test results
        id: parse-test-results
        run: |
          sudo apt-get install colorized-logs

          echo -n 'results=[' >> $GITHUB_OUTPUT

          ENTRIES=""

          for RESULTS_DIR in test-results-*/ ; do
            mapfile -t target <$RESULTS_DIR/target

            PLATFORM="${target[0]}"
            FEATURES="${target[1]}"
            TOOLCHAIN="${target[2]}"

            LOG_PATH="$RESULTS_DIR/log"
            csplit -q "$LOG_PATH" %^------------%

            SUMMARY=""

            if [[ -f "./xx00" ]]; then
              SUMMARY=$(tail ./xx00 -n+2 | ansi2txt | jq -M --compact-output --raw-input --slurp . | sed -e 's/\\/\\\\/g')
            else
              continue
            fi

            ENTRY="{\"platform\":\"$PLATFORM\",\"features\":\"$FEATURES\",\"toolchain\":\"$TOOLCHAIN\",\"summary\":$SUMMARY}"

            if [ -z "$ENTRIES" ]; then
              ENTRIES="$ENTRY"
            else
              ENTRIES="$ENTRIES,$ENTRY"
            fi

          done

          echo -n "$ENTRIES ]" >> $GITHUB_OUTPUT
      - name: Report test flakes
        uses: actions/github-script@v6
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            const results = JSON.parse(`${{ steps.parse-test-results.outputs.results }}`);

            const { data: comments } = await github.rest.issues.listComments({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
            });

            const testFlakeAnalysisHeader = 'Test flake analysis';
            const existingComment = comments.find(comment => {
              return comment.user.type === 'Bot' && comment.body.includes(testFlakeAnalysisHeader)
            });

            let body = '';

            if (results.length == 0) {
              body = "No test results to analyze. Maybe none of the test runs passed?";
            } else {

              let table = "\n\n| status | platform | features | toolchain |\n|:---:|---|---|---|\n";

              const flakeSummaries = [];

              for (result of results) {
                const isFlakey = result.summary.indexOf("FLAKY") > 0;

                table += `| ${ isFlakey ? "🟡" : "🟢" } | \`${ result.platform }\` | \`${ result.features }\` | \`${result.toolchain}\` |\n`;

                if (isFlakey) {
                  flakeSummaries.push(`#### Flake summary for \`${ result.platform }\`, \`${ result.features }\`, \`${ result.toolchain }\`

            \`\`\`shell
            ${ result.summary }
            \`\`\``);
                }
              }

              if (flakeSummaries.length == 0) {
                body += '\nNo flakes detected 🎉\n\n'
              }

              body += table;

              if (flakeSummaries.length > 0) {
                body += "\n\n";
                body += flakeSummaries.join('\n\n');
              }
            }

            body = `### ${testFlakeAnalysisHeader}

            ${body}`;

            if (existingComment) {
              github.rest.issues.updateComment({
                owner: context.repo.owner,
                repo: context.repo.repo,
                comment_id: existingComment.id,
                body
              });
            } else {
              github.rest.issues.createComment({
                issue_number: context.issue.number,
                owner: context.repo.owner,
                repo: context.repo.repo,
                body
              });
            }
```
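
For reference, each entry that the "Parse test results" step appends to `results` is a small JSON object with `platform`, `features`, `toolchain`, and `summary` fields, where `summary` is the tail of the nextest log after its final `------------` divider. The sketch below models that shape in Rust purely for illustration; the struct, its name, and the serde/serde_json dependencies are assumptions, not part of this commit.

```rust
// Illustrative sketch only: a Rust/serde model of one entry in the `results`
// array emitted by the "Parse test results" step above. Assumes serde + serde_json.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct TestRunResult {
    platform: String,  // e.g. "ubuntu-latest", "windows-latest", "macos-13"
    features: String,  // e.g. "test-kubo,headers,rocksdb"
    toolchain: String, // e.g. "stable"
    summary: String,   // nextest output after the final "------------" divider
}

// The reporting step treats a run as flaky when the summary mentions "FLAKY".
fn is_flaky(result: &TestRunResult) -> bool {
    result.summary.contains("FLAKY")
}

fn main() -> Result<(), serde_json::Error> {
    let raw = r#"[{"platform":"ubuntu-latest","features":"test-kubo,headers","toolchain":"stable","summary":"... FLAKY ..."}]"#;
    let results: Vec<TestRunResult> = serde_json::from_str(raw)?;
    assert!(is_flaky(&results[0]));
    Ok(())
}
```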

.github/workflows/run_test_suite.yaml (+65 -55)

```diff
@@ -37,39 +37,6 @@ jobs:
         swift build --sanitize=address
         swift test --sanitize=address
 
-  run-test-suite-windows:
-    runs-on: windows-latest
-    steps:
-    - uses: actions/checkout@v3
-    - uses: Swatinem/rust-cache@v2
-    - name: 'Setup Rust'
-      run: |
-        curl -sSf https://sh.rustup.rs | sh -s -- -y
-    - name: 'Install environment packages'
-      run: |
-        choco install -y cmake protoc openssl
-      shell: sh
-    - name: 'Install IPFS Kubo'
-      uses: ibnesayeed/setup-ipfs@master
-      with:
-        ipfs_version: v0.17.0
-        run_daemon: true
-    - name: 'Run Rust native target tests'
-      # Increase stack size on Windows tests; (non-main) threads are spawned with 2MB
-      # default stack size, which `orb_can_render_peers_in_the_sphere_address_book`
-      # uses (at time of writing) slightly more than 2MB. While we could set the thread
-      # stack size at runtime (via tokio's `thread_stack_size`), it appears to not solve the
-      # problem, possibly due to the harness thread overflowing (e.g. a non-main thread that
-      # we can't configure within the test). In lieu of that, set RUST_MIN_STACK to increase
-      # the stack sizes of threads created by tokio within tests, as well as the test harness
-      # threads themselves.
-      #
-      # While our main thread isn't under fire here, notating this for future use:
-      # https://users.rust-lang.org/t/stack-overflow-when-compiling-on-windows-10/50818/8
-      run: $env:RUST_MIN_STACK = '4000000'; cargo test --features test-kubo,helpers
-      env:
-        NOOSPHERE_LOG: deafening
-
   run-linting-linux:
     runs-on: ubuntu-latest
     steps:
@@ -89,15 +56,31 @@ jobs:
     - name: 'Run Linter'
       run: cargo clippy --all -- -D warnings
 
-  run-test-suite-linux:
-    runs-on: ubuntu-latest
+  run-rust-test-suite:
+    name: 'Run Rust test suite'
+    strategy:
+      matrix:
+        features: ['test-kubo,headers', 'test-kubo,headers,rocksdb']
+        platform: ['ubuntu-latest', 'windows-latest', 'macos-13']
+        toolchain: ['stable']
+        exclude:
+          - platform: 'windows-latest'
+            features: 'test-kubo,headers,rocksdb'
+    runs-on: ${{ matrix.platform }}
     steps:
     - uses: actions/checkout@v3
     - uses: Swatinem/rust-cache@v2
     - name: 'Setup Rust'
       run: |
         curl -sSf https://sh.rustup.rs | sh -s -- -y
-    - name: 'Install environment packages'
+        rustup toolchain install ${{matrix.toolchain}}
+    - name: 'Install environment packages (Windows)'
+      if: ${{ matrix.platform == 'windows-latest' }}
+      run: |
+        choco install -y cmake protoc openssl
+      shell: sh
+    - name: 'Install environment packages (Linux)'
+      if: ${{ matrix.platform == 'ubuntu-latest' }}
       run: |
         sudo apt-get update -qqy
         sudo apt-get install jq protobuf-compiler cmake
@@ -106,28 +89,55 @@
       with:
         ipfs_version: v0.17.0
         run_daemon: true
-    - name: 'Run Rust native target tests'
-      run: NOOSPHERE_LOG=deafening cargo test --features test-kubo,headers
-
-  run-test-suite-linux-rocksdb:
-    runs-on: ubuntu-latest
-    steps:
-    - uses: actions/checkout@v3
-    - uses: Swatinem/rust-cache@v2
-    - name: 'Setup Rust'
+    - name: Install cargo-binstall
+      uses: cargo-bins/[email protected]
+    - name: Install binaries from cargo
       run: |
-        curl -sSf https://sh.rustup.rs | sh -s -- -y
-    - name: 'Install environment packages'
+        cargo +${{ matrix.toolchain }} binstall cargo-nextest --no-confirm --force
+    - name: 'Run Rust tests'
+      shell: bash
       run: |
-        sudo apt-get update -qqy
-        sudo apt-get install jq protobuf-compiler cmake libclang-dev
-    - name: 'Install IPFS Kubo'
-      uses: ibnesayeed/setup-ipfs@master
+        mkdir -p test-results
+
+        echo "${{ matrix.platform }}
+        ${{ matrix.features }}
+        ${{ matrix.toolchain}}" > test-results/target
+
+        FEATURES="";
+
+        if [ -n "${{matrix.features}}" ]; then
+          FEATURES="--features ${{matrix.features}}"
+        fi
+
+        # Increase stack size on Windows tests; (non-main) threads are spawned with 2MB
+        # default stack size, which `orb_can_render_peers_in_the_sphere_address_book`
+        # uses (at time of writing) slightly more than 2MB. While we could set the thread
+        # stack size at runtime (via tokio's `thread_stack_size`), it appears to not solve the
+        # problem, possibly due to the harness thread overflowing (e.g. a non-main thread that
+        # we can't configure within the test). In lieu of that, set RUST_MIN_STACK to increase
+        # the stack sizes of threads created by tokio within tests, as well as the test harness
+        # threads themselves.
+        #
+        # While our main thread isn't under fire here, notating this for future use:
+        # https://users.rust-lang.org/t/stack-overflow-when-compiling-on-windows-10/50818/8
+        if [[ "${{matrix.platform}}" == "windows-latest" ]]; then
+          export RUST_MIN_STACK='4000000'
+        fi
+
+        cargo +${{ matrix.toolchain }} nextest run $FEATURES --retries 5 --color always 2>&1 | tee test-results/log
+      env:
+        NOOSPHERE_LOG: academic
+    - uses: actions/upload-artifact@v3
       with:
-        ipfs_version: v0.17.0
-        run_daemon: true
-    - name: 'Run Rust native target tests (RocksDB)'
-      run: NOOSPHERE_LOG=defeaning cargo test -p noosphere -p noosphere-storage --features rocksdb,test-kubo
+        name: test-results-${{ hashFiles('./test-results') }}
+        path: ./test-results
+
+  report-test-flakes:
+    name: 'Report test flakes (Linux)'
+    needs: ['run-rust-test-suite']
+    if: always()
+    uses: ./.github/workflows/report_test_flakes.yaml
+    secrets: inherit
 
   run-test-suite-linux-c:
     runs-on: ubuntu-latest
```
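
The long comment in the 'Run Rust tests' step explains why the Windows leg exports `RUST_MIN_STACK` rather than configuring thread stacks in code. For context only, the runtime-side alternative it alludes to (tokio's `thread_stack_size`) looks roughly like the sketch below; as the comment notes, it does not cover the test-harness threads, which is why the environment variable is used instead. This snippet is an illustration, not code from this repository.

```rust
// Sketch: the runtime-level alternative mentioned in the workflow comment.
// It only affects tokio worker threads, not the test harness's own threads.
fn build_runtime() -> std::io::Result<tokio::runtime::Runtime> {
    tokio::runtime::Builder::new_multi_thread()
        // Comparable in intent to RUST_MIN_STACK=4000000 in the job above.
        .thread_stack_size(4 * 1024 * 1024)
        .enable_all()
        .build()
}

fn main() -> std::io::Result<()> {
    let runtime = build_runtime()?;
    runtime.block_on(async {
        // Tasks spawned here run on worker threads with the larger stack.
    });
    Ok(())
}
```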

images/orb/Dockerfile (+1 -1)

```diff
@@ -13,7 +13,7 @@ FROM ubuntu:latest
 RUN mkdir -p /root/.noosphere
 RUN mkdir -p /root/sphere
 
-ENV RUST_LOG="info,tower_http,noosphere,noosphere_cli,noosphere_ipfs,noosphere_storage,noosphere_core,noosphere_storage,noosphere_api,orb=debug"
+ENV NOOSPHERE_LOG="deafening"
 VOLUME ["/root/.noosphere", "/root/sphere"]
 EXPOSE 4433
 
```

rust/noosphere-core/src/api/client.rs (+4 -1)

```diff
@@ -254,7 +254,10 @@ where
                 .stream()
                 .map(|block| match block {
                     Ok(block) => Ok(block),
-                    Err(error) => Err(anyhow!(error)),
+                    Err(error) => {
+                        warn!("Replication stream ended prematurely");
+                        Err(anyhow!(error))
+                    }
                 }),
         )
     }
```

rust/noosphere-core/src/stream/memo.rs (+3 -3)

```diff
@@ -82,7 +82,7 @@ where
         };
 
         if replicate_authority {
-            debug!("Replicating authority...");
+            trace!("Replicating authority...");
             let authority = sphere.get_authority().await?;
             let store = store.clone();
 
@@ -105,7 +105,7 @@ where
         }
 
         if replicate_address_book {
-            debug!("Replicating address book...");
+            trace!("Replicating address book...");
             let address_book = sphere.get_address_book().await?;
             let identities = address_book.get_identities().await?;
 
@@ -121,7 +121,7 @@ where
         }
 
         if replicate_content {
-            debug!("Replicating content...");
+            trace!("Replicating content...");
             let content = sphere.get_content().await?;
 
             tasks.spawn(walk_versioned_map_changes_and(content, store.clone(), move |_, link, store| async move {
```

rust/noosphere-core/src/view/content.rs (+1 -1)

```diff
@@ -17,7 +17,7 @@ impl<'a, 'b, S: BlockStore> BodyChunkDecoder<'a, 'b, S> {
         let store = self.1.clone();
         Box::pin(try_stream! {
             while let Some(cid) = next {
-                debug!("Unpacking block {}...", cid);
+                trace!("Unpacking block {}...", cid);
                 let chunk = store.load::<DagCborCodec, BodyChunkIpld>(&cid).await.map_err(|error| {
                     std::io::Error::new(std::io::ErrorKind::UnexpectedEof, error.to_string())
                 })?;
```

rust/noosphere-gateway/src/gateway.rs (+2 -1)

```diff
@@ -78,7 +78,8 @@ where
 
         let ipfs_client = KuboClient::new(&ipfs_api)?;
 
-        let (syndication_tx, syndication_task) = start_ipfs_syndication::<C, S>(ipfs_api.clone());
+        let (syndication_tx, syndication_task) =
+            start_ipfs_syndication::<C, S>(ipfs_api.clone(), vec![sphere_context.clone()]);
         let (name_system_tx, name_system_task) = start_name_system::<C, S>(
             NameSystemConfiguration {
                 connection_type: NameSystemConnectionType::Remote(name_resolver_api),
```
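
With this change the gateway hands the spheres it serves to `start_ipfs_syndication` up front, which is what enables the periodic syndication checks named in the commit title. The body of `start_ipfs_syndication` is not part of this diff; the sketch below only illustrates the general shape of a periodic check loop in tokio, with assumed names and an assumed 60-second period.

```rust
// Hypothetical sketch of a periodic check loop; names, types, and the interval
// are assumptions for illustration and do not come from this commit.
use std::time::Duration;
use tokio::time;

async fn periodic_syndication_checks<F>(spheres: Vec<String>, mut check: F)
where
    F: FnMut(&str),
{
    let mut ticker = time::interval(Duration::from_secs(60));
    loop {
        // The first tick completes immediately; later ticks fire once per period.
        ticker.tick().await;
        for sphere in &spheres {
            // e.g. confirm the sphere's latest version is present in Kubo and
            // queue it for re-syndication if it is not.
            check(sphere);
        }
    }
}
```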

0 commit comments
