-
Notifications
You must be signed in to change notification settings - Fork 2k
chore(ci): Move most CI checks to merge queue #17340
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
1f054de
9caf8cf
4c4ead9
a3ef6af
122c4c5
ccd98db
1e1951e
7a762af
7b55c19
149b7f2
ba9df94
4b30c5d
fd7dd67
63845fc
11bff58
bbd961f
e2a3c5c
f0e3c29
a462bd8
70aee4b
f53f09c
bef839d
1318ccc
5d07909
c440e41
7cec18c
36281b8
91f5521
bf3e656
56a2386
f64e0f4
fc13e20
7e093e9
916f517
7df850f
c73dcdd
413b9a5
6dc1d97
4bb5d91
abe7127
2306380
da0ebf7
51ae61e
a000905
fe859c3
b24fa49
20bb7a1
e90ff05
b996ff5
72acc1e
411ade8
102adff
a3a7226
e4dabc4
28874d8
f088fa1
8668a62
dcd2888
c62c9c4
d06d307
b80717f
fc93a70
af923fd
269c6d8
cda0ac0
b3c24da
8694337
bcf3b6a
5fbe4c4
ff39c08
621ef2c
b2c4865
134f36f
04c5920
986aa2f
8acc7e6
e7f6ba6
c02106d
1054934
e4df03b
aff1ccd
03e4a65
d16e2bf
5ea6db9
9e70ce4
debeebe
81dbb58
b46f677
2fc8b49
f7e4206
f901fc2
88a61ab
38c8e1e
320c8e9
b236e98
b747bca
e29ea2f
9642aba
cefa826
598f407
07e6388
09e62be
871b693
fe70713
6e98ed4
66118df
24cdd38
fb4d7d9
73576dc
7067666
67198ae
a71b845
bbae3bf
70c5585
725b64b
7c5c2e3
881574c
a6bc74c
18cc6cb
b823417
cde2d27
3962e4e
3c7fb23
fa7d5a1
0b78789
6dc6cd2
37140ac
cd73258
dc8ff61
85aa21c
8589665
be1da81
c1a05e9
0aed9fc
54077fb
83764f8
4df68e8
45e5067
af2b195
484168f
42d15a0
82ab049
8b909f6
c4325b7
1de77c6
8a229ac
d442c92
3bd7c24
a3371bb
f8c13b8
fff5992
dc699b4
7d10985
02d28a8
f4c2f38
d940bab
4de3874
7498408
bdbb9cd
d96ccee
375f6a3
2f32589
4899501
e782d09
8c2d086
2aad1d7
68923e1
035765e
4712e00
4c3eae9
930e409
69aa124
7d00508
f40154c
d5b202f
d780da1
9c1dc09
2207acc
29ac7dc
0c5ed45
dc929e2
1138a08
47246f8
75cdf1f
44ead21
0d5b580
5f0b8ff
b940457
d43aeb5
369cf4f
ddeecbd
b1daf60
d3ed90f
09abb8c
20aed4f
d5a1cfb
f0511c8
219d51d
a5c87ed
139ec58
dcd8fff
2397499
c1dbced
6a68cdf
52416e3
a59eaaa
bbfd789
975434d
2524931
8f29583
47dea5f
0ad782a
fa3906c
46c7ba1
36e6135
b14f91c
6f5531a
32ce960
96900d6
3307a62
91c974b
305b4a7
1d6dcaf
4737b07
7812007
21227f0
37ec4c0
a677e9f
305f642
e03aecb
1bd175d
6f8a5c5
789e3e5
311de48
2bd2f25
5bfafa4
d356d76
dd23290
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,333 @@ | ||
| # This workflow identifies changes between the base and the head ref, for use in | ||
| # other workflows to decide if they should be executed. | ||
|
|
||
| name: Identify Changes | ||
|
|
||
| on: | ||
| workflow_call: | ||
| # These inputs allow the filter action to be able to access the correct refs for | ||
| # comparison in changes detection, it is required as this is called from the | ||
| # merge_group context. | ||
| inputs: | ||
| base_ref: | ||
| required: true | ||
| type: string | ||
| head_ref: | ||
| required: true | ||
| type: string | ||
| outputs: | ||
| source: | ||
| value: ${{ jobs.changes.outputs.source }} | ||
| dependencies: | ||
| value: ${{ jobs.changes.outputs.dependencies }} | ||
| internal_events: | ||
| value: ${{ jobs.changes.outputs.internal_events }} | ||
| cue: | ||
| value: ${{ jobs.changes.outputs.cue }} | ||
| component_docs: | ||
| value: ${{ jobs.changes.outputs.component_docs }} | ||
| markdown: | ||
| value: ${{ jobs.changes.outputs.markdown }} | ||
| install: | ||
| value: ${{ jobs.changes.outputs.install }} | ||
| k8s: | ||
| value: ${{ jobs.changes.outputs.k8s }} | ||
| all-int: | ||
| value: ${{ jobs.changes.outputs.all-int }} | ||
| amqp: | ||
| value: ${{ jobs.changes.outputs.amqp }} | ||
| appsignal: | ||
| value: ${{ jobs.changes.outputs.appsignal }} | ||
| aws: | ||
| value: ${{ jobs.changes.outputs.aws }} | ||
| axiom: | ||
| value: ${{ jobs.changes.outputs.axiom }} | ||
| azure: | ||
| value: ${{ jobs.changes.outputs.azure }} | ||
| clickhouse: | ||
| value: ${{ jobs.changes.outputs.clickhouse }} | ||
| databend: | ||
| value: ${{ jobs.changes.outputs.databend }} | ||
| datadog: | ||
| value: ${{ jobs.changes.outputs.datadog }} | ||
| dnstap: | ||
| value: ${{ jobs.changes.outputs.dnstap }} | ||
| docker-logs: | ||
| value: ${{ jobs.changes.outputs.docker-logs }} | ||
| elasticsearch: | ||
| value: ${{ jobs.changes.outputs.elasticsearch }} | ||
| eventstoredb: | ||
| value: ${{ jobs.changes.outputs.eventstoredb }} | ||
| fluent: | ||
| value: ${{ jobs.changes.outputs.fluent }} | ||
| gcp: | ||
| value: ${{ jobs.changes.outputs.gcp }} | ||
| humio: | ||
| value: ${{ jobs.changes.outputs.humio }} | ||
| http-client: | ||
| value: ${{ jobs.changes.outputs.http-client }} | ||
| influxdb: | ||
| value: ${{ jobs.changes.outputs.influxdb }} | ||
| kafka: | ||
| value: ${{ jobs.changes.outputs.kafka }} | ||
| logstash: | ||
| value: ${{ jobs.changes.outputs.logstash }} | ||
| loki: | ||
| value: ${{ jobs.changes.outputs.loki }} | ||
| mongodb: | ||
| value: ${{ jobs.changes.outputs.mongodb }} | ||
| nats: | ||
| value: ${{ jobs.changes.outputs.nats }} | ||
| nginx: | ||
| value: ${{ jobs.changes.outputs.nginx }} | ||
| opentelemetry: | ||
| value: ${{ jobs.changes.outputs.opentelemetry }} | ||
| postgres: | ||
| value: ${{ jobs.changes.outputs.postgres }} | ||
| prometheus: | ||
| value: ${{ jobs.changes.outputs.prometheus }} | ||
| pulsar: | ||
| value: ${{ jobs.changes.outputs.pulsar }} | ||
| redis: | ||
| value: ${{ jobs.changes.outputs.redis }} | ||
| splunk: | ||
| value: ${{ jobs.changes.outputs.splunk }} | ||
| webhdfs: | ||
| value: ${{ jobs.changes.outputs.webhdfs }} | ||
|
|
||
| jobs: | ||
| changes: | ||
| runs-on: ubuntu-20.04 | ||
| # Set job outputs to values from filter step | ||
| outputs: | ||
| # General source code | ||
| source: ${{ steps.filter.outputs.source }} | ||
| dependencies: ${{ steps.filter.outputs.dependencies }} | ||
| internal_events: ${{ steps.filter.outputs.internal_events }} | ||
| cue: ${{ steps.filter.outputs.cue }} | ||
| component_docs: ${{ steps.filter.outputs.component_docs }} | ||
| markdown: ${{ steps.filter.outputs.markdown }} | ||
| install: ${{ steps.filter.outputs.install }} | ||
| # K8s | ||
| k8s: ${{ steps.filter.outputs.k8s }} | ||
| # Integrations | ||
| all-int: ${{ steps.filter.outputs.all-int }} | ||
| amqp: ${{ steps.filter.outputs.amqp }} | ||
| appsignal: ${{ steps.filter.outputs.appsignal }} | ||
| aws: ${{ steps.filter.outputs.aws }} | ||
| axiom: ${{ steps.filter.outputs.axiom }} | ||
| azure: ${{ steps.filter.outputs.azure }} | ||
| clickhouse: ${{ steps.filter.outputs.clickhouse }} | ||
| databend: ${{ steps.filter.outputs.databend }} | ||
| datadog: ${{ steps.filter.outputs.datadog }} | ||
| dnstap: ${{ steps.filter.outputs.dnstap }} | ||
| docker-logs: ${{ steps.filter.outputs.docker-logs }} | ||
| elasticsearch: ${{ steps.filter.outputs.elasticsearch }} | ||
| eventstoredb: ${{ steps.filter.outputs.eventstoredb }} | ||
| fluent: ${{ steps.filter.outputs.fluent }} | ||
| gcp: ${{ steps.filter.outputs.gcp }} | ||
| humio: ${{ steps.filter.outputs.humio }} | ||
| http-client: ${{ steps.filter.outputs.http-client }} | ||
| influxdb: ${{ steps.filter.outputs.influxdb }} | ||
| kafka: ${{ steps.filter.outputs.kafka }} | ||
| logstash: ${{ steps.filter.outputs.logstash }} | ||
| loki: ${{ steps.filter.outputs.loki }} | ||
| mongodb: ${{ steps.filter.outputs.mongodb }} | ||
| nats: ${{ steps.filter.outputs.nats }} | ||
| nginx: ${{ steps.filter.outputs.nginx }} | ||
| opentelemetry: ${{ steps.filter.outputs.opentelemetry }} | ||
| postgres: ${{ steps.filter.outputs.postgres }} | ||
| prometheus: ${{ steps.filter.outputs.prometheus }} | ||
| pulsar: ${{ steps.filter.outputs.pulsar }} | ||
| redis: ${{ steps.filter.outputs.redis }} | ||
| splunk: ${{ steps.filter.outputs.splunk }} | ||
| webhdfs: ${{ steps.filter.outputs.webhdfs }} | ||
| steps: | ||
| - uses: actions/checkout@v3 | ||
|
|
||
| - uses: dorny/paths-filter@v2 | ||
| id: filter | ||
| with: | ||
| base: ${{ inputs.base_ref }} | ||
| ref: ${{ inputs.head_ref }} | ||
| filters: | | ||
| source: | ||
| - ".github/workflows/test.yml" | ||
| - ".cargo/**" | ||
| - "benches/**" | ||
| - "lib/**" | ||
| - "proto/**" | ||
| - "scripts/**" | ||
| - "src/**" | ||
| - "tests/**" | ||
| - "build.rs" | ||
| - "Cargo.lock" | ||
| - "Cargo.toml" | ||
| - "Makefile" | ||
| - "rust-toolchain.toml" | ||
| - "vdev/**" | ||
| deny: | ||
| - 'deny.toml' | ||
| - "vdev/**" | ||
| dependencies: | ||
| - ".cargo/**" | ||
| - 'Cargo.toml' | ||
| - 'Cargo.lock' | ||
| - 'rust-toolchain.toml' | ||
| - '.github/workflows/pr.yml' | ||
| - 'Makefile' | ||
| - 'scripts/cross/**' | ||
| - "vdev/**" | ||
| cue: | ||
| - 'website/cue/**' | ||
| - "vdev" | ||
| component_docs: | ||
| - 'scripts/generate-component-docs.rb' | ||
| - "vdev/**" | ||
| markdown: | ||
| - '**/**.md' | ||
| - "vdev/**" | ||
| internal_events: | ||
| - 'src/internal_events/**' | ||
| - "vdev/**" | ||
| docker: | ||
| - 'distribution/docker/**' | ||
| - "vdev/**" | ||
| install: | ||
| - ".github/workflows/install-sh.yml" | ||
| - "distribution/install.sh" | ||
| k8s: | ||
| - "src/sources/kubernetes_logs/**" | ||
| all-int: | ||
| - "lib/vector-core/**" | ||
| amqp: | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. This could be a follow-up, but I'd love to see these closer to the tests themselves in https://github.com/vectordotdev/vector/blob/master/scripts/integration/amqp/test.yaml
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. I agree. I did see they have an option to define them in a separate file. It shouldn't be hard to add that.
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Though I suppose we could split that out into different jobs.
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. You could also add a subcommand to `vdev` for this.
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. Oh I do like that better... Ok, I think that is a good candidate for a follow-up task.
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. For the integration tests, it struck me that we might want to check for changes in common places like 'lib/' or something like that, and run all of them. |
||
| - "src/amqp.rs" | ||
| - "src/internal_events/amqp.rs" | ||
| - "src/sinks/amqp/**" | ||
| - "src/sources/amqp.rs" | ||
| - "src/sources/util/**" | ||
| - "src/sinks/util/**" | ||
| appsignal: | ||
| - "src/sinks/appsignal/**" | ||
| - "src/sinks/util/**" | ||
| aws: | ||
| - "src/aws_**" | ||
| - "src/internal_events/aws_**" | ||
| - "src/sources/aws_**" | ||
| - "src/sources/util/**" | ||
| - "src/sinks/aws_**" | ||
| - "src/sinks/util/**" | ||
| - "src/transforms/aws_**" | ||
| axiom: | ||
| - "src/sinks/axiom.rs" | ||
| - "src/sinks/util/**" | ||
| azure: | ||
| - "src/sinks/azure_**" | ||
| - "src/sinks/util/**" | ||
| clickhouse: | ||
| - "src/sinks/clickhouse/**" | ||
| - "src/sinks/util/**" | ||
| databend: | ||
| - "src/sinks/databend/**" | ||
| - "src/sinks/util/**" | ||
| datadog: | ||
| - "src/common/datadog.rs" | ||
| - "src/internal_events/datadog_*" | ||
| - "src/sources/datadog_agent/**" | ||
| - "src/sinks/datadog/**" | ||
| - "src/sinks/datadog_archives.rs" | ||
| - "src/sinks/util/**" | ||
| docker-logs: | ||
| - "src/docker.rs" | ||
| - "src/internal_events/docker_logs.rs" | ||
| - "src/sources/docker_logs/**" | ||
| - "src/sources/util/**" | ||
neuronull marked this conversation as resolved.
Show resolved
Hide resolved
|
||
| elasticsearch: | ||
| - "src/sinks/elasticsearch/**" | ||
| - "src/sinks/util/**" | ||
| eventstoredb: | ||
| - "src/internal_events/eventstoredb_metrics.rs" | ||
| - "src/sources/eventstoredb_metrics/**" | ||
| - "src/sources/util/**" | ||
| fluent: | ||
| - "src/internal_events/fluent.rs" | ||
| - "src/sources/fluent/**" | ||
| - "src/sources/util/**" | ||
| gcp: | ||
| - "src/internal_events/gcp_pubsub.rs" | ||
| - "src/sources/gcp_pubsub.rs" | ||
| - "src/sources/util/**" | ||
| - "src/sinks/gcp/**" | ||
| - "src/sinks/util/**" | ||
| - "src/gcp.rs" | ||
| humio: | ||
| - "src/sinks/humio/**" | ||
| - "src/sinks/util/**" | ||
| http-client: | ||
| - "src/sinks/http-client/**" | ||
| influxdb: | ||
| - "src/internal_events/influxdb.rs" | ||
| - "src/sinks/influxdb/**" | ||
| - "src/sinks/util/**" | ||
| kafka: | ||
| - "src/internal_events/kafka.rs" | ||
| - "src/sinks/kafka/**" | ||
| - "src/sinks/util/**" | ||
| - "src/sources/kafka.rs" | ||
| - "src/sources/util/**" | ||
| - "src/kafka.rs" | ||
| logstash: | ||
| - "src/sources/logstash.rs" | ||
| - "src/sources/util/**" | ||
| loki: | ||
| - "src/internal_events/loki.rs" | ||
| - "src/sinks/loki/**" | ||
| - "src/sinks/util/**" | ||
| mongodb: | ||
| - "src/internal_events/mongodb_metrics.rs" | ||
| - "src/sources/mongodb_metrics/**" | ||
| - "src/sources/util/**" | ||
| nats: | ||
| - "src/internal_events/nats.rs" | ||
| - "src/sources/nats.rs" | ||
| - "src/sources/util/**" | ||
| - "src/sinks/nats.rs" | ||
| - "src/sinks/util/**" | ||
| - "src/nats.rs" | ||
| nginx: | ||
| - "src/internal_events/nginx_metrics.rs" | ||
| - "src/sources/nginx_metrics/**" | ||
| - "src/sources/util/**" | ||
| opentelemetry: | ||
| - "src/sources/opentelemetry/**" | ||
| - "src/sources/util/**" | ||
| postgres: | ||
| - "src/internal_events/postgresql_metrics.rs" | ||
| - "src/sources/postgresql_metrics.rs" | ||
| - "src/sources/util/**" | ||
| prometheus: | ||
| - "src/internal_events/prometheus.rs" | ||
| - "src/sources/prometheus/**" | ||
| - "src/sources/util/**" | ||
| - "src/sinks/prometheus/**" | ||
| - "src/sinks/util/**" | ||
| pulsar: | ||
| - "src/internal_events/pulsar.rs" | ||
| - "src/sinks/pulsar/**" | ||
| - "src/sinks/util/**" | ||
| redis: | ||
| - "src/internal_events/redis.rs" | ||
| - "src/sources/redis/**" | ||
| - "src/sources/util/**" | ||
| - "src/sinks/redis.rs" | ||
| - "src/sinks/util/**" | ||
| splunk: | ||
| - "src/internal_events/splunk_hec.rs" | ||
| - "src/sources/splunk_hec/**" | ||
| - "src/sources/util/**" | ||
| - "src/sinks/splunk_hec/**" | ||
| - "src/sinks/util/**" | ||
| webhdfs: | ||
| - "src/sinks/webhdfs/**" | ||
| - "src/sinks/util/**" | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,63 @@ | ||
| name: CLI - Linux | ||
|
|
||
| on: | ||
| workflow_call: | ||
|
|
||
| jobs: | ||
| test-cli: | ||
| runs-on: [linux, ubuntu-20.04-8core] | ||
| env: | ||
| CARGO_INCREMENTAL: 0 | ||
| steps: | ||
| - name: (PR comment) Get PR branch | ||
| if: ${{ github.event_name == 'issue_comment' }} | ||
| uses: xt0rted/pull-request-comment-branch@v1 | ||
| id: comment-branch | ||
|
|
||
| - name: (PR comment) Set latest commit status as pending | ||
| if: ${{ github.event_name == 'issue_comment' }} | ||
| uses: myrotvorets/set-commit-status-action@1.1.6 | ||
| with: | ||
| sha: ${{ steps.comment-branch.outputs.head_sha }} | ||
| token: ${{ secrets.GITHUB_TOKEN }} | ||
| context: CLI - Linux | ||
| status: pending | ||
|
|
||
| - name: (PR comment) Checkout PR branch | ||
| if: ${{ github.event_name == 'issue_comment' }} | ||
| uses: actions/checkout@v3 | ||
| with: | ||
| ref: ${{ steps.comment-branch.outputs.head_ref }} | ||
|
|
||
| - name: Checkout branch | ||
| if: ${{ github.event_name != 'issue_comment' }} | ||
| uses: actions/checkout@v3 | ||
|
|
||
| - name: Cache Cargo registry + index | ||
| uses: actions/cache@v3 | ||
| with: | ||
| path: | | ||
| ~/.cargo/bin/ | ||
| ~/.cargo/registry/index/ | ||
| ~/.cargo/registry/cache/ | ||
| ~/.cargo/git/db/ | ||
| key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} | ||
| restore-keys: | | ||
| ${{ runner.os }}-cargo- | ||
|
|
||
| - run: sudo -E bash scripts/environment/bootstrap-ubuntu-20.04.sh | ||
| - run: bash scripts/environment/prepare.sh | ||
| - run: echo "::add-matcher::.github/matchers/rust.json" | ||
| - run: make test-cli | ||
| - name: Upload test results | ||
| run: scripts/upload-test-results.sh | ||
| if: always() | ||
|
|
||
| - name: (PR comment) Set latest commit status as ${{ job.status }} | ||
| uses: myrotvorets/set-commit-status-action@1.1.6 | ||
| if: always() && github.event_name == 'issue_comment' | ||
| with: | ||
| sha: ${{ steps.comment-branch.outputs.head_sha }} | ||
| token: ${{ secrets.GITHUB_TOKEN }} | ||
| context: CLI - Linux | ||
| status: ${{ job.status }} |
Uh oh!
There was an error while loading. Please reload this page.