Skip to content

Commit 58fc62f

Browse files
Merge branch 'main' into duncan-harvey/serverless-mini-agent-bump-version
2 parents 7da0f69 + 5a20849 commit 58fc62f

37 files changed

+959
-436
lines changed

.github/workflows/test.yml

+2-2
Original file line numberDiff line numberDiff line change
@@ -141,7 +141,7 @@ jobs:
141141
env:
142142
RUSTFLAGS: "${{ matrix.flags }}"
143143
run: |
144-
cargo build -p builder --features profiling,telemetry,data-pipeline,symbolizer,crashtracker --release -vv
144+
cargo run --bin release --features profiling,telemetry,data-pipeline,symbolizer,crashtracker --release -- --out $LIBDD_OUTPUT_FOLDER
145145
146146
- name: 'Publish libdatadog'
147147
uses: actions/upload-artifact@v4
@@ -202,7 +202,7 @@ jobs:
202202
- name: Free Disk Space
203203
uses: jlumbroso/[email protected]
204204
with:
205-
tool-cache: false
205+
tool-cache: true
206206
android: true
207207
dotnet: true
208208
haskell: true

Cargo.lock

+1-2
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

LICENSE-3rdparty.yml

+164-1
Large diffs are not rendered by default.

builder/Cargo.toml

+16-10
Original file line numberDiff line numberDiff line change
@@ -8,22 +8,28 @@ license.workspace = true
88

99
[features]
1010
default = []
11-
crashtracker = ["datadog-profiling-ffi?/crashtracker-receiver", "datadog-profiling-ffi?/crashtracker-collector", "datadog-profiling-ffi?/demangler"]
12-
profiling = ["dep:datadog-profiling-ffi"]
13-
telemetry = ["profiling", "datadog-profiling-ffi?/ddtelemetry-ffi"]
14-
data-pipeline = ["telemetry", "datadog-profiling-ffi?/data-pipeline-ffi"]
15-
symbolizer = ["profiling", "datadog-profiling-ffi?/symbolizer"]
11+
crashtracker = []
12+
profiling = []
13+
telemetry = []
14+
data-pipeline = []
15+
symbolizer = []
1616

17-
[build-dependencies]
17+
[lib]
18+
bench = false
19+
test = false
20+
doctest = false
21+
22+
23+
[dependencies]
1824
anyhow = { version = "1.0" }
1925
build_common = { path = "../build-common", features = ["cbindgen"] }
2026
cmake = "0.1.50"
21-
tools = { path = "../tools" }
22-
ddcommon-ffi = { path = "../ddcommon-ffi" }
23-
datadog-profiling-ffi = { path = "../profiling-ffi", optional = true, features = ["cbindgen"] }
27+
pico-args = "0.5.0"
2428
tar = "0.4.41"
29+
tools = { path = "../tools" }
2530

2631
[[bin]]
27-
name = "dummy"
32+
name = "release"
33+
path = "src/bin/release.rs"
2834
test = false
2935
bench = false

builder/build/data_pipeline.rs

-23
This file was deleted.

builder/build/main.rs

+22-250
Original file line numberDiff line numberDiff line change
@@ -1,256 +1,28 @@
11
// Copyright 2024-Present Datadog, Inc. https://www.datadoghq.com/
22
// SPDX-License-Identifier: Apache-2.0
33

4-
pub mod arch;
5-
mod common;
6-
#[cfg(feature = "crashtracker")]
7-
mod crashtracker;
8-
mod module;
9-
10-
#[cfg(feature = "data-pipeline")]
11-
mod data_pipeline;
12-
13-
#[cfg(feature = "profiling")]
14-
mod profiling;
15-
16-
#[cfg(feature = "symbolizer")]
17-
mod symbolizer;
18-
19-
use anyhow::Result;
20-
use std::path::{Path, PathBuf};
21-
use std::process::Command;
22-
use std::rc::Rc;
23-
use std::{env, fs};
24-
25-
use build_common::{determine_paths, HEADER_PATH};
26-
use tools::headers::dedup_headers;
27-
28-
use crate::common::Common;
29-
#[cfg(feature = "crashtracker")]
30-
use crate::crashtracker::CrashTracker;
31-
#[cfg(feature = "data-pipeline")]
32-
use crate::data_pipeline::DataPipeline;
33-
#[cfg(feature = "profiling")]
34-
use crate::profiling::Profiling;
35-
#[cfg(feature = "symbolizer")]
36-
use crate::symbolizer::Symbolizer;
37-
use module::Module;
38-
39-
/// [`Builder`] is a structure that holds all the information required to assemble the final
40-
/// workspace artifact. It will manage the different modules which will be in charge of producing
41-
/// the different binaries and source files that will be part of the artifact. The builder will
42-
/// provide the needed information: paths, version, etc, to the different modules so they can
43-
/// install their sub-artifacts on the target folder.
44-
/// The target folder is set through `LIBDD_OUTPUT_FOLDER` environment variable if it is not
45-
/// provided the default target folder will be the builder output directory.
46-
///
47-
/// # Example
48-
///
49-
/// ```rust
50-
/// use crate::core::Core;
51-
///
52-
/// let mut builder = Builder::new(&path, &profile, &version);
53-
/// let core = Box::new(Core {
54-
/// version: builder.version.clone(),
55-
/// });
56-
/// builder.add_module(core);
57-
/// builder.build()?;
58-
/// builder.pack()?;
59-
/// ```
60-
struct Builder {
61-
modules: Vec<Box<dyn Module>>,
62-
main_header: Rc<str>,
63-
source_inc: Rc<str>,
64-
source_lib: Rc<str>,
65-
target_dir: Rc<str>,
66-
target_lib: Rc<str>,
67-
target_include: Rc<str>,
68-
target_bin: Rc<str>,
69-
target_pkconfig: Rc<str>,
70-
version: Rc<str>,
71-
}
72-
73-
impl Builder {
74-
/// Creates a new Builder instance
75-
///
76-
/// # Arguments
77-
///
78-
/// * `target_dir`: artifact folder.
79-
/// * `profile`: Release configuration: debug or release;
80-
/// * `version`: artifact's version.
81-
///
82-
/// # Returns
83-
///
84-
/// A new Builder instance.
85-
fn new(source_dir: &str, target_dir: &str, profile: &str, version: &str) -> Self {
86-
Builder {
87-
modules: Vec::new(),
88-
main_header: "common.h".into(),
89-
source_lib: (source_dir.to_string() + "/" + profile + "/deps").into(),
90-
source_inc: (source_dir.to_string() + "/" + HEADER_PATH).into(),
91-
target_dir: target_dir.into(),
92-
target_lib: (target_dir.to_string() + "/lib").into(),
93-
target_include: (target_dir.to_string() + "/" + HEADER_PATH).into(),
94-
target_bin: (target_dir.to_string() + "/bin").into(),
95-
target_pkconfig: (target_dir.to_string() + "/lib/pkgconfig").into(),
96-
version: version.into(),
97-
}
98-
}
99-
100-
/// Adds a boxed object which implements Module trait.
101-
fn add_module(&mut self, module: Box<dyn Module>) {
102-
self.modules.push(module);
103-
}
104-
105-
fn create_dir_structure(&self) {
106-
let target = Path::new(self.target_dir.as_ref());
107-
if fs::metadata(target).is_ok() {
108-
fs::remove_dir_all(Path::new(self.target_dir.as_ref()))
109-
.expect("Failed to clean preexisting target folder");
110-
}
111-
fs::create_dir_all(Path::new(self.target_dir.as_ref()))
112-
.expect("Failed to create target directory");
113-
fs::create_dir_all(Path::new(self.target_include.as_ref()))
114-
.expect("Failed to create include directory");
115-
fs::create_dir_all(Path::new(self.target_lib.as_ref()))
116-
.expect("Failed to create include directory");
117-
fs::create_dir_all(Path::new(self.target_bin.as_ref()))
118-
.expect("Failed to create include directory");
119-
fs::create_dir_all(Path::new(self.target_pkconfig.as_ref()))
120-
.expect("Failed to create include directory");
121-
}
122-
123-
fn deduplicate_headers(&self) {
124-
let datadog_inc_dir = Path::new(self.source_inc.as_ref());
125-
126-
let mut headers: Vec<String> = Vec::new();
127-
let inc_files = fs::read_dir(datadog_inc_dir).unwrap();
128-
for file in inc_files.flatten() {
129-
let name = file.file_name().into_string().unwrap();
130-
if name.ends_with(".h") && !name.eq("common.h") && !name.eq("blazesym.h") {
131-
headers.push(file.path().to_string_lossy().to_string());
132-
}
133-
}
134-
135-
let base_header = self.source_inc.to_string() + "/" + self.main_header.as_ref();
136-
dedup_headers(&base_header, &headers);
137-
}
138-
139-
// TODO: maybe do this in module's build.rs
140-
fn sanitize_libraries(&self) {
141-
let datadog_lib_dir = Path::new(self.source_lib.as_ref());
142-
143-
let libs = fs::read_dir(datadog_lib_dir).unwrap();
144-
for lib in libs.flatten() {
145-
let name = lib.file_name().into_string().unwrap();
146-
if name.ends_with(".so") {
147-
arch::fix_rpath(lib.path().to_str().unwrap());
148-
}
149-
}
150-
}
151-
152-
fn add_cmake(&self) {
153-
let libs = arch::NATIVE_LIBS.to_owned();
154-
let output = Command::new("sed")
155-
.arg("s/@Datadog_LIBRARIES@/".to_string() + libs.trim() + "/g")
156-
.arg("../cmake/DatadogConfig.cmake.in")
157-
.output()
158-
.expect("Failed to modify cmake");
159-
160-
let cmake_path: PathBuf = [&self.target_dir, "DatadogConfig.cmake"].iter().collect();
161-
fs::write(cmake_path, output.stdout).expect("writing cmake file failed");
162-
}
163-
164-
/// Builds the final artifact by going through all modules and instancing their install method.
165-
///
166-
/// #Returns
167-
///
168-
/// Ok(()) if success Err(_) if failure.
169-
fn build(&self) -> Result<()> {
170-
for module in &self.modules {
171-
module.install()?;
172-
}
173-
Ok(())
174-
}
175-
176-
/// Generate a tar file with all the intermediate artifacts generated by all the modules.
177-
///
178-
/// #Returns
179-
///
180-
/// Ok(()) if success Err(_) if failure.
181-
fn pack(&self) -> Result<()> {
182-
let tarname = "libdatadog".to_string() + "_v" + &self.version + ".tar";
183-
let path: PathBuf = [self.target_dir.as_ref(), &tarname].iter().collect();
184-
let artifact = fs::File::create(path).expect("Failed to create tarfile");
185-
let mut ar = tar::Builder::new(artifact);
186-
ar.append_dir_all("lib", self.target_lib.as_ref())?;
187-
ar.append_dir("bin", self.target_bin.as_ref())?;
188-
ar.append_dir_all("include/datadog", self.target_include.as_ref())?;
189-
190-
ar.finish().expect("Failed to write the tarfile");
191-
Ok(())
192-
}
193-
}
194-
1954
fn main() {
1965
// Rerun build script if any of the env vars change.
197-
println!("cargo:rerun-if-env-changed=LIBDD_OUTPUT_FOLDER");
198-
println!("cargo:rerun-if-env-changed=PROFILE");
199-
200-
let (_, source_path) = determine_paths();
201-
let mut path = env::var("OUT_DIR").unwrap();
202-
if let Ok(libdd_path) = env::var("LIBDD_OUTPUT_FOLDER") {
203-
path = libdd_path;
204-
}
205-
206-
let profile = env::var("PROFILE").unwrap();
207-
let version = env::var("CARGO_PKG_VERSION").unwrap();
208-
let mut builder = Builder::new(source_path.to_str().unwrap(), &path, &profile, &version);
209-
210-
builder.create_dir_structure();
211-
builder.deduplicate_headers();
212-
builder.sanitize_libraries();
213-
builder.add_cmake();
214-
215-
// add modules based on features
216-
builder.add_module(Box::new(Common {
217-
source_include: builder.source_inc.clone(),
218-
target_include: builder.target_include.clone(),
219-
}));
220-
221-
#[cfg(feature = "profiling")]
222-
builder.add_module(Box::new(Profiling {
223-
source_include: builder.source_inc.clone(),
224-
source_lib: builder.source_lib.clone(),
225-
target_include: builder.target_include.clone(),
226-
target_lib: builder.target_lib.clone(),
227-
target_pkconfig: builder.target_pkconfig.clone(),
228-
version: builder.version.clone(),
229-
}));
230-
231-
#[cfg(feature = "data-pipeline")]
232-
builder.add_module(Box::new(DataPipeline {
233-
source_include: builder.source_inc.clone(),
234-
target_include: builder.target_include.clone(),
235-
}));
236-
237-
#[cfg(feature = "symbolizer")]
238-
builder.add_module(Box::new(Symbolizer {
239-
source_include: builder.source_inc.clone(),
240-
target_include: builder.target_include.clone(),
241-
}));
242-
243-
#[cfg(feature = "crashtracker")]
244-
builder.add_module(Box::new(CrashTracker {
245-
source_include: builder.source_inc.clone(),
246-
target_include: builder.target_include.clone(),
247-
target_dir: builder.target_dir.clone(),
248-
}));
249-
250-
// Build artifacts.
251-
let res = builder.build();
252-
match res {
253-
Ok(_) => builder.pack().unwrap(),
254-
Err(err) => panic!("{}", format!("Building failed: {}", err)),
255-
}
6+
println!(
7+
"cargo:rustc-env=TARGET={}",
8+
std::env::var("TARGET").unwrap()
9+
);
10+
println!(
11+
"cargo:rustc-env=PROFILE={}",
12+
std::env::var("PROFILE").unwrap()
13+
);
14+
println!(
15+
"cargo:rustc-env=OPT_LEVEL={}",
16+
std::env::var("OPT_LEVEL").unwrap()
17+
);
18+
println!("cargo:rustc-env=DEBUG={}", std::env::var("DEBUG").unwrap());
19+
println!(
20+
"cargo:rustc-env=CARGO_PKG_VERSION={}",
21+
std::env::var("CARGO_PKG_VERSION").unwrap()
22+
);
23+
println!("cargo:rustc-env=HOST={}", std::env::var("HOST").unwrap());
24+
println!(
25+
"cargo:rustc-env=OUT_DIR={}",
26+
std::env::var("OUT_DIR").unwrap()
27+
);
25628
}

builder/build/symbolizer.rs

-23
This file was deleted.

0 commit comments

Comments
 (0)