Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
29 changes: 29 additions & 0 deletions confidence-resolver/build.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,9 @@ use std::io::Result;
use std::path::PathBuf;

fn main() -> Result<()> {
// Suppress all clippy lints in generated proto code
const ALLOW_ATTR: &str = "#[allow(clippy::all, clippy::arithmetic_side_effects, clippy::panic, clippy::unwrap_used, clippy::expect_used, clippy::indexing_slicing)]";

let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("protos");
let proto_files = vec![
root.join("confidence/flags/admin/v1/types.proto"),
Expand All @@ -22,6 +25,8 @@ fn main() -> Result<()> {

let mut config = prost_build::Config::new();

config.type_attribute(".", ALLOW_ATTR);

[
"confidence.flags.admin.v1.ClientResolveInfo.EvaluationContextSchemaInstance",
"confidence.flags.admin.v1.ContextFieldSemanticType",
Expand Down Expand Up @@ -71,6 +76,30 @@ fn main() -> Result<()> {
".confidence.iam.v1",
".google.type",
])?;

// Suppress all clippy lints in generated serde files
let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
for entry in std::fs::read_dir(&out_dir)? {
let entry = entry?;
let path = entry.path();
if path.extension().is_some_and(|e| e == "rs")
&& path
.file_name()
.is_some_and(|n| n.to_str().unwrap().contains(".serde.rs"))
{
let content = std::fs::read_to_string(&path)?;
let mut new_content = content
.replace("\nimpl ", &format!("\n{}\nimpl ", ALLOW_ATTR))
.replace("\nimpl<", &format!("\n{}\nimpl<", ALLOW_ATTR));

// Handle first impl if it's at the start of file
if new_content.starts_with("impl ") || new_content.starts_with("impl<") {
new_content = format!("{}\n{}", ALLOW_ATTR, new_content);
}

std::fs::write(&path, new_content)?;
}
}
}

Ok(())
Expand Down
5 changes: 3 additions & 2 deletions confidence-resolver/src/err.rs
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ impl ErrorCode {

// emit 8 sextets (MSB first)
core::array::from_fn(|i| {
let shift = 42 - i * 6;
let shift = 42usize.wrapping_sub(i.wrapping_mul(6usize));
let sextet = ((v >> shift) & 0x3F) as u8;
b64u6(sextet)
})
Expand Down Expand Up @@ -162,7 +162,7 @@ impl core::fmt::Display for ErrorCode {
}
}

#[allow(clippy::indexing_slicing)]
#[allow(clippy::indexing_slicing, clippy::arithmetic_side_effects)]
const fn fnv1a64<const N: usize>(parts: [&[u8]; N]) -> u64 {
const FNV64_INIT: u64 = 0xCBF2_9CE4_8422_2325;
const FNV64_PRIME: u64 = 0x1000_0000_01B3;
Expand All @@ -183,6 +183,7 @@ const fn fnv1a64<const N: usize>(parts: [&[u8]; N]) -> u64 {
}

#[inline]
#[allow(clippy::arithmetic_side_effects)]
fn b64u6(x: u8) -> u8 {
match x {
0..=25 => b'A' + x,
Expand Down
15 changes: 12 additions & 3 deletions confidence-resolver/src/flag_logger.rs
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,12 @@ pub struct Logger {
flag_log_requests: Mutex<Vec<FlagLogQueueRequest>>,
}

impl Default for Logger {
fn default() -> Self {
Self::new()
}
}

impl Logger {
pub fn new() -> Logger {
Logger {
Expand Down Expand Up @@ -400,7 +406,8 @@ fn update_rule_variant_info(
let resolve_count = match flag_info.rule_resolve_info.get(&rule_info.rule) {
Some(i) => i.count,
None => 0,
} + rule_info.count;
}
.saturating_add(rule_info.count);

// assignment id to count
let current_assignments: &HashMap<String, i64> =
Expand All @@ -415,7 +422,8 @@ fn update_rule_variant_info(
let count = match current_assignments.get(&aa.assignment_id) {
None => 0,
Some(a) => *a,
} + aa.count;
}
.saturating_add(aa.count);
new_assignment_count.insert(aa.clone().assignment_id, count);
}
flag_info.rule_resolve_info.insert(
Expand All @@ -431,7 +439,8 @@ fn update_rule_variant_info(
let count = match flag_info.variant_resolve_info.get(&variant_info.variant) {
None => 0,
Some(v) => *v,
} + variant_info.count;
}
.saturating_add(variant_info.count);
flag_info
.variant_resolve_info
.insert(variant_info.variant.clone(), count);
Expand Down
18 changes: 9 additions & 9 deletions confidence-resolver/src/gzip.rs
Original file line number Diff line number Diff line change
Expand Up @@ -35,16 +35,16 @@ pub fn decompress_gz(buffer: &[u8]) -> Fallible<Vec<u8>> {
if flags & FHCRC != 0 {
fail!("crc not supported");
}
let trailer_start = buffer.len() - 8;
let crc_bytes = buffer.get(trailer_start..trailer_start + 4).or_fail()?;
let trailer_start = buffer.len().checked_sub(8).or_fail()?;
let crc_end = trailer_start.checked_add(4).or_fail()?;
let isize_end = trailer_start.checked_add(8).or_fail()?;

let crc_bytes = buffer.get(trailer_start..crc_end).or_fail()?;
let crc = u32::from_le_bytes(crc_bytes.try_into().or_fail()?);
let isize = u32::from_le_bytes(
buffer
.get(trailer_start + 4..trailer_start + 8)
.or_fail()?
.try_into()
.or_fail()?,
);

let isize_bytes = buffer.get(crc_end..isize_end).or_fail()?;
let isize = u32::from_le_bytes(isize_bytes.try_into().or_fail()?);

let compressed_bytes = buffer.get(10..trailer_start).or_fail()?;
let data = decompress_to_vec(compressed_bytes).or_fail()?;
if isize != data.len() as u32 {
Expand Down
48 changes: 32 additions & 16 deletions confidence-resolver/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,16 +1,18 @@
#![cfg_attr(not(test), deny(
clippy::panic,
clippy::unwrap_used,
clippy::expect_used,
clippy::indexing_slicing,
// clippy::integer_arithmetic
))]
#![cfg_attr(
not(test),
deny(
clippy::panic,
clippy::unwrap_used,
clippy::expect_used,
clippy::indexing_slicing,
clippy::arithmetic_side_effects
)
)]

use bitvec::prelude as bv;
use core::marker::PhantomData;
use fastmurmur3::murmur3_x64_128;
use std::collections::{BTreeMap, HashMap, HashSet};
use std::fmt::format;

use bytes::Bytes;

Expand Down Expand Up @@ -688,7 +690,8 @@ impl<'a, H: Host> AccountResolver<'a, H> {
};
let apply_time = to_date_time_utc(apply_time).or_fail()?;
let skew = send_time.signed_duration_since(apply_time);
let skew_adjusted_applied_time = datetime_to_timestamp(&(receive_time - skew));
let adjusted_time = receive_time.checked_sub_signed(skew).or_fail()?;
let skew_adjusted_applied_time = datetime_to_timestamp(&adjusted_time);
assigned_flags.push(FlagToApply {
assigned_flag: assigned_flag.clone(),
skew_adjusted_applied_time,
Expand Down Expand Up @@ -851,9 +854,9 @@ impl<'a, H: Host> AccountResolver<'a, H> {
let read_materialization = &materialization_spec.read_materialization;
if !read_materialization.is_empty() {
if let Some(info) = sticky_context.get(&unit) {
let info_from_context = info.info_map.get(read_materialization).clone();
let info_from_context = info.info_map.get(read_materialization);

if let Some(ref info_data) = info_from_context {
if let Some(info_data) = info_from_context {
if !info_data.unit_in_info {
if materialization_spec
.mode
Expand Down Expand Up @@ -927,7 +930,7 @@ impl<'a, H: Host> AccountResolver<'a, H> {
let bucket_count = spec.bucket_count;
let variant_salt = segment_name.split("/").nth(1).or_fail()?;
let key = format!("{}|{}", variant_salt, unit);
let bucket = bucket(hash(&key), bucket_count as u64) as i32;
let bucket = bucket(hash(&key), bucket_count as u64)? as i32;

let matched_assignment = spec.assignments.iter().find(|assignment| {
assignment
Expand Down Expand Up @@ -1077,7 +1080,7 @@ impl<'a, H: Host> AccountResolver<'a, H> {
return Ok(true);
}; // todo: would this match or not?
let salted_unit = self.client.account.salt_unit(unit)?;
let unit_hash = bucket(hash(&salted_unit), BUCKETS);
let unit_hash = bucket(hash(&salted_unit), BUCKETS)?;
Ok(bitset[unit_hash])
}

Expand Down Expand Up @@ -1381,12 +1384,16 @@ pub fn hash(key: &str) -> u128 {
murmur3_x64_128(key.as_bytes(), 0)
}

pub fn bucket(hash: u128, buckets: u64) -> usize {
#[allow(clippy::arithmetic_side_effects)] // buckets != 0 checked above
pub fn bucket(hash: u128, buckets: u64) -> Fallible<usize> {
if buckets == 0 {
fail!(":bucket.zero_buckets");
}
// convert u128 to u64 to match what we do in the java resolver
let hash_long: u64 = hash as u64;

// don't ask me why
((hash_long >> 4) % buckets) as usize
Ok(((hash_long >> 4) % buckets) as usize)
}

#[cfg(test)]
Expand Down Expand Up @@ -1477,10 +1484,19 @@ mod tests {
let account = Account {
name: "accounts/confidence-test".to_string(),
};
let bucket = bucket(hash(&account.salt_unit("roug").unwrap()), BUCKETS);
let bucket = bucket(hash(&account.salt_unit("roug").unwrap()), BUCKETS).unwrap();
assert_eq!(bucket, 567493); // test matching bucketing result from the java randomizer
}

#[test]
fn test_bucket_zero() {
    // A bucket count of zero cannot be divided by, so the call must
    // surface an error instead of panicking on a modulo-by-zero.
    let account = Account {
        name: "accounts/confidence-test".to_string(),
    };
    let salted_unit = account.salt_unit("roug").unwrap();
    assert!(bucket(hash(&salted_unit), 0).is_err());
}

#[test]
fn test_account_salt() {
let account = Account {
Expand Down
7 changes: 6 additions & 1 deletion confidence-resolver/src/resolve_logger.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@ use std::sync::{
};

use crate::{
err::{Fallible, OrFailExt},
schema_util::{DerivedClientSchema, SchemaFromEvaluationContext},
FlagToApply,
};
Expand Down Expand Up @@ -36,6 +35,12 @@ pub struct ResolveLogger {
state: ArcSwap<RwLock<Option<ResolveInfoState>>>,
}

impl Default for ResolveLogger {
fn default() -> Self {
Self::new()
}
}

impl ResolveLogger {
pub fn new() -> ResolveLogger {
ResolveLogger {
Expand Down
Loading