chore(lint): initialise biome_graphql_analyze #3276

Merged · 1 commit · Jun 24, 2024
21 changes: 21 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions Cargo.toml
@@ -105,6 +105,7 @@ biome_diagnostics_categories = { version = "0.5.7", path = "./crates/biome_diagn
biome_diagnostics_macros = { version = "0.5.7", path = "./crates/biome_diagnostics_macros" }
biome_formatter = { version = "0.5.7", path = "./crates/biome_formatter" }
biome_fs = { version = "0.5.7", path = "./crates/biome_fs" }
biome_graphql_analyze = { version = "0.0.1", path = "./crates/biome_graphql_analyze" }
biome_graphql_factory = { version = "0.1.0", path = "./crates/biome_graphql_factory" }
biome_graphql_parser = { version = "0.1.0", path = "./crates/biome_graphql_parser" }
biome_graphql_syntax = { version = "0.1.0", path = "./crates/biome_graphql_syntax" }
36 changes: 36 additions & 0 deletions crates/biome_graphql_analyze/Cargo.toml
@@ -0,0 +1,36 @@
[package]
authors.workspace = true
categories.workspace = true
description = "Biome's GraphQL linter"
edition.workspace = true
homepage.workspace = true
keywords.workspace = true
license.workspace = true
name = "biome_graphql_analyze"
repository.workspace = true
version = "0.0.1"

[dependencies]
biome_analyze = { workspace = true }
biome_console = { workspace = true }
biome_deserialize = { workspace = true }
biome_deserialize_macros = { workspace = true }
biome_diagnostics = { workspace = true }
biome_graphql_syntax = { workspace = true }
biome_rowan = { workspace = true }
biome_suppression = { workspace = true }
lazy_static = { workspace = true }
schemars = { workspace = true, optional = true }
serde = { workspace = true, features = ["derive"] }

[dev-dependencies]
biome_graphql_parser = { path = "../biome_graphql_parser" }
biome_test_utils = { path = "../biome_test_utils" }
insta = { workspace = true, features = ["glob"] }
tests_macros = { path = "../tests_macros" }

[features]
schema = ["schemars", "biome_deserialize/schema"]

[lints]
workspace = true
196 changes: 196 additions & 0 deletions crates/biome_graphql_analyze/src/lib.rs
@@ -0,0 +1,196 @@
mod lint;
pub mod options;
mod registry;
mod suppression_action;

pub use crate::registry::visit_registry;
use crate::suppression_action::GraphqlSuppressionAction;
use biome_analyze::{
AnalysisFilter, AnalyzerOptions, AnalyzerSignal, ControlFlow, LanguageRoot, MatchQueryParams,
MetadataRegistry, RuleRegistry, SuppressionKind,
};
use biome_diagnostics::{category, Error};
use biome_graphql_syntax::GraphqlLanguage;
use biome_suppression::{parse_suppression_comment, SuppressionDiagnostic};

/// Return the static [MetadataRegistry] for the GraphQL analyzer rules
pub fn metadata() -> &'static MetadataRegistry {
lazy_static::lazy_static! {
static ref METADATA: MetadataRegistry = {
let mut metadata = MetadataRegistry::default();
visit_registry(&mut metadata);
metadata
};
}

&METADATA
}

/// Run the analyzer on the provided `root`: this process will use the given `filter`
/// to selectively restrict analysis to specific rules or a specific source range,
/// then call `emit_signal` when an analysis rule emits a diagnostic or action
pub fn analyze<'a, F, B>(
root: &LanguageRoot<GraphqlLanguage>,
filter: AnalysisFilter,
options: &'a AnalyzerOptions,
emit_signal: F,
) -> (Option<B>, Vec<Error>)
where
F: FnMut(&dyn AnalyzerSignal<GraphqlLanguage>) -> ControlFlow<B> + 'a,
B: 'a,
{
analyze_with_inspect_matcher(root, filter, |_| {}, options, emit_signal)
}

/// Run the analyzer on the provided `root`: this process will use the given `filter`
/// to selectively restrict analysis to specific rules or a specific source range,
/// then call `emit_signal` when an analysis rule emits a diagnostic or action.
/// Additionally, this function takes an `inspect_matcher` function that can be
/// used to inspect the "query matches" emitted by the analyzer before they are
/// processed by the lint rules registry
pub fn analyze_with_inspect_matcher<'a, V, F, B>(
root: &LanguageRoot<GraphqlLanguage>,
filter: AnalysisFilter,
inspect_matcher: V,
options: &'a AnalyzerOptions,
mut emit_signal: F,
) -> (Option<B>, Vec<Error>)
where
V: FnMut(&MatchQueryParams<GraphqlLanguage>) + 'a,
F: FnMut(&dyn AnalyzerSignal<GraphqlLanguage>) -> ControlFlow<B> + 'a,
B: 'a,
{
fn parse_linter_suppression_comment(
text: &str,
) -> Vec<Result<SuppressionKind, SuppressionDiagnostic>> {
let mut result = Vec::new();

for comment in parse_suppression_comment(text) {
let categories = match comment {
Ok(comment) => {
if comment.is_legacy {
result.push(Ok(SuppressionKind::Deprecated));
}
comment.categories
}
Err(err) => {
result.push(Err(err));
continue;
}
};

for (key, value) in categories {
if key == category!("lint") {
if let Some(value) = value {
result.push(Ok(SuppressionKind::MaybeLegacy(value)));
} else {
result.push(Ok(SuppressionKind::Everything));
}
} else {
let category = key.name();
if let Some(rule) = category.strip_prefix("lint/") {
result.push(Ok(SuppressionKind::Rule(rule)));
}
}
}
}

result
}

let mut registry = RuleRegistry::builder(&filter, root);
visit_registry(&mut registry);

let (registry, services, diagnostics, visitors) = registry.build();

// Bail if we can't parse a rule option
if !diagnostics.is_empty() {
return (None, diagnostics);
}

let mut analyzer = biome_analyze::Analyzer::new(
metadata(),
biome_analyze::InspectMatcher::new(registry, inspect_matcher),
parse_linter_suppression_comment,
Box::new(GraphqlSuppressionAction),
&mut emit_signal,
);

for ((phase, _), visitor) in visitors {
analyzer.add_visitor(phase, visitor);
}

(
analyzer.run(biome_analyze::AnalyzerContext {
root: root.clone(),
range: filter.range,
services,
options,
}),
diagnostics,
)
}

#[cfg(test)]
mod tests {
use crate::analyze;
use biome_analyze::{AnalysisFilter, AnalyzerOptions, ControlFlow, Never, RuleFilter};
use biome_console::fmt::{Formatter, Termcolor};
use biome_console::{markup, Markup};
use biome_diagnostics::termcolor::NoColor;
use biome_diagnostics::{Diagnostic, DiagnosticExt, PrintDiagnostic, Severity};
use biome_graphql_parser::parse_graphql;
use biome_rowan::TextRange;
use std::slice;

#[ignore]
#[test]
fn quick_test() {
fn markup_to_string(markup: Markup) -> String {
let mut buffer = Vec::new();
let mut write = Termcolor(NoColor::new(&mut buffer));
let mut fmt = Formatter::new(&mut write);
fmt.write_markup(markup).unwrap();

String::from_utf8(buffer).unwrap()
}

const SOURCE: &str = r#" "#;

let parsed = parse_graphql(SOURCE);

let mut error_ranges: Vec<TextRange> = Vec::new();
let rule_filter = RuleFilter::Rule("nursery", "useDummyRule");
let options = AnalyzerOptions::default();
analyze(
&parsed.tree(),
AnalysisFilter {
enabled_rules: Some(slice::from_ref(&rule_filter)),
..AnalysisFilter::default()
},
&options,
|signal| {
if let Some(diag) = signal.diagnostic() {
error_ranges.push(diag.location().span.unwrap());
let error = diag
.with_severity(Severity::Warning)
.with_file_path("ahahah")
.with_file_source_code(SOURCE);
let text = markup_to_string(markup! {
{PrintDiagnostic::verbose(&error)}
});
eprintln!("{text}");
}

for action in signal.actions() {
let new_code = action.mutation.commit();
eprintln!("{new_code}");
}

ControlFlow::<Never>::Continue(())
},
);

assert_eq!(error_ranges.as_slice(), &[]);
}
}
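
For reference, driving the new `analyze` entry point from another crate follows the same shape as the ignored `quick_test` above: parse with `parse_graphql`, build an `AnalysisFilter`, and inspect each signal in the callback. A minimal sketch, assuming `biome_graphql_analyze`, `biome_graphql_parser`, `biome_analyze`, and `biome_diagnostics` are available as workspace dependencies (the `print_diagnostic_spans` helper is illustrative, not part of this PR):

```rust
use biome_analyze::{AnalysisFilter, AnalyzerOptions, ControlFlow, Never};
use biome_diagnostics::Diagnostic;
use biome_graphql_analyze::analyze;
use biome_graphql_parser::parse_graphql;

/// Illustrative driver: run the GraphQL analyzer over `source` and print
/// the span of every diagnostic the enabled rules emit.
fn print_diagnostic_spans(source: &str) {
    let parsed = parse_graphql(source);
    let options = AnalyzerOptions::default();

    let (_, option_errors) = analyze(
        &parsed.tree(),
        AnalysisFilter::default(),
        &options,
        |signal| {
            if let Some(diagnostic) = signal.diagnostic() {
                eprintln!("diagnostic at {:?}", diagnostic.location().span);
            }
            ControlFlow::<Never>::Continue(())
        },
    );

    // Errors returned directly from `analyze` indicate rule options that failed to parse.
    for error in option_errors {
        eprintln!("{error:?}");
    }
}
```

Restricting the run to a single rule works exactly as in `quick_test`, by passing `enabled_rules: Some(...)` in the filter instead of the default.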
4 changes: 4 additions & 0 deletions crates/biome_graphql_analyze/src/lint.rs
@@ -0,0 +1,4 @@
//! Generated file, do not edit by hand, see `xtask/codegen`

pub mod nursery;
::biome_analyze::declare_category! { pub Lint { kind : Lint , groups : [self :: nursery :: Nursery ,] } }
14 changes: 14 additions & 0 deletions crates/biome_graphql_analyze/src/lint/nursery.rs
@@ -0,0 +1,14 @@
//! Generated file, do not edit by hand, see `xtask/codegen`

use biome_analyze::declare_lint_group;

pub mod use_dummy_rule;

declare_lint_group! {
pub Nursery {
name : "nursery" ,
rules : [
self :: use_dummy_rule :: UseDummyRule ,
]
}
}
42 changes: 42 additions & 0 deletions crates/biome_graphql_analyze/src/lint/nursery/use_dummy_rule.rs
@@ -0,0 +1,42 @@
use biome_analyze::{context::RuleContext, declare_rule, Ast, Rule};
use biome_graphql_syntax::GraphqlRoot;

declare_rule! {
/// Dummy rule
///
/// ## Examples
///
/// ### Invalid
///
/// ```graphql,expect_diagnostic
/// {
///   member {
///     id
///     id
///   }
/// }
/// ```
///
/// ### Valid
///
/// ```graphql
/// {
///   member {
///     id
///     name
///   }
/// }
/// ```
pub UseDummyRule {
version: "next",
name: "useDummyRule",
language: "graphql",
}
}

impl Rule for UseDummyRule {
type Query = Ast<GraphqlRoot>;
type State = ();
type Signals = Option<Self::State>;
type Options = ();

fn run(_ctx: &RuleContext<Self>) -> Self::Signals {
None
}
}
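
`UseDummyRule` only establishes the scaffold: `run` never returns a state, so no signal or diagnostic is produced. For orientation, a fleshed-out rule on top of this scaffold would typically return a state from `run` and add a `diagnostic` method following the usual `biome_analyze` pattern. A rough, hypothetical sketch (the `NoEmptyDocument` name, its empty-range check, and the message text are invented for illustration and are not part of this PR):

```rust
use biome_analyze::{context::RuleContext, declare_rule, Ast, Rule, RuleDiagnostic};
use biome_console::markup;
use biome_graphql_syntax::GraphqlRoot;
use biome_rowan::AstNode;

declare_rule! {
    /// Hypothetical rule, used here only to illustrate how a signal becomes a diagnostic.
    pub NoEmptyDocument {
        version: "next",
        name: "noEmptyDocument",
        language: "graphql",
    }
}

impl Rule for NoEmptyDocument {
    type Query = Ast<GraphqlRoot>;
    type State = ();
    type Signals = Option<Self::State>;
    type Options = ();

    fn run(ctx: &RuleContext<Self>) -> Self::Signals {
        // Emit a signal only when the root node spans an empty text range.
        ctx.query().range().is_empty().then_some(())
    }

    fn diagnostic(ctx: &RuleContext<Self>, _state: &Self::State) -> Option<RuleDiagnostic> {
        // `rule_category!` supplies the diagnostic category, as in other Biome analyzers.
        Some(RuleDiagnostic::new(
            biome_analyze::rule_category!(),
            ctx.query().range(),
            markup! { "This GraphQL document is empty." },
        ))
    }
}
```

A real rule would also need to be registered in the nursery group, as `lint/nursery.rs` already does for `UseDummyRule`, and have its metadata regenerated via `xtask/codegen`.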
6 changes: 6 additions & 0 deletions crates/biome_graphql_analyze/src/options.rs
@@ -0,0 +1,6 @@
//! Generated file, do not edit by hand, see `xtask/codegen`

use crate::lint;

pub type UseDummyRule =
<lint::nursery::use_dummy_rule::UseDummyRule as biome_analyze::Rule>::Options;
7 changes: 7 additions & 0 deletions crates/biome_graphql_analyze/src/registry.rs
@@ -0,0 +1,7 @@
//! Generated file, do not edit by hand, see `xtask/codegen`

use biome_analyze::RegistryVisitor;
use biome_graphql_syntax::GraphqlLanguage;
pub fn visit_registry<V: RegistryVisitor<GraphqlLanguage>>(registry: &mut V) {
registry.record_category::<crate::lint::Lint>();
}
26 changes: 26 additions & 0 deletions crates/biome_graphql_analyze/src/suppression_action.rs
@@ -0,0 +1,26 @@
use biome_analyze::{ApplySuppression, SuppressionAction};
use biome_graphql_syntax::GraphqlLanguage;
use biome_rowan::{BatchMutation, SyntaxToken};

pub(crate) struct GraphqlSuppressionAction;

impl SuppressionAction for GraphqlSuppressionAction {
type Language = GraphqlLanguage;

fn find_token_to_apply_suppression(
&self,
_original_token: SyntaxToken<Self::Language>,
) -> Option<ApplySuppression<Self::Language>> {
// TODO: properly implement; see `JsSuppressionAction` for an example.
None
}

fn apply_suppression(
&self,
_mutation: &mut BatchMutation<Self::Language>,
_apply_suppression: ApplySuppression<Self::Language>,
_suppression_text: &str,
) {
unreachable!("find_token_to_apply_suppression returns None")
}
}