From 36b6109c593814da7c7e96df23ba159d24b25b7f Mon Sep 17 00:00:00 2001 From: Emanuele Stoppa Date: Mon, 24 Jun 2024 11:57:51 +0100 Subject: [PATCH] chore(lint): initialise `biome_graphql_analyze` --- Cargo.lock | 21 ++ Cargo.toml | 1 + crates/biome_graphql_analyze/Cargo.toml | 36 +++ crates/biome_graphql_analyze/src/lib.rs | 196 +++++++++++++++ crates/biome_graphql_analyze/src/lint.rs | 4 + .../biome_graphql_analyze/src/lint/nursery.rs | 14 ++ .../src/lint/nursery/use_dummy_rule.rs | 42 ++++ crates/biome_graphql_analyze/src/options.rs | 6 + crates/biome_graphql_analyze/src/registry.rs | 7 + .../src/suppression_action.rs | 26 ++ .../biome_graphql_analyze/tests/spec_tests.rs | 226 ++++++++++++++++++ .../specs/nursery/useDummyRule/valid.graphql | 3 + .../nursery/useDummyRule/valid.graphql.snap | 11 + justfile | 8 + knope.toml | 4 + xtask/codegen/src/generate_analyzer.rs | 28 +++ .../codegen/src/generate_new_analyzer_rule.rs | 78 ++++++ 17 files changed, 711 insertions(+) create mode 100644 crates/biome_graphql_analyze/Cargo.toml create mode 100644 crates/biome_graphql_analyze/src/lib.rs create mode 100644 crates/biome_graphql_analyze/src/lint.rs create mode 100644 crates/biome_graphql_analyze/src/lint/nursery.rs create mode 100644 crates/biome_graphql_analyze/src/lint/nursery/use_dummy_rule.rs create mode 100644 crates/biome_graphql_analyze/src/options.rs create mode 100644 crates/biome_graphql_analyze/src/registry.rs create mode 100644 crates/biome_graphql_analyze/src/suppression_action.rs create mode 100644 crates/biome_graphql_analyze/tests/spec_tests.rs create mode 100644 crates/biome_graphql_analyze/tests/specs/nursery/useDummyRule/valid.graphql create mode 100644 crates/biome_graphql_analyze/tests/specs/nursery/useDummyRule/valid.graphql.snap diff --git a/Cargo.lock b/Cargo.lock index eb9fda64d8cf..00741863c263 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -462,6 +462,27 @@ dependencies = [ "tracing", ] +[[package]] +name = "biome_graphql_analyze" +version = "0.0.1" +dependencies = [ + "biome_analyze", + "biome_console", + "biome_deserialize", + "biome_deserialize_macros", + "biome_diagnostics", + "biome_graphql_parser", + "biome_graphql_syntax", + "biome_rowan", + "biome_suppression", + "biome_test_utils", + "insta", + "lazy_static", + "schemars", + "serde", + "tests_macros", +] + [[package]] name = "biome_graphql_factory" version = "0.1.0" diff --git a/Cargo.toml b/Cargo.toml index 428f80f97434..790520bb7eb7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -105,6 +105,7 @@ biome_diagnostics_categories = { version = "0.5.7", path = "./crates/biome_diagn biome_diagnostics_macros = { version = "0.5.7", path = "./crates/biome_diagnostics_macros" } biome_formatter = { version = "0.5.7", path = "./crates/biome_formatter" } biome_fs = { version = "0.5.7", path = "./crates/biome_fs" } +biome_graphql_analyze = { version = "0.0.1", path = "./crates/biome_graphql_analyze" } biome_graphql_factory = { version = "0.1.0", path = "./crates/biome_graphql_factory" } biome_graphql_parser = { version = "0.1.0", path = "./crates/biome_graphql_parser" } biome_graphql_syntax = { version = "0.1.0", path = "./crates/biome_graphql_syntax" } diff --git a/crates/biome_graphql_analyze/Cargo.toml b/crates/biome_graphql_analyze/Cargo.toml new file mode 100644 index 000000000000..283883d95ef8 --- /dev/null +++ b/crates/biome_graphql_analyze/Cargo.toml @@ -0,0 +1,36 @@ +[package] +authors.workspace = true +categories.workspace = true +description = "Biome's GraphQL linter" +edition.workspace = true +homepage.workspace = 
true
+keywords.workspace = true
+license.workspace = true
+name = "biome_graphql_analyze"
+repository.workspace = true
+version = "0.0.1"
+
+[dependencies]
+biome_analyze = { workspace = true }
+biome_console = { workspace = true }
+biome_deserialize = { workspace = true }
+biome_deserialize_macros = { workspace = true }
+biome_diagnostics = { workspace = true }
+biome_graphql_syntax = { workspace = true }
+biome_rowan = { workspace = true }
+biome_suppression = { workspace = true }
+lazy_static = { workspace = true }
+schemars = { workspace = true, optional = true }
+serde = { workspace = true, features = ["derive"] }
+
+[dev-dependencies]
+biome_graphql_parser = { path = "../biome_graphql_parser" }
+biome_test_utils = { path = "../biome_test_utils" }
+insta = { workspace = true, features = ["glob"] }
+tests_macros = { path = "../tests_macros" }
+
+[features]
+schema = ["schemars", "biome_deserialize/schema"]
+
+[lints]
+workspace = true
diff --git a/crates/biome_graphql_analyze/src/lib.rs b/crates/biome_graphql_analyze/src/lib.rs
new file mode 100644
index 000000000000..2aa12461bde6
--- /dev/null
+++ b/crates/biome_graphql_analyze/src/lib.rs
@@ -0,0 +1,196 @@
+mod lint;
+pub mod options;
+mod registry;
+mod suppression_action;
+
+pub use crate::registry::visit_registry;
+use crate::suppression_action::GraphqlSuppressionAction;
+use biome_analyze::{
+    AnalysisFilter, AnalyzerOptions, AnalyzerSignal, ControlFlow, LanguageRoot, MatchQueryParams,
+    MetadataRegistry, RuleRegistry, SuppressionKind,
+};
+use biome_diagnostics::{category, Error};
+use biome_graphql_syntax::GraphqlLanguage;
+use biome_suppression::{parse_suppression_comment, SuppressionDiagnostic};
+
+/// Return the static [MetadataRegistry] for the GraphQL analyzer rules
+pub fn metadata() -> &'static MetadataRegistry {
+    lazy_static::lazy_static! {
+        static ref METADATA: MetadataRegistry = {
+            let mut metadata = MetadataRegistry::default();
+            visit_registry(&mut metadata);
+            metadata
+        };
+    }
+
+    &METADATA
+}
+
+/// Run the analyzer on the provided `root`: this process will use the given `filter`
+/// to selectively restrict analysis to specific rules / a specific source range,
+/// then call `emit_signal` when an analysis rule emits a diagnostic or action
+pub fn analyze<'a, F, B>(
+    root: &LanguageRoot<GraphqlLanguage>,
+    filter: AnalysisFilter,
+    options: &'a AnalyzerOptions,
+    emit_signal: F,
+) -> (Option<B>, Vec<Error>)
+where
+    F: FnMut(&dyn AnalyzerSignal<GraphqlLanguage>) -> ControlFlow<B> + 'a,
+    B: 'a,
+{
+    analyze_with_inspect_matcher(root, filter, |_| {}, options, emit_signal)
+}
+
+/// Run the analyzer on the provided `root`: this process will use the given `filter`
+/// to selectively restrict analysis to specific rules / a specific source range,
+/// then call `emit_signal` when an analysis rule emits a diagnostic or action.
+/// Additionally, this function takes a `inspect_matcher` function that can be +/// used to inspect the "query matches" emitted by the analyzer before they are +/// processed by the lint rules registry +pub fn analyze_with_inspect_matcher<'a, V, F, B>( + root: &LanguageRoot, + filter: AnalysisFilter, + inspect_matcher: V, + options: &'a AnalyzerOptions, + mut emit_signal: F, +) -> (Option, Vec) +where + V: FnMut(&MatchQueryParams) + 'a, + F: FnMut(&dyn AnalyzerSignal) -> ControlFlow + 'a, + B: 'a, +{ + fn parse_linter_suppression_comment( + text: &str, + ) -> Vec> { + let mut result = Vec::new(); + + for comment in parse_suppression_comment(text) { + let categories = match comment { + Ok(comment) => { + if comment.is_legacy { + result.push(Ok(SuppressionKind::Deprecated)); + } + comment.categories + } + Err(err) => { + result.push(Err(err)); + continue; + } + }; + + for (key, value) in categories { + if key == category!("lint") { + if let Some(value) = value { + result.push(Ok(SuppressionKind::MaybeLegacy(value))); + } else { + result.push(Ok(SuppressionKind::Everything)); + } + } else { + let category = key.name(); + if let Some(rule) = category.strip_prefix("lint/") { + result.push(Ok(SuppressionKind::Rule(rule))); + } + } + } + } + + result + } + + let mut registry = RuleRegistry::builder(&filter, root); + visit_registry(&mut registry); + + let (registry, services, diagnostics, visitors) = registry.build(); + + // Bail if we can't parse a rule option + if !diagnostics.is_empty() { + return (None, diagnostics); + } + + let mut analyzer = biome_analyze::Analyzer::new( + metadata(), + biome_analyze::InspectMatcher::new(registry, inspect_matcher), + parse_linter_suppression_comment, + Box::new(GraphqlSuppressionAction), + &mut emit_signal, + ); + + for ((phase, _), visitor) in visitors { + analyzer.add_visitor(phase, visitor); + } + + ( + analyzer.run(biome_analyze::AnalyzerContext { + root: root.clone(), + range: filter.range, + services, + options, + }), + diagnostics, + ) +} + +#[cfg(test)] +mod tests { + use crate::analyze; + use biome_analyze::{AnalysisFilter, AnalyzerOptions, ControlFlow, Never, RuleFilter}; + use biome_console::fmt::{Formatter, Termcolor}; + use biome_console::{markup, Markup}; + use biome_diagnostics::termcolor::NoColor; + use biome_diagnostics::{Diagnostic, DiagnosticExt, PrintDiagnostic, Severity}; + use biome_graphql_parser::parse_graphql; + use biome_rowan::TextRange; + use std::slice; + + #[ignore] + #[test] + fn quick_test() { + fn markup_to_string(markup: Markup) -> String { + let mut buffer = Vec::new(); + let mut write = Termcolor(NoColor::new(&mut buffer)); + let mut fmt = Formatter::new(&mut write); + fmt.write_markup(markup).unwrap(); + + String::from_utf8(buffer).unwrap() + } + + const SOURCE: &str = r#" "#; + + let parsed = parse_graphql(SOURCE); + + let mut error_ranges: Vec = Vec::new(); + let rule_filter = RuleFilter::Rule("nursery", "noUnknownPseudoClassSelector"); + let options = AnalyzerOptions::default(); + analyze( + &parsed.tree(), + AnalysisFilter { + enabled_rules: Some(slice::from_ref(&rule_filter)), + ..AnalysisFilter::default() + }, + &options, + |signal| { + if let Some(diag) = signal.diagnostic() { + error_ranges.push(diag.location().span.unwrap()); + let error = diag + .with_severity(Severity::Warning) + .with_file_path("ahahah") + .with_file_source_code(SOURCE); + let text = markup_to_string(markup! 
{ + {PrintDiagnostic::verbose(&error)} + }); + eprintln!("{text}"); + } + + for action in signal.actions() { + let new_code = action.mutation.commit(); + eprintln!("{new_code}"); + } + + ControlFlow::::Continue(()) + }, + ); + + assert_eq!(error_ranges.as_slice(), &[]); + } +} diff --git a/crates/biome_graphql_analyze/src/lint.rs b/crates/biome_graphql_analyze/src/lint.rs new file mode 100644 index 000000000000..03cc5d9015e6 --- /dev/null +++ b/crates/biome_graphql_analyze/src/lint.rs @@ -0,0 +1,4 @@ +//! Generated file, do not edit by hand, see `xtask/codegen` + +pub mod nursery; +::biome_analyze::declare_category! { pub Lint { kind : Lint , groups : [self :: nursery :: Nursery ,] } } diff --git a/crates/biome_graphql_analyze/src/lint/nursery.rs b/crates/biome_graphql_analyze/src/lint/nursery.rs new file mode 100644 index 000000000000..265fbb213ee0 --- /dev/null +++ b/crates/biome_graphql_analyze/src/lint/nursery.rs @@ -0,0 +1,14 @@ +//! Generated file, do not edit by hand, see `xtask/codegen` + +use biome_analyze::declare_lint_group; + +pub mod use_dummy_rule; + +declare_lint_group! { + pub Nursery { + name : "nursery" , + rules : [ + self :: use_dummy_rule :: UseDummyRule , + ] + } +} diff --git a/crates/biome_graphql_analyze/src/lint/nursery/use_dummy_rule.rs b/crates/biome_graphql_analyze/src/lint/nursery/use_dummy_rule.rs new file mode 100644 index 000000000000..91d2a7bef0c6 --- /dev/null +++ b/crates/biome_graphql_analyze/src/lint/nursery/use_dummy_rule.rs @@ -0,0 +1,42 @@ +use biome_analyze::{context::RuleContext, declare_rule, Ast, Rule}; +use biome_graphql_syntax::GraphqlRoot; + +declare_rule! { + /// Dummy rule + /// + /// ## Examples + /// + /// ### Invalid + /// + /// ```json,expect_diagnostic + /// { + /// "title": "New title", + /// "title": "Second title" + /// } + /// ``` + /// + /// ### Valid + /// + /// ```json + /// { + /// "title": "New title", + /// "secondTitle": "Second title" + /// } + /// ``` + pub UseDummyRule { + version: "next", + name: "useDummyRule", + language: "graphql", + } +} + +impl Rule for UseDummyRule { + type Query = Ast; + type State = (); + type Signals = Option; + type Options = (); + + fn run(_ctx: &RuleContext) -> Self::Signals { + None + } +} diff --git a/crates/biome_graphql_analyze/src/options.rs b/crates/biome_graphql_analyze/src/options.rs new file mode 100644 index 000000000000..ca8385464d4d --- /dev/null +++ b/crates/biome_graphql_analyze/src/options.rs @@ -0,0 +1,6 @@ +//! Generated file, do not edit by hand, see `xtask/codegen` + +use crate::lint; + +pub type UseDummyRule = + ::Options; diff --git a/crates/biome_graphql_analyze/src/registry.rs b/crates/biome_graphql_analyze/src/registry.rs new file mode 100644 index 000000000000..0ca9db70e9c9 --- /dev/null +++ b/crates/biome_graphql_analyze/src/registry.rs @@ -0,0 +1,7 @@ +//! 
Generated file, do not edit by hand, see `xtask/codegen`
+
+use biome_analyze::RegistryVisitor;
+use biome_graphql_syntax::GraphqlLanguage;
+pub fn visit_registry<V: RegistryVisitor<GraphqlLanguage>>(registry: &mut V) {
+    registry.record_category::<crate::lint::Lint>();
+}
diff --git a/crates/biome_graphql_analyze/src/suppression_action.rs b/crates/biome_graphql_analyze/src/suppression_action.rs
new file mode 100644
index 000000000000..b5d1b60acbbe
--- /dev/null
+++ b/crates/biome_graphql_analyze/src/suppression_action.rs
@@ -0,0 +1,26 @@
+use biome_analyze::{ApplySuppression, SuppressionAction};
+use biome_graphql_syntax::GraphqlLanguage;
+use biome_rowan::{BatchMutation, SyntaxToken};
+
+pub(crate) struct GraphqlSuppressionAction;
+
+impl SuppressionAction for GraphqlSuppressionAction {
+    type Language = GraphqlLanguage;
+
+    fn find_token_to_apply_suppression(
+        &self,
+        _original_token: SyntaxToken<Self::Language>,
+    ) -> Option<ApplySuppression<Self::Language>> {
+        // TODO: properly implement; see JsSuppressionAction for an example
+        None
+    }
+
+    fn apply_suppression(
+        &self,
+        _mutation: &mut BatchMutation<Self::Language>,
+        _apply_suppression: ApplySuppression<Self::Language>,
+        _suppression_text: &str,
+    ) {
+        unreachable!("find_token_to_apply_suppression returns None")
+    }
+}
diff --git a/crates/biome_graphql_analyze/tests/spec_tests.rs b/crates/biome_graphql_analyze/tests/spec_tests.rs
new file mode 100644
index 000000000000..9bcde490734e
--- /dev/null
+++ b/crates/biome_graphql_analyze/tests/spec_tests.rs
@@ -0,0 +1,226 @@
+use biome_analyze::{AnalysisFilter, AnalyzerAction, ControlFlow, Never, RuleFilter};
+use biome_diagnostics::advice::CodeSuggestionAdvice;
+use biome_diagnostics::{DiagnosticExt, Severity};
+use biome_graphql_parser::parse_graphql;
+use biome_graphql_syntax::{GraphqlFileSource, GraphqlLanguage};
+use biome_rowan::AstNode;
+use biome_test_utils::{
+    assert_errors_are_absent, code_fix_to_string, create_analyzer_options, diagnostic_to_string,
+    has_bogus_nodes_or_empty_slots, parse_test_path, register_leak_checker, scripts_from_json,
+    write_analyzer_snapshot, CheckActionType,
+};
+use std::{ffi::OsStr, fs::read_to_string, path::Path, slice};
+
+tests_macros::gen_tests! {"tests/specs/**/*.{graphql,json,jsonc}", crate::run_test, "module"}
+tests_macros::gen_tests!
{"tests/suppression/**/*.{graphql,json,jsonc}", crate::run_suppression_test, "module"} + +fn run_test(input: &'static str, _: &str, _: &str, _: &str) { + register_leak_checker(); + + let input_file = Path::new(input); + let file_name = input_file.file_name().and_then(OsStr::to_str).unwrap(); + + let (group, rule) = parse_test_path(input_file); + if rule == "specs" || rule == "suppression" { + panic!("the test file must be placed in the {rule}/// directory"); + } + if group == "specs" || group == "suppression" { + panic!("the test file must be placed in the {group}/{rule}// directory"); + } + if biome_graphql_analyze::metadata() + .find_rule(group, rule) + .is_none() + { + panic!("could not find rule {group}/{rule}"); + } + + let rule_filter = RuleFilter::Rule(group, rule); + let filter = AnalysisFilter { + enabled_rules: Some(slice::from_ref(&rule_filter)), + ..AnalysisFilter::default() + }; + + let mut snapshot = String::new(); + let extension = input_file.extension().unwrap_or_default(); + + let input_code = read_to_string(input_file) + .unwrap_or_else(|err| panic!("failed to read {:?}: {:?}", input_file, err)); + let quantity_diagnostics = if let Some(scripts) = scripts_from_json(extension, &input_code) { + for script in scripts { + analyze_and_snap( + &mut snapshot, + &script, + GraphqlFileSource {}, + filter, + file_name, + input_file, + CheckActionType::Lint, + ); + } + + 0 + } else { + let Ok(source_type) = input_file.try_into() else { + return; + }; + analyze_and_snap( + &mut snapshot, + &input_code, + source_type, + filter, + file_name, + input_file, + CheckActionType::Lint, + ) + }; + + insta::with_settings!({ + prepend_module_to_snapshot => false, + snapshot_path => input_file.parent().unwrap(), + }, { + insta::assert_snapshot!(file_name, snapshot, file_name); + }); + + if input_code.contains("/* should not generate diagnostics */") && quantity_diagnostics > 0 { + panic!("This test should not generate diagnostics"); + } +} + +#[allow(clippy::too_many_arguments)] +pub(crate) fn analyze_and_snap( + snapshot: &mut String, + input_code: &str, + source_type: GraphqlFileSource, + filter: AnalysisFilter, + file_name: &str, + input_file: &Path, + check_action_type: CheckActionType, +) -> usize { + let parsed = parse_graphql(input_code); + let root = parsed.tree(); + + let mut diagnostics = Vec::new(); + let mut code_fixes = Vec::new(); + let options = create_analyzer_options(input_file, &mut diagnostics); + + let (_, errors) = biome_graphql_analyze::analyze(&root, filter, &options, |event| { + if let Some(mut diag) = event.diagnostic() { + for action in event.actions() { + if check_action_type.is_suppression() { + if action.is_suppression() { + check_code_action(input_file, input_code, source_type, &action); + diag = diag.add_code_suggestion(CodeSuggestionAdvice::from(action)); + } + } else if !action.is_suppression() { + check_code_action(input_file, input_code, source_type, &action); + diag = diag.add_code_suggestion(CodeSuggestionAdvice::from(action)); + } + } + + let error = diag.with_severity(Severity::Warning); + diagnostics.push(diagnostic_to_string(file_name, input_code, error)); + return ControlFlow::Continue(()); + } + + for action in event.actions() { + if check_action_type.is_suppression() { + if action.category.matches("quickfix.suppressRule") { + check_code_action(input_file, input_code, source_type, &action); + code_fixes.push(code_fix_to_string(input_code, action)); + } + } else if !action.category.matches("quickfix.suppressRule") { + check_code_action(input_file, 
input_code, source_type, &action); + code_fixes.push(code_fix_to_string(input_code, action)); + } + } + + ControlFlow::::Continue(()) + }); + + for error in errors { + diagnostics.push(diagnostic_to_string(file_name, input_code, error)); + } + + write_analyzer_snapshot( + snapshot, + input_code, + diagnostics.as_slice(), + code_fixes.as_slice(), + "graphql", + ); + + diagnostics.len() +} + +fn check_code_action( + path: &Path, + source: &str, + _source_type: GraphqlFileSource, + action: &AnalyzerAction, +) { + let (new_tree, text_edit) = match action + .mutation + .clone() + .commit_with_text_range_and_edit(true) + { + (new_tree, Some((_, text_edit))) => (new_tree, text_edit), + (new_tree, None) => (new_tree, Default::default()), + }; + + let output = text_edit.new_string(source); + + // Checks that applying the text edits returned by the BatchMutation + // returns the same code as printing the modified syntax tree + assert_eq!(new_tree.to_string(), output); + + if has_bogus_nodes_or_empty_slots(&new_tree) { + panic!( + "modified tree has bogus nodes or empty slots:\n{new_tree:#?} \n\n {}", + new_tree + ) + } + + // Checks the returned tree contains no missing children node + if format!("{new_tree:?}").contains("missing (required)") { + panic!("modified tree has missing children:\n{new_tree:#?}") + } + + // Re-parse the modified code and panic if the resulting tree has syntax errors + let re_parse = parse_graphql(&output); + assert_errors_are_absent(re_parse.tree().syntax(), re_parse.diagnostics(), path); +} + +pub(crate) fn _run_suppression_test(input: &'static str, _: &str, _: &str, _: &str) { + register_leak_checker(); + + let input_file = Path::new(input); + let file_name = input_file.file_name().and_then(OsStr::to_str).unwrap(); + let input_code = read_to_string(input_file) + .unwrap_or_else(|err| panic!("failed to read {:?}: {:?}", input_file, err)); + + let (group, rule) = parse_test_path(input_file); + + let rule_filter = RuleFilter::Rule(group, rule); + let filter = AnalysisFilter { + enabled_rules: Some(slice::from_ref(&rule_filter)), + ..AnalysisFilter::default() + }; + + let mut snapshot = String::new(); + analyze_and_snap( + &mut snapshot, + &input_code, + GraphqlFileSource {}, + filter, + file_name, + input_file, + CheckActionType::Suppression, + ); + + insta::with_settings!({ + prepend_module_to_snapshot => false, + snapshot_path => input_file.parent().unwrap(), + }, { + insta::assert_snapshot!(file_name, snapshot, file_name); + }); +} diff --git a/crates/biome_graphql_analyze/tests/specs/nursery/useDummyRule/valid.graphql b/crates/biome_graphql_analyze/tests/specs/nursery/useDummyRule/valid.graphql new file mode 100644 index 000000000000..9bd66d0681b1 --- /dev/null +++ b/crates/biome_graphql_analyze/tests/specs/nursery/useDummyRule/valid.graphql @@ -0,0 +1,3 @@ +type User { + name String +} diff --git a/crates/biome_graphql_analyze/tests/specs/nursery/useDummyRule/valid.graphql.snap b/crates/biome_graphql_analyze/tests/specs/nursery/useDummyRule/valid.graphql.snap new file mode 100644 index 000000000000..0ea11adf96dd --- /dev/null +++ b/crates/biome_graphql_analyze/tests/specs/nursery/useDummyRule/valid.graphql.snap @@ -0,0 +1,11 @@ +--- +source: crates/biome_graphql_analyze/tests/spec_tests.rs +expression: valid.graphql +--- +# Input +```graphql +type User { + name String +} + +``` diff --git a/justfile b/justfile index bd8b899e334d..2114bdf4bb09 100644 --- a/justfile +++ b/justfile @@ -79,6 +79,12 @@ new-css-lintrule rulename: cargo run -p xtask_codegen -- new-lintrule 
--kind=css --category=lint --name={{rulename}}
   just gen-lint
 
+# Creates a new graphql lint rule in the given path, with the given name. Name has to be camel case.
+new-graphql-lintrule rulename:
+  cargo run -p xtask_codegen -- new-lintrule --kind=graphql --category=lint --name={{rulename}}
+  just gen-lint
+
+
 # Promotes a rule from the nursery group to a new group
 promote-rule rulename group:
   cargo run -p xtask_codegen -- promote-rule --name={{rulename}} --group={{group}}
@@ -118,9 +124,11 @@ test-lintrule name:
   just _touch crates/biome_js_analyze/tests/spec_tests.rs
   just _touch crates/biome_json_analyze/tests/spec_tests.rs
   just _touch crates/biome_css_analyze/tests/spec_tests.rs
+  just _touch crates/biome_graphql_analyze/tests/spec_tests.rs
   cargo test -p biome_js_analyze -- {{snakecase(name)}} --show-output
   cargo test -p biome_json_analyze -- {{snakecase(name)}} --show-output
   cargo test -p biome_css_analyze -- {{snakecase(name)}} --show-output
+  cargo test -p biome_graphql_analyze -- {{snakecase(name)}} --show-output
 
 # Tests a lint rule. The name of the rule needs to be camel case
 test-transformation name:
diff --git a/knope.toml b/knope.toml
index ba5f871ad5f3..aede1a2f834b 100644
--- a/knope.toml
+++ b/knope.toml
@@ -196,6 +196,10 @@ versioned_files = ["crates/biome_configuration/Cargo.toml"]
 changelog = "crates/biome_grit_formatter/CHANGELOG.md"
 versioned_files = ["crates/biome_grit_formatter/Cargo.toml"]
 
+[packages.biome_graphql_analyze]
+changelog = "crates/biome_graphql_analyze/CHANGELOG.md"
+versioned_files = ["crates/biome_graphql_analyze/Cargo.toml"]
+
 ## End of crates. DO NOT CHANGE!
 
 # Workflow to create a changeset
diff --git a/xtask/codegen/src/generate_analyzer.rs b/xtask/codegen/src/generate_analyzer.rs
index 7a3458511750..4634fa2376eb 100644
--- a/xtask/codegen/src/generate_analyzer.rs
+++ b/xtask/codegen/src/generate_analyzer.rs
@@ -11,6 +11,7 @@ pub fn generate_analyzer() -> Result<()> {
     generate_js_analyzer()?;
     generate_json_analyzer()?;
     generate_css_analyzer()?;
+    generate_graphql_analyzer()?;
     Ok(())
 }
 
@@ -50,6 +51,14 @@ fn generate_css_analyzer() -> Result<()> {
     update_css_registry_builder(analyzers)
 }
 
+fn generate_graphql_analyzer() -> Result<()> {
+    let base_path = project_root().join("crates/biome_graphql_analyze/src");
+    let mut analyzers = BTreeMap::new();
+    generate_category("lint", &mut analyzers, &base_path)?;
+    generate_options(&base_path)?;
+    update_graphql_registry_builder(analyzers)
+}
+
 fn generate_options(base_path: &Path) -> Result<()> {
     let mut rules_options = BTreeMap::new();
     let nl = Punct::new('\n', Spacing::Alone);
@@ -299,6 +308,25 @@ fn update_css_registry_builder(analyzers: BTreeMap<&'static str, TokenStream>) -> Result<()> {
     Ok(())
 }
 
+fn update_graphql_registry_builder(analyzers: BTreeMap<&'static str, TokenStream>) -> Result<()> {
+    let path = project_root().join("crates/biome_graphql_analyze/src/registry.rs");
+
+    let categories = analyzers.into_values();
+
+    let tokens = xtask::reformat(quote! {
+        use biome_analyze::RegistryVisitor;
+        use biome_graphql_syntax::GraphqlLanguage;
+
+        pub fn visit_registry<V: RegistryVisitor<GraphqlLanguage>>(registry: &mut V) {
+            #( #categories )*
+        }
+    })?;
+
+    fs2::write(path, tokens)?;
+
+    Ok(())
+}
+
 /// Returns file paths of the given directory.
 fn list_entry_paths(dir: &Path) -> Result<Vec<PathBuf>> {
     Ok(fs2::read_dir(dir)
diff --git a/xtask/codegen/src/generate_new_analyzer_rule.rs b/xtask/codegen/src/generate_new_analyzer_rule.rs
index f37b8eb015df..e5f889ebc565 100644
--- a/xtask/codegen/src/generate_new_analyzer_rule.rs
+++ b/xtask/codegen/src/generate_new_analyzer_rule.rs
@@ -8,6 +8,7 @@ pub enum RuleKind {
     Js,
     Json,
     Css,
+    Graphql,
 }
 
 impl RuleKind {
@@ -16,6 +17,7 @@ impl RuleKind {
             Self::Js => "js",
             Self::Json => "json",
             Self::Css => "css",
+            Self::Graphql => "graphql",
         }
     }
 }
@@ -27,6 +29,7 @@ impl FromStr for RuleKind {
             "js" => Ok(Self::Js),
             "json" => Ok(Self::Json),
             "css" => Ok(Self::Css),
+            "graphql" => Ok(Self::Graphql),
             _ => Err("Unsupported value"),
         }
     }
 }
@@ -283,6 +286,81 @@ impl Rule for {rule_name_upper_camel} {{
         )
     }}
 }}
+"#
+            )
+        }
+        RuleKind::Graphql => {
+            format!(
+                r#"use biome_analyze::{{context::RuleContext, declare_rule, Ast, Rule, RuleDiagnostic}};
+use biome_console::markup;
+use biome_graphql_syntax::GraphqlRoot;
+use biome_rowan::AstNode;
+
+declare_rule! {{
+    /// Succinct description of the rule.
+    ///
+    /// Put context and details about the rule.
+    /// As a starting point, you can take the description of the corresponding _ESLint_ rule (if any).
+    ///
+    /// Try to stay consistent with the descriptions of implemented rules.
+    ///
+    /// Add a link to the corresponding GraphQL-ESLint rule (if any):
+    ///
+    /// ## Examples
+    ///
+    /// ### Invalid
+    ///
+    /// ```graphql,expect_diagnostic
+    /// query {{}}
+    /// ```
+    ///
+    /// ### Valid
+    ///
+    /// ```graphql
+    /// query {{
+    ///   field
+    /// }}
+    /// ```
+    ///
+    pub {rule_name_upper_camel} {{
+        version: "next",
+        name: "{rule_name_lower_camel}",
+        language: "graphql",
+        recommended: false,
+    }}
+}}
+
+impl Rule for {rule_name_upper_camel} {{
+    type Query = Ast<GraphqlRoot>;
+    type State = ();
+    type Signals = Option<Self::State>;
+    type Options = ();
+
+    fn run(ctx: &RuleContext<Self>) -> Option<Self::State> {{
+        let _node = ctx.query();
+        None
+    }}
+
+    fn diagnostic(ctx: &RuleContext<Self>, _state: &Self::State) -> Option<RuleDiagnostic> {{
+        //
+        // Read our guidelines to write great diagnostics:
+        // https://docs.rs/biome_analyze/latest/biome_analyze/#what-a-rule-should-say-to-the-user
+        //
+        let span = ctx.query().range();
+        Some(
+            RuleDiagnostic::new(
+                rule_category!(),
+                span,
+                markup! {{
+                    "Unexpected empty block is not allowed"
+                }},
+            )
+            .note(markup! {{
+                "This note will give you more information."
+            }}),
+        )
+    }}
+}}
+"#
+            )
+        }
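
For reviewers who want to try the new crate locally, here is a minimal sketch of how the entry point added in `crates/biome_graphql_analyze/src/lib.rs` is driven. It mirrors the `quick_test` and spec-test harness included in this patch; it is illustrative only (not part of the patch) and assumes a scratch binary that depends on `biome_analyze`, `biome_graphql_analyze`, and `biome_graphql_parser` from this workspace.

```rust
use std::slice;

use biome_analyze::{AnalysisFilter, AnalyzerOptions, ControlFlow, Never, RuleFilter};
use biome_graphql_analyze::analyze;
use biome_graphql_parser::parse_graphql;

fn main() {
    // Parse a GraphQL document with the parser this crate depends on.
    let parsed = parse_graphql("type User { name: String }");

    // Restrict the run to the only rule registered so far.
    let rule_filter = RuleFilter::Rule("nursery", "useDummyRule");
    let filter = AnalysisFilter {
        enabled_rules: Some(slice::from_ref(&rule_filter)),
        ..AnalysisFilter::default()
    };
    let options = AnalyzerOptions::default();

    // `analyze` invokes the closure once per diagnostic or action signal.
    let (_, diagnostics) = analyze(&parsed.tree(), filter, &options, |signal| {
        if let Some(diagnostic) = signal.diagnostic() {
            eprintln!("{:?}", diagnostic.location().span);
        }
        ControlFlow::<Never>::Continue(())
    });

    // Rule-option parsing errors are returned here rather than through signals.
    assert!(diagnostics.is_empty());
}
```

Real rules follow the same path; the `just new-graphql-lintrule rulename` and `just test-lintrule rulename` recipes added in this patch scaffold a rule and run its spec tests against that harness.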