diff --git a/.editorconfig b/.editorconfig
index eab3d4286..6e9ad419f 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -12,6 +12,7 @@
root = true
# All Files
+# Don't use tabs for indentation.
[*]
charset = utf-8
indent_style = space
@@ -19,6 +20,17 @@ indent_size = 4
insert_final_newline = true
trim_trailing_whitespace = true
+# Microsoft .NET properties
+csharp_using_directive_placement = outside_namespace:silent
+
+dotnet_style_parentheses_in_arithmetic_binary_operators = never_if_unnecessary:none
+dotnet_style_parentheses_in_other_binary_operators = never_if_unnecessary:none
+dotnet_style_parentheses_in_relational_binary_operators = never_if_unnecessary:none
+
+
+# Standard properties
+insert_final_newline = true
+
##########################################
# File Extension Settings
##########################################
@@ -68,501 +80,304 @@ indent_style = tab
# https://docs.microsoft.com/dotnet/fundamentals/code-analysis/configuration-options#scope
##########################################
-[*.{cs,csx,cake,vb,vbx}]
-# Default Severity for all .NET Code Style rules below
-dotnet_analyzer_diagnostic.severity = silent
+# Xml project files
+[*.{csproj,vbproj,vcxproj,vcxproj.filters,proj,projitems,shproj}]
+indent_size = 2
+space_after_last_pi_attribute = false
+# Xml config files
+[*.{props,targets,ruleset,config,nuspec,resx,vsixmanifest,vsct}]
+indent_size = 2
+space_after_last_pi_attribute = false
-##########################################
-# File Header (Uncomment to support file headers)
-# https://docs.microsoft.com/visualstudio/ide/reference/add-file-header
-##########################################
+# JSON files
+[*.json]
+indent_size = 2
-# [*.{cs,csx,cake,vb,vbx}]
-# file_header_template = \n© PROJECT-AUTHOR\n
+# Code files
+[*.{cs,csx,vb,vbx}]
+indent_size = 4
+indent_style = space
+# Sort using and Import directives with System.* appearing first
+dotnet_sort_system_directives_first = true
+dotnet_separate_import_directive_groups = false
-# SA1636: File header copyright text should match
-# Justification: .editorconfig supports file headers. If this is changed to a value other than "none", a stylecop.json file will need to added to the project.
-# dotnet_diagnostic.SA1636.severity = none
+# Avoid "this." and "Me." if not necessary
+dotnet_style_qualification_for_field = false:suggestion
+dotnet_style_qualification_for_property = false:suggestion
+dotnet_style_qualification_for_method = false:suggestion
+dotnet_style_qualification_for_event = false:suggestion
-##########################################
-# .NET Language Conventions
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions
-##########################################
+# Use language keywords instead of framework type names for type references
+dotnet_style_predefined_type_for_locals_parameters_members = true:suggestion
+dotnet_style_predefined_type_for_member_access = true:suggestion
+# Parentheses preferences
+dotnet_style_parentheses_in_arithmetic_binary_operators = never_if_unnecessary:silent
+dotnet_style_parentheses_in_relational_binary_operators = never_if_unnecessary:silent
+dotnet_style_parentheses_in_other_binary_operators = never_if_unnecessary:silent
+dotnet_style_parentheses_in_other_operators = never_if_unnecessary:silent
-# .NET Code Style Settings
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#net-code-style-settings
-[*.{cs,csx,cake,vb,vbx}]
-# "this." and "Me." qualifiers
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#this-and-me
-#dotnet_style_qualification_for_field = true:warning
-#dotnet_style_qualification_for_property = true:warning
-#dotnet_style_qualification_for_method = true:warning
-#dotnet_style_qualification_for_event = true:warning
-# Language keywords instead of framework type names for type references
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#language-keywords
-dotnet_style_predefined_type_for_locals_parameters_members = true:warning
-dotnet_style_predefined_type_for_member_access = true:warning
# Modifier preferences
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#normalize-modifiers
-dotnet_style_require_accessibility_modifiers = always:warning
-csharp_preferred_modifier_order = public,private,protected,internal,static,extern,new,virtual,abstract,sealed,override,readonly,unsafe,volatile,async:warning
-visual_basic_preferred_modifier_order = Partial,Default,Private,Protected,Public,Friend,NotOverridable,Overridable,MustOverride,Overloads,Overrides,MustInherit,NotInheritable,Static,Shared,Shadows,ReadOnly,WriteOnly,Dim,Const,WithEvents,Widening,Narrowing,Custom,Async:warning
-dotnet_style_readonly_field = true:warning
-# Parentheses preferences
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#parentheses-preferences
-dotnet_style_parentheses_in_arithmetic_binary_operators = always_for_clarity:warning
-dotnet_style_parentheses_in_relational_binary_operators = always_for_clarity:warning
-dotnet_style_parentheses_in_other_binary_operators = always_for_clarity:warning
-dotnet_style_parentheses_in_other_operators = always_for_clarity:suggestion
+dotnet_style_require_accessibility_modifiers = for_non_interface_members:silent
+dotnet_style_readonly_field = true:suggestion
+
# Expression-level preferences
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#expression-level-preferences
-dotnet_style_object_initializer = true:warning
-dotnet_style_collection_initializer = true:warning
-dotnet_style_explicit_tuple_names = true:warning
-dotnet_style_prefer_inferred_tuple_names = true:warning
-dotnet_style_prefer_inferred_anonymous_type_member_names = true:warning
+dotnet_style_object_initializer = true:suggestion
+dotnet_style_collection_initializer = true:suggestion
+dotnet_style_coalesce_expression = true:suggestion
+dotnet_style_null_propagation = true:suggestion
+dotnet_style_explicit_tuple_names = true:suggestion
+dotnet_style_prefer_is_null_check_over_reference_equality_method = true:silent
+dotnet_style_prefer_inferred_tuple_names = true:suggestion
+dotnet_style_prefer_inferred_anonymous_type_member_names = true:suggestion
dotnet_style_prefer_auto_properties = true:warning
-dotnet_style_prefer_is_null_check_over_reference_equality_method = true:warning
-dotnet_style_prefer_conditional_expression_over_assignment = false:suggestion
-dotnet_diagnostic.IDE0045.severity = suggestion
-dotnet_style_prefer_conditional_expression_over_return = false:suggestion
-dotnet_diagnostic.IDE0046.severity = suggestion
-dotnet_style_prefer_compound_assignment = true:warning
-# Null-checking preferences
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#null-checking-preferences
-dotnet_style_coalesce_expression = true:warning
-dotnet_style_null_propagation = true:warning
-# Parameter preferences
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#parameter-preferences
-dotnet_code_quality_unused_parameters = all:warning
-# More style options (Undocumented)
-# https://github.com/MicrosoftDocs/visualstudio-docs/issues/3641
-dotnet_style_operator_placement_when_wrapping = end_of_line
-# https://github.com/dotnet/roslyn/pull/40070
-dotnet_style_prefer_simplified_interpolation = true:warning
-
-# C# Code Style Settings
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#c-code-style-settings
-[*.{cs,csx,cake}]
-# Implicit and explicit types
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#implicit-and-explicit-types
-csharp_style_var_for_built_in_types = true:warning
-csharp_style_var_when_type_is_apparent = true:warning
-csharp_style_var_elsewhere = true:warning
-# Expression-bodied members
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#expression-bodied-members
-csharp_style_expression_bodied_methods = true:warning
-csharp_style_expression_bodied_constructors = true:warning
-csharp_style_expression_bodied_operators = true:warning
-csharp_style_expression_bodied_properties = true:warning
-csharp_style_expression_bodied_indexers = true:warning
-csharp_style_expression_bodied_accessors = true:warning
-csharp_style_expression_bodied_lambdas = true:warning
-csharp_style_expression_bodied_local_functions = true:warning
-# Pattern matching
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#pattern-matching
-csharp_style_pattern_matching_over_is_with_cast_check = true:warning
-csharp_style_pattern_matching_over_as_with_null_check = true:warning
-# Inlined variable declarations
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#inlined-variable-declarations
-csharp_style_inlined_variable_declaration = true:warning
-# Expression-level preferences
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#expression-level-preferences
-csharp_prefer_simple_default_expression = true:warning
-# "Null" checking preferences
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#c-null-checking-preferences
-csharp_style_throw_expression = true:warning
-csharp_style_conditional_delegate_call = true:warning
-# Code block preferences
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#code-block-preferences
-csharp_prefer_braces = true:warning
-# Unused value preferences
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#unused-value-preferences
-csharp_style_unused_value_expression_statement_preference = discard_variable:suggestion
-dotnet_diagnostic.IDE0058.severity = suggestion
-csharp_style_unused_value_assignment_preference = discard_variable:suggestion
-dotnet_diagnostic.IDE0059.severity = suggestion
-# Index and range preferences
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#index-and-range-preferences
-csharp_style_prefer_index_operator = true:warning
-csharp_style_prefer_range_operator = true:warning
-# Miscellaneous preferences
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-language-conventions#miscellaneous-preferences
-csharp_style_deconstructed_variable_declaration = true:warning
-csharp_style_pattern_local_over_anonymous_function = true:warning
-csharp_using_directive_placement = outside_namespace:warning
-csharp_prefer_static_local_function = true:warning
-csharp_prefer_simple_using_statement = true:suggestion
-dotnet_diagnostic.IDE0063.severity = suggestion
-
-##########################################
-# .NET Formatting Conventions
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-code-style-settings-reference#formatting-conventions
-##########################################
+dotnet_style_prefer_conditional_expression_over_assignment = true:silent
+dotnet_style_prefer_conditional_expression_over_return = true:silent
+
+
+# CSharp code style settings:
+[*.cs]
+# Do not prefer "var"; no preference enforced either way (severity: none)
+csharp_style_var_elsewhere = false:none
+csharp_style_var_for_built_in_types = false:none
+csharp_style_var_when_type_is_apparent = false:none
+
+# Prefer expression bodies for methods and operators; block bodies for constructors
+csharp_style_expression_bodied_methods = true:suggestion
+csharp_style_expression_bodied_constructors = false:suggestion
+csharp_style_expression_bodied_operators = true:suggestion
+
+# Prefer property-like constructs to have an expression-body
+csharp_style_expression_bodied_properties = true:suggestion
+csharp_style_expression_bodied_indexers = true:suggestion
+csharp_style_expression_bodied_accessors = true:suggestion
+
+# Suggest more modern language features when available
+csharp_style_pattern_matching_over_is_with_cast_check = true:suggestion
+csharp_style_pattern_matching_over_as_with_null_check = true:suggestion
+csharp_style_inlined_variable_declaration = true:suggestion
+csharp_style_throw_expression = true:suggestion
+csharp_style_conditional_delegate_call = true:suggestion
+csharp_style_namespace_declarations = file_scoped
-# Organize usings
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#organize-using-directives
-dotnet_sort_system_directives_first = true
-# Newline options
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#new-line-options
+# Newline settings
csharp_new_line_before_open_brace = all
csharp_new_line_before_else = true
csharp_new_line_before_catch = true
csharp_new_line_before_finally = true
csharp_new_line_before_members_in_object_initializers = true
csharp_new_line_before_members_in_anonymous_types = true
-csharp_new_line_between_query_expression_clauses = true
-# Indentation options
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#indentation-options
-csharp_indent_case_contents = true
-csharp_indent_switch_labels = true
-csharp_indent_labels = no_change
-csharp_indent_block_contents = true
-csharp_indent_braces = false
-csharp_indent_case_contents_when_block = false
-# Spacing options
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#spacing-options
+
+# Space preferences
csharp_space_after_cast = false
csharp_space_after_keywords_in_control_flow_statements = true
+csharp_space_between_method_call_parameter_list_parentheses = false
+csharp_space_between_method_declaration_parameter_list_parentheses = false
csharp_space_between_parentheses = false
csharp_space_before_colon_in_inheritance_clause = true
csharp_space_after_colon_in_inheritance_clause = true
csharp_space_around_binary_operators = before_and_after
-csharp_space_between_method_declaration_parameter_list_parentheses = false
csharp_space_between_method_declaration_empty_parameter_list_parentheses = false
-csharp_space_between_method_declaration_name_and_open_parenthesis = false
-csharp_space_between_method_call_parameter_list_parentheses = false
-csharp_space_between_method_call_empty_parameter_list_parentheses = false
csharp_space_between_method_call_name_and_opening_parenthesis = false
-csharp_space_after_comma = true
-csharp_space_before_comma = false
-csharp_space_after_dot = false
-csharp_space_before_dot = false
-csharp_space_after_semicolon_in_for_statement = true
-csharp_space_before_semicolon_in_for_statement = false
-csharp_space_around_declaration_statements = false
-csharp_space_before_open_square_brackets = false
-csharp_space_between_empty_square_brackets = false
-csharp_space_between_square_brackets = false
-# Wrapping options
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-formatting-conventions#wrap-options
-csharp_preserve_single_line_statements = false
+csharp_space_between_method_call_empty_parameter_list_parentheses = false
+
+# Wrapping preferences
+csharp_preserve_single_line_statements = true
csharp_preserve_single_line_blocks = true
-csharp_style_namespace_declarations = file_scoped
-##########################################
-# .NET Naming Conventions
-# https://docs.microsoft.com/visualstudio/ide/editorconfig-naming-conventions
-##########################################
-[*.{cs,csx,cake,vb,vbx}]
-dotnet_diagnostic.CA1000.severity = suggestion
-dotnet_diagnostic.CA1001.severity = error
-dotnet_diagnostic.CA1018.severity = error
-dotnet_diagnostic.CA1036.severity = silent
-dotnet_diagnostic.CA1051.severity = suggestion
-dotnet_diagnostic.CA1068.severity = error
-dotnet_diagnostic.CA1069.severity = error
-dotnet_diagnostic.CA1304.severity = error
-dotnet_diagnostic.CA1305.severity = suggestion
-dotnet_diagnostic.CA1307.severity = suggestion
-dotnet_diagnostic.CA1309.severity = suggestion
-dotnet_diagnostic.CA1310.severity = error
-dotnet_diagnostic.CA1507.severity = suggestion
-dotnet_diagnostic.CA1513.severity = suggestion
-dotnet_diagnostic.CA1707.severity = suggestion
-dotnet_diagnostic.CA1708.severity = suggestion
-dotnet_diagnostic.CA1711.severity = suggestion
-dotnet_diagnostic.CA1716.severity = suggestion
-dotnet_diagnostic.CA1720.severity = suggestion
-dotnet_diagnostic.CA1725.severity = suggestion
-dotnet_diagnostic.CA1805.severity = suggestion
-dotnet_diagnostic.CA1816.severity = suggestion
-dotnet_diagnostic.CA1822.severity = suggestion
-dotnet_diagnostic.CA1825.severity = error
-dotnet_diagnostic.CA1826.severity = silent
-dotnet_diagnostic.CA1827.severity = error
-dotnet_diagnostic.CA1829.severity = suggestion
-dotnet_diagnostic.CA1834.severity = error
-dotnet_diagnostic.CA1845.severity = suggestion
-dotnet_diagnostic.CA1848.severity = suggestion
-dotnet_diagnostic.CA1852.severity = suggestion
-dotnet_diagnostic.CA1860.severity = silent
-dotnet_diagnostic.CA2016.severity = suggestion
-dotnet_diagnostic.CA2201.severity = error
-dotnet_diagnostic.CA2206.severity = error
-dotnet_diagnostic.CA2208.severity = error
-dotnet_diagnostic.CA2211.severity = error
-dotnet_diagnostic.CA2249.severity = error
-dotnet_diagnostic.CA2251.severity = error
-dotnet_diagnostic.CA2252.severity = none
-dotnet_diagnostic.CA2254.severity = suggestion
-
-dotnet_diagnostic.CS0169.severity = error
-dotnet_diagnostic.CS0219.severity = error
-dotnet_diagnostic.CS0649.severity = suggestion
-dotnet_diagnostic.CS1998.severity = error
-dotnet_diagnostic.CS8602.severity = error
-dotnet_diagnostic.CS8604.severity = error
-dotnet_diagnostic.CS8618.severity = error
-dotnet_diagnostic.CS0618.severity = suggestion
-dotnet_diagnostic.CS1998.severity = error
-dotnet_diagnostic.CS4014.severity = error
-dotnet_diagnostic.CS8600.severity = error
-dotnet_diagnostic.CS8603.severity = error
-dotnet_diagnostic.CS8625.severity = error
-
-dotnet_diagnostic.BL0005.severity = suggestion
-
-dotnet_diagnostic.MVC1000.severity = suggestion
-
-dotnet_diagnostic.RZ10012.severity = error
-
-dotnet_diagnostic.IDE0004.severity = error # redundant cast
-dotnet_diagnostic.IDE0005.severity = suggestion
-dotnet_diagnostic.IDE0007.severity = error # Use var
-dotnet_diagnostic.IDE0011.severity = error # Use braces on if statements
-dotnet_diagnostic.IDE0010.severity = silent # populate switch
-dotnet_diagnostic.IDE0017.severity = suggestion # initialization can be simplified
-dotnet_diagnostic.IDE0021.severity = silent # expression body for constructors
-dotnet_diagnostic.IDE0022.severity = silent # expression body for methods
-dotnet_diagnostic.IDE0023.severity = suggestion # use expression body for operators
-dotnet_diagnostic.IDE0024.severity = silent # expression body for operators
-dotnet_diagnostic.IDE0025.severity = suggestion # use expression body for properties
-dotnet_diagnostic.IDE0027.severity = suggestion # Use expression body for accessors
-dotnet_diagnostic.IDE0028.severity = silent # expression body for accessors
-dotnet_diagnostic.IDE0032.severity = suggestion # Use auto property
-dotnet_diagnostic.IDE0033.severity = error # prefer tuple name
-dotnet_diagnostic.IDE0037.severity = suggestion # simplify anonymous type
-dotnet_diagnostic.IDE0040.severity = error # modifiers required
-dotnet_diagnostic.IDE0041.severity = error # simplify null
-dotnet_diagnostic.IDE0042.severity = error # deconstruct variable
-dotnet_diagnostic.IDE0044.severity = suggestion # make field only when possible
-dotnet_diagnostic.IDE0047.severity = suggestion # parameter name
-dotnet_diagnostic.IDE0051.severity = error # unused field
-dotnet_diagnostic.IDE0052.severity = error # unused member
-dotnet_diagnostic.IDE0053.severity = suggestion # lambda not needed
-dotnet_diagnostic.IDE0055.severity = suggestion # Fix formatting
-dotnet_diagnostic.IDE0057.severity = suggestion # substring can be simplified
-dotnet_diagnostic.IDE0060.severity = suggestion # unused parameters
-dotnet_diagnostic.IDE0061.severity = suggestion # local expression body
-dotnet_diagnostic.IDE0062.severity = suggestion # local to static
-dotnet_diagnostic.IDE0063.severity = error # simplify using
-dotnet_diagnostic.IDE0066.severity = suggestion # switch expression
-dotnet_diagnostic.IDE0072.severity = suggestion # Populate switch - forces population of all cases even when default specified
-dotnet_diagnostic.IDE0078.severity = suggestion # use pattern matching
-dotnet_diagnostic.IDE0090.severity = suggestion # new can be simplified
-dotnet_diagnostic.IDE0130.severity = suggestion # namespace folder structure
-dotnet_diagnostic.IDE0160.severity = silent # Use block namespaces ARE NOT required
-dotnet_diagnostic.IDE0161.severity = error # Please use file namespaces
-dotnet_diagnostic.IDE0200.severity = suggestion # lambda not needed
-dotnet_diagnostic.IDE1006.severity = suggestion # Naming rule violation: These words cannot contain lower case characters
-dotnet_diagnostic.IDE0260.severity = suggestion # Use pattern matching
-dotnet_diagnostic.IDE0270.severity = suggestion # Null check simplifcation
-dotnet_diagnostic.IDE0290.severity = error # Primary Constructor
-dotnet_diagnostic.IDE0300.severity = suggestion # Collection
-dotnet_diagnostic.IDE0305.severity = suggestion # Collection ToList
-
-dotnet_diagnostic.NX0001.severity = error
-dotnet_diagnostic.NX0002.severity = silent
-dotnet_diagnostic.NX0003.severity = silent
+# SYMBOL NAMING RULES
+# Copied from https://github.com/dotnet/roslyn/blob/main/.editorconfig
+# Adapted rules:
+# - Constants are ALL_UPPER
+# - Non-private fields are PascalCase
-##########################################
-# Styles
-##########################################
+# Non-private fields are PascalCase
+dotnet_naming_rule.non_private_readonly_fields_should_be_pascal_case.severity = warning
+dotnet_naming_rule.non_private_readonly_fields_should_be_pascal_case.symbols = non_private_readonly_fields
+dotnet_naming_rule.non_private_readonly_fields_should_be_pascal_case.style = non_private_readonly_field_style
+
+dotnet_naming_symbols.non_private_readonly_fields.applicable_kinds = field
+dotnet_naming_symbols.non_private_readonly_fields.applicable_accessibilities = public, protected, internal, protected_internal, private_protected
+
+dotnet_naming_style.non_private_readonly_field_style.capitalization = pascal_case
+
+# Constants are ALL_UPPER
+dotnet_naming_rule.constants_should_be_all_upper.severity = warning
+dotnet_naming_rule.constants_should_be_all_upper.symbols = constants
+dotnet_naming_rule.constants_should_be_all_upper.style = constant_style
+
+dotnet_naming_symbols.constants.applicable_kinds = field, local
+dotnet_naming_symbols.constants.required_modifiers = const
+
+dotnet_naming_style.constant_style.capitalization = all_upper
+
+# Private static fields are camelCase and start with s_
+dotnet_naming_rule.static_fields_should_be_camel_case.severity = warning
+dotnet_naming_rule.static_fields_should_be_camel_case.symbols = static_fields
+dotnet_naming_rule.static_fields_should_be_camel_case.style = static_field_style
+
+dotnet_naming_symbols.static_fields.applicable_accessibilities = private
+dotnet_naming_symbols.static_fields.applicable_kinds = field
+dotnet_naming_symbols.static_fields.required_modifiers = static
+
+dotnet_naming_style.static_field_style.capitalization = camel_case
+dotnet_naming_style.static_field_style.required_prefix = s_
+
+
+# Instance fields are camelCase and start with _
+dotnet_naming_rule.instance_fields_should_be_camel_case.severity = warning
+dotnet_naming_rule.instance_fields_should_be_camel_case.symbols = instance_fields
+dotnet_naming_rule.instance_fields_should_be_camel_case.style = instance_field_style
+
+dotnet_naming_symbols.instance_fields.applicable_kinds = field
+
+dotnet_naming_style.instance_field_style.capitalization = camel_case
+dotnet_naming_style.instance_field_style.required_prefix = _
+
+# Locals and parameters are camelCase
+dotnet_naming_rule.locals_should_be_camel_case.severity = warning
+dotnet_naming_rule.locals_should_be_camel_case.symbols = locals_and_parameters
+dotnet_naming_rule.locals_should_be_camel_case.style = camel_case_style
+
+dotnet_naming_symbols.locals_and_parameters.applicable_kinds = parameter, local
-# camel_case_style - Define the camelCase style
dotnet_naming_style.camel_case_style.capitalization = camel_case
-# pascal_case_style - Define the PascalCase style
-dotnet_naming_style.pascal_case_style.capitalization = pascal_case
-# constant_case - Define the CONSTANT_CASE style
-dotnet_naming_style.constant_case.capitalization = all_upper
-dotnet_naming_style.constant_case.word_separator = _
-# first_upper_style - The first character must start with an upper-case character
-dotnet_naming_style.first_upper_style.capitalization = first_word_upper
-# prefix_interface_with_i_style - Interfaces must be PascalCase and the first character of an interface must be an 'I'
-dotnet_naming_style.prefix_interface_with_i_style.capitalization = pascal_case
-dotnet_naming_style.prefix_interface_with_i_style.required_prefix = I
-# prefix_type_parameters_with_t_style - Generic Type Parameters must be PascalCase and the first character must be a 'T'
-dotnet_naming_style.prefix_type_parameters_with_t_style.capitalization = pascal_case
-dotnet_naming_style.prefix_type_parameters_with_t_style.required_prefix = T
-# disallowed_style - Anything that has this style applied is marked as disallowed
-dotnet_naming_style.disallowed_style.capitalization = pascal_case
-dotnet_naming_style.disallowed_style.required_prefix = ____RULE_VIOLATION____
-dotnet_naming_style.disallowed_style.required_suffix = ____RULE_VIOLATION____
-# internal_error_style - This style should never occur... if it does, it indicates a bug in file or in the parser using the file
-dotnet_naming_style.internal_error_style.capitalization = pascal_case
-dotnet_naming_style.internal_error_style.required_prefix = ____INTERNAL_ERROR____
-dotnet_naming_style.internal_error_style.required_suffix = ____INTERNAL_ERROR____
-
-# prefix_interface_with_i_style - Interfaces must be PascalCase and the first character of an interface must be an 'I'
-dotnet_naming_style.underscore_camel_case_style.capitalization = camel_case
-dotnet_naming_style.underscore_camel_case_style.required_prefix = _
-##########################################
-# .NET Design Guideline Field Naming Rules
-# Naming rules for fields follow the .NET Framework design guidelines
-# https://docs.microsoft.com/dotnet/standard/design-guidelines/index
-##########################################
+# Local functions are PascalCase
+dotnet_naming_rule.local_functions_should_be_pascal_case.severity = warning
+dotnet_naming_rule.local_functions_should_be_pascal_case.symbols = local_functions
+dotnet_naming_rule.local_functions_should_be_pascal_case.style = local_function_style
-# All public/protected/protected_internal constant fields must be constant_case
-# https://docs.microsoft.com/dotnet/standard/design-guidelines/field
-dotnet_naming_symbols.public_protected_constant_fields_group.applicable_accessibilities = public, protected, protected_internal
-dotnet_naming_symbols.public_protected_constant_fields_group.required_modifiers = const
-dotnet_naming_symbols.public_protected_constant_fields_group.applicable_kinds = field
-dotnet_naming_rule.public_protected_constant_fields_must_be_pascal_case_rule.symbols = public_protected_constant_fields_group
-dotnet_naming_rule.public_protected_constant_fields_must_be_pascal_case_rule.style = constant_case
-dotnet_naming_rule.public_protected_constant_fields_must_be_pascal_case_rule.severity = warning
-
-# All public/protected/protected_internal static readonly fields must be constant_case
-# https://docs.microsoft.com/dotnet/standard/design-guidelines/field
-dotnet_naming_symbols.public_protected_static_readonly_fields_group.applicable_accessibilities = public, protected, protected_internal
-dotnet_naming_symbols.public_protected_static_readonly_fields_group.required_modifiers = static, readonly
-dotnet_naming_symbols.public_protected_static_readonly_fields_group.applicable_kinds = field
-dotnet_naming_rule.public_protected_static_readonly_fields_must_be_pascal_case_rule.symbols = public_protected_static_readonly_fields_group
-dotnet_naming_rule.public_protected_static_readonly_fields_must_be_pascal_case_rule.style = constant_case
-dotnet_naming_rule.public_protected_static_readonly_fields_must_be_pascal_case_rule.severity = warning
-
-# No other public/protected/protected_internal fields are allowed
-# https://docs.microsoft.com/dotnet/standard/design-guidelines/field
-dotnet_naming_symbols.other_public_protected_fields_group.applicable_accessibilities = public, protected, protected_internal
-dotnet_naming_symbols.other_public_protected_fields_group.applicable_kinds = field
-dotnet_naming_rule.other_public_protected_fields_disallowed_rule.symbols = other_public_protected_fields_group
-dotnet_naming_rule.other_public_protected_fields_disallowed_rule.style = disallowed_style
-dotnet_naming_rule.other_public_protected_fields_disallowed_rule.severity = error
+dotnet_naming_symbols.local_functions.applicable_kinds = local_function
-##########################################
-# StyleCop Field Naming Rules
-# Naming rules for fields follow the StyleCop analyzers
-# This does not override any rules using disallowed_style above
-# https://github.com/DotNetAnalyzers/StyleCopAnalyzers
-##########################################
+dotnet_naming_style.local_function_style.capitalization = pascal_case
-# All constant fields must be constant_case
-# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1303.md
-dotnet_naming_symbols.stylecop_constant_fields_group.applicable_accessibilities = public, internal, protected_internal, protected, private_protected, private
-dotnet_naming_symbols.stylecop_constant_fields_group.required_modifiers = const
-dotnet_naming_symbols.stylecop_constant_fields_group.applicable_kinds = field
-dotnet_naming_rule.stylecop_constant_fields_must_be_pascal_case_rule.symbols = stylecop_constant_fields_group
-dotnet_naming_rule.stylecop_constant_fields_must_be_pascal_case_rule.style = constant_case
-dotnet_naming_rule.stylecop_constant_fields_must_be_pascal_case_rule.severity = warning
-
-# All static readonly fields must be constant_case
-# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1311.md
-dotnet_naming_symbols.stylecop_static_readonly_fields_group.applicable_accessibilities = public, internal, protected_internal, protected, private_protected, private
-dotnet_naming_symbols.stylecop_static_readonly_fields_group.required_modifiers = static, readonly
-dotnet_naming_symbols.stylecop_static_readonly_fields_group.applicable_kinds = field
-dotnet_naming_rule.stylecop_static_readonly_fields_must_be_pascal_case_rule.symbols = stylecop_static_readonly_fields_group
-dotnet_naming_rule.stylecop_static_readonly_fields_must_be_pascal_case_rule.style = constant_case
-dotnet_naming_rule.stylecop_static_readonly_fields_must_be_pascal_case_rule.severity = warning
-
-# No non-private instance fields are allowed
-# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1401.md
-dotnet_naming_symbols.stylecop_fields_must_be_private_group.applicable_accessibilities = public, internal, protected_internal, protected, private_protected
-dotnet_naming_symbols.stylecop_fields_must_be_private_group.applicable_kinds = field
-dotnet_naming_rule.stylecop_instance_fields_must_be_private_rule.symbols = stylecop_fields_must_be_private_group
-dotnet_naming_rule.stylecop_instance_fields_must_be_private_rule.style = disallowed_style
-dotnet_naming_rule.stylecop_instance_fields_must_be_private_rule.severity = error
-
-# Private fields must be camelCase
-# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1306.md
-dotnet_naming_symbols.stylecop_private_fields_group.applicable_accessibilities = private
-dotnet_naming_symbols.stylecop_private_fields_group.applicable_kinds = field
-dotnet_naming_rule.stylecop_private_fields_must_be_camel_case_rule.symbols = stylecop_private_fields_group
-dotnet_naming_rule.stylecop_private_fields_must_be_camel_case_rule.style = underscore_camel_case_style
-dotnet_naming_rule.stylecop_private_fields_must_be_camel_case_rule.severity = warning
-
-# Local variables must be camelCase
-# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1312.md
-dotnet_naming_symbols.stylecop_local_fields_group.applicable_accessibilities = local
-dotnet_naming_symbols.stylecop_local_fields_group.applicable_kinds = local
-dotnet_naming_rule.stylecop_local_fields_must_be_camel_case_rule.symbols = stylecop_local_fields_group
-dotnet_naming_rule.stylecop_local_fields_must_be_camel_case_rule.style = camel_case_style
-dotnet_naming_rule.stylecop_local_fields_must_be_camel_case_rule.severity = warning
-
-# This rule should never fire. However, it's included for at least two purposes:
-# First, it helps to understand, reason about, and root-case certain types of issues, such as bugs in .editorconfig parsers.
-# Second, it helps to raise immediate awareness if a new field type is added (as occurred recently in C#).
-dotnet_naming_symbols.sanity_check_uncovered_field_case_group.applicable_accessibilities = *
-dotnet_naming_symbols.sanity_check_uncovered_field_case_group.applicable_kinds = field
-dotnet_naming_rule.sanity_check_uncovered_field_case_rule.symbols = sanity_check_uncovered_field_case_group
-dotnet_naming_rule.sanity_check_uncovered_field_case_rule.style = internal_error_style
-dotnet_naming_rule.sanity_check_uncovered_field_case_rule.severity = error
+# By default, name items with PascalCase
+dotnet_naming_rule.members_should_be_pascal_case.severity = warning
+dotnet_naming_rule.members_should_be_pascal_case.symbols = all_members
+dotnet_naming_rule.members_should_be_pascal_case.style = pascal_case_style
+dotnet_naming_symbols.all_members.applicable_kinds = *
-##########################################
-# Other Naming Rules
-##########################################
+dotnet_naming_style.pascal_case_style.capitalization = pascal_case
-# All of the following must be PascalCase:
-# - Namespaces
-# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-namespaces
-# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1300.md
-# - Classes and Enumerations
-# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-classes-structs-and-interfaces
-# https://github.com/DotNetAnalyzers/StyleCopAnalyzers/blob/master/documentation/SA1300.md
-# - Delegates
-# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-classes-structs-and-interfaces#names-of-common-types
-# - Constructors, Properties, Events, Methods
-# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-type-members
-dotnet_naming_symbols.element_group.applicable_kinds = namespace, class, enum, struct, delegate, event, method, property
-dotnet_naming_rule.element_rule.symbols = element_group
-dotnet_naming_rule.element_rule.style = pascal_case_style
-dotnet_naming_rule.element_rule.severity = warning
-
-# Interfaces use PascalCase and are prefixed with uppercase 'I'
-# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-classes-structs-and-interfaces
-dotnet_naming_symbols.interface_group.applicable_kinds = interface
-dotnet_naming_rule.interface_rule.symbols = interface_group
-dotnet_naming_rule.interface_rule.style = prefix_interface_with_i_style
-dotnet_naming_rule.interface_rule.severity = warning
-
-# Generics Type Parameters use PascalCase and are prefixed with uppercase 'T'
-# https://docs.microsoft.com/dotnet/standard/design-guidelines/names-of-classes-structs-and-interfaces
-dotnet_naming_symbols.type_parameter_group.applicable_kinds = type_parameter
-dotnet_naming_rule.type_parameter_rule.symbols = type_parameter_group
-dotnet_naming_rule.type_parameter_rule.style = prefix_type_parameters_with_t_style
-dotnet_naming_rule.type_parameter_rule.severity = warning
-
-# Function parameters use camelCase
-# https://docs.microsoft.com/dotnet/standard/design-guidelines/naming-parameters
-dotnet_naming_symbols.parameters_group.applicable_kinds = parameter
-dotnet_naming_rule.parameters_rule.symbols = parameters_group
-dotnet_naming_rule.parameters_rule.style = camel_case_style
-dotnet_naming_rule.parameters_rule.severity = warning
-##########################################
-# License
-##########################################
-# The following applies as to the .editorconfig file ONLY, and is
-# included below for reference, per the requirements of the license
-# corresponding to this .editorconfig file.
-# See: https://github.com/RehanSaeed/EditorConfig
-#
-# MIT License
-#
-# Copyright (c) 2017-2019 Muhammad Rehan Saeed
-# Copyright (c) 2019 Henry Gabryjelski
-#
-# Permission is hereby granted, free of charge, to any
-# person obtaining a copy of this software and associated
-# documentation files (the "Software"), to deal in the
-# Software without restriction, including without limitation
-# the rights to use, copy, modify, merge, publish, distribute,
-# sublicense, and/or sell copies of the Software, and to permit
-# persons to whom the Software is furnished to do so, subject
-# to the following conditions:
-#
-# The above copyright notice and this permission notice shall be
-# included in all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-# OTHER DEALINGS IN THE SOFTWARE.
-##########################################
+# Analyzer settings
+dotnet_analyzer_diagnostic.category-Style.severity = warning # All rules will use this severity unless overridden
+dotnet_diagnostic.ide0055.severity = none # Formatting rule: Incompatible with CSharpier
+dotnet_diagnostic.ide0007.severity = none # Use var instead of explicit type: Preference
+dotnet_diagnostic.ide0009.severity = none # Add this or Me qualification: Preference
+dotnet_diagnostic.ide0200.severity = none # Remove unnecessary lambda expression: may be performance reasons not to
+dotnet_diagnostic.ide0058.severity = none # Remove unnecessary expression value: Subjective
+dotnet_diagnostic.ide0010.severity = none # Add missing cases to switch statement: Too verbose
+dotnet_diagnostic.ide0305.severity = none # Use collection expression for fluent: Can obfuscate intent
+dotnet_diagnostic.ide0001.severity = suggestion # Name can be simplified: Non enforceable in build
+dotnet_diagnostic.ide0046.severity = suggestion # Use conditional expression for return: Subjective
+dotnet_diagnostic.ide0045.severity = suggestion # Use conditional expression for assignment: Subjective
+dotnet_diagnostic.ide0078.severity = suggestion # Use pattern matching: Subjective
+dotnet_diagnostic.ide0260.severity = suggestion # Use pattern matching: Subjective
+dotnet_diagnostic.ide0022.severity = suggestion # Use expression body for method: Subjective
+dotnet_diagnostic.ide0061.severity = suggestion # Use expression body for local functions: Subjective
+dotnet_diagnostic.ide0063.severity = suggestion # Using directive can be simplified
+dotnet_diagnostic.ide0066.severity = suggestion # Use switch expression: Subjective
+dotnet_diagnostic.ide0029.severity = suggestion # Null check can be simplified: Subjective
+dotnet_diagnostic.ide0030.severity = suggestion # Null check can be simplified: Subjective
+dotnet_diagnostic.ide0270.severity = suggestion # Null check can be simplified: Subjective
+dotnet_diagnostic.ide0042.severity = suggestion # Deconstruct variable declaration: Subjective
+dotnet_diagnostic.ide0039.severity = suggestion # Use local function instead of lambda: Subjective
+dotnet_diagnostic.ide0028.severity = suggestion # Use collection initializers: Subjective
+dotnet_diagnostic.ide0072.severity = suggestion # Populate switch statement: Subjective
+dotnet_diagnostic.ide0074.severity = suggestion # Use compound assignment: Subjective
+dotnet_diagnostic.ide0300.severity = suggestion # Use collection expression for array: Subjective, maybe aspirational
+dotnet_diagnostic.ide0290.severity = suggestion # Use primary constructor: Subjective, and readonly properties are not a thing
+dotnet_diagnostic.ide0037.severity = suggestion # Use inferred member names: Sometimes it's nice to be explicit
+dotnet_diagnostic.ide0301.severity = suggestion # Use collection expression for empty: Subjective, intent
+dotnet_diagnostic.ide0021.severity = suggestion # Use expression body for constructors : Subjective
+dotnet_diagnostic.ide0090.severity = suggestion # Simplify new expression : Subjective
+
+dotnet_diagnostic.ide0047.severity = suggestion # Parentheses preferences: IDEs don't properly pick it up
+dotnet_diagnostic.ide0130.severity = suggestion # Namespace does not match folder structure : Aspirational
+dotnet_diagnostic.ide1006.severity = suggestion # Naming rule violation : Aspirational
+
+# Maintainability rules
+dotnet_diagnostic.ca1501.severity = warning # Avoid excessive inheritance
+dotnet_diagnostic.ca1502.severity = warning # Avoid excessive complexity
+dotnet_diagnostic.ca1505.severity = warning # Avoid unmaintainable code
+dotnet_diagnostic.ca1506.severity = warning # Avoid excessive class coupling
+dotnet_diagnostic.ca1507.severity = warning # Use nameof in place of string
+dotnet_diagnostic.ca1508.severity = warning # Avoid dead conditional code
+dotnet_diagnostic.ca1509.severity = warning # Invalid entry in code metrics configuration file
+dotnet_diagnostic.ca1861.severity = suggestion # Prefer 'static readonly' fields over constant array arguments if the called method is called repeatedly and is not mutating the passed array (https://learn.microsoft.com/dotnet/fundamentals/code-analysis/quality-rules/ca1861)
+
+
+# Performance rules
+dotnet_diagnostic.ca1849.severity = suggestion # Call async methods when in an async method: May decrease performance
+dotnet_diagnostic.ca1822.severity = suggestion # Mark member as static
+dotnet_diagnostic.ca1859.severity = suggestion # Use concrete types when possible for improved performance
+
+# Design rule
+dotnet_diagnostic.ca1002.severity = suggestion # Do not expose generic lists
+dotnet_diagnostic.ca1051.severity = warning # Do not declare visible instance fields
+dotnet_diagnostic.ca1056.severity = suggestion # URI properties should not be strings
+dotnet_diagnostic.ca1062.severity = none # Public method must check all parameters for null
+
+# Naming
+dotnet_diagnostic.ca1707.severity = none # Remove underscores in names
+
+# Usage
+dotnet_diagnostic.ca2227.severity = suggestion # Collection props should be read-only
+
+dotnet_code_quality.ca1051.exclude_structs = true # CA1051 is excluded in structs
+dotnet_code_quality.dispose_ownership_transfer_at_constructor = true # CA2000 has a lot of false positives without this
+dotnet_code_quality.dispose_ownership_transfer_at_method_call = true # CA2000 has a lot of false positives without this
+dotnet_code_quality.dispose_analysis_kind = NonExceptionPathsOnlyNotDisposed # CA2000 has a lot of false positives without this
+
+# NUnit
+dotnet_diagnostic.NUnit2001.severity = warning # Consider using Assert.That(expr, Is.False) instead of Assert.False(expr)
+dotnet_diagnostic.NUnit2002.severity = warning # Consider using Assert.That(expr, Is.False) instead of Assert.IsFalse(expr)
+dotnet_diagnostic.NUnit2003.severity = warning # Consider using Assert.That(expr, Is.True) instead of Assert.IsTrue(expr)
+dotnet_diagnostic.NUnit2004.severity = warning # Consider using Assert.That(expr, Is.True) instead of Assert.True(expr)
+dotnet_diagnostic.NUnit2005.severity = warning # Consider using Assert.That(actual, Is.EqualTo(expected)) instead of Assert.AreEqual(expected, actual)
+dotnet_diagnostic.NUnit2006.severity = warning # Consider using Assert.That(actual, Is.Not.EqualTo(expected)) instead of Assert.AreNotEqual(expected, actual)
+
+dotnet_diagnostic.NUnit2010.severity = warning # Use EqualConstraint for better assertion messages in case of failure
+dotnet_diagnostic.NUnit2011.severity = warning # Use ContainsConstraint for better assertion messages in case of failure
+dotnet_diagnostic.NUnit2012.severity = warning # Use StartsWithConstraint for better assertion messages in case of failure
+dotnet_diagnostic.NUnit2013.severity = warning # Use EndsWithConstraint for better assertion messages in case of failure
+dotnet_diagnostic.NUnit2014.severity = warning # Use SomeItemsConstraint for better assertion messages in case of failure
+
+dotnet_diagnostic.NUnit2015.severity = warning # Consider using Assert.That(actual, Is.SameAs(expected)) instead of Assert.AreSame(expected, actual)
+dotnet_diagnostic.NUnit2016.severity = warning # Consider using Assert.That(expr, Is.Null) instead of Assert.Null(expr)
+dotnet_diagnostic.NUnit2017.severity = warning # Consider using Assert.That(expr, Is.Null) instead of Assert.IsNull(expr)
+dotnet_diagnostic.NUnit2018.severity = warning # Consider using Assert.That(expr, Is.Not.Null) instead of Assert.NotNull(expr)
+dotnet_diagnostic.NUnit2028.severity = warning # Consider using Assert.That(actual, Is.GreaterThanOrEqualTo(expected)) instead of Assert.GreaterOrEqual(actual, expected)
+dotnet_diagnostic.NUnit2027.severity = warning # Consider using Assert.That(actual, Is.GreaterThan(expected)) instead of Assert.Greater(actual, expected)
+dotnet_diagnostic.NUnit2029.severity = warning # Consider using Assert.That(actual, Is.LessThan(expected)) instead of Assert.Less(actual, expected)
+dotnet_diagnostic.NUnit2030.severity = warning # Consider using Assert.That(actual, Is.LessThanOrEqualTo(expected)) instead of Assert.LessOrEqual(actual, expected)
+dotnet_diagnostic.NUnit2031.severity = warning # Consider using Assert.That(actual, Is.Not.SameAs(expected)) instead of Assert.AreNotSame(expected, actual)
+dotnet_diagnostic.NUnit2032.severity = warning # Consider using Assert.That(expr, Is.Zero) instead of Assert.Zero(expr)
+dotnet_diagnostic.NUnit2033.severity = warning # Consider using Assert.That(expr, Is.Not.Zero) instead of Assert.NotZero(expr)
+dotnet_diagnostic.NUnit2034.severity = warning # Consider using Assert.That(expr, Is.NaN) instead of Assert.IsNaN(expr)
+dotnet_diagnostic.NUnit2035.severity = warning # Consider using Assert.That(collection, Is.Empty) instead of Assert.IsEmpty(collection)
+dotnet_diagnostic.NUnit2036.severity = warning # Consider using Assert.That(collection, Is.Not.Empty) instead of Assert.IsNotEmpty(collection)
+dotnet_diagnostic.NUnit2037.severity = warning # Consider using Assert.That(collection, Does.Contain(instance)) instead of Assert.Contains(instance, collection)
+dotnet_diagnostic.NUnit2038.severity = warning # Consider using Assert.That(actual, Is.InstanceOf(expected)) instead of Assert.IsInstanceOf(expected, actual)
+dotnet_diagnostic.NUnit2039.severity = warning # Consider using Assert.That(actual, Is.Not.InstanceOf(expected)) instead of Assert.IsNotInstanceOf(expected, actual)
+
+# Verify
+[*.{received,verified}.json]
+charset = utf-8-bom
+end_of_line = lf
+indent_size = unset
+indent_style = unset
+insert_final_newline = false
+tab_width = unset
+trim_trailing_whitespace = false
\ No newline at end of file
diff --git a/Directory.Build.props b/Directory.Build.props
index b3d0b5954..201f39e1d 100644
--- a/Directory.Build.props
+++ b/Directory.Build.props
@@ -3,14 +3,37 @@
latest
enable
Recommended
- true
- true
- true
- true
- true
- False
- False
true
true
+
+ true
+ latest-AllEnabledByDefault
+ true
+ true
+ true
+ true
+ false
+ false
+ true
+
+
+ CA5399;CA1812;
+
+ CS1591;CS1573;
+
+ CA1303;CA1304;CA1305;CA1307;CA1308;CA1309;CA1310;CA1311;
+
+ CA1848;CA1727;
+
+ CA1815;CA1725;
+
+ CA1710;CA1711;CA1720;CA1724;
+
+ CA1502;CA1716;NETSDK1206;IDE0017;IDE0032;IDE0040;IDE0044;IDE0051;IDE0052;IDE0060;IDE0251;IDE1006;IDE1007;IDE1033;IDE1036;IDE1040;IDE1041;IDE1045;IDE1050;IDE1051;IDE1055;IDE1060;IDE1063;
+ IDE1070;IDE1071;IDE1075;IDE1080;IDE1090;CA1024;CA1027;CA1031;CA1032;CA1033;CA1028;CA1034;CA1051;CA1063;CA1069;CA1505;CA1802;CA1814;CA1819;CA1805;CA2214;CA2225;CA2237;
+ $(NoWarn)
+
+
diff --git a/build/Program.cs b/build/Program.cs
index b3bb2c47e..6d7d22f05 100644
--- a/build/Program.cs
+++ b/build/Program.cs
@@ -92,4 +92,4 @@ IEnumerable GetFiles(string d)
Target("default", [Publish], () => Console.WriteLine("Done!"));
-await RunTargetsAndExitAsync(args);
+await RunTargetsAndExitAsync(args).ConfigureAwait(false);
diff --git a/src/SharpCompress/Algorithms/Adler32.cs b/src/SharpCompress/Algorithms/Adler32.cs
index f7ea0eb35..e57584b71 100644
--- a/src/SharpCompress/Algorithms/Adler32.cs
+++ b/src/SharpCompress/Algorithms/Adler32.cs
@@ -8,8 +8,8 @@
using System;
using System.Runtime.CompilerServices;
-using System.Runtime.InteropServices;
#if SUPPORTS_RUNTIME_INTRINSICS
+using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
#endif
@@ -108,8 +108,7 @@ public static int EvenReduceSum(Vector256 accumulator)
// The C# compiler emits this as a compile-time constant embedded in the PE file.
private static ReadOnlySpan Tap1Tap2 =>
- new byte[]
- {
+ [
32,
31,
30,
@@ -142,7 +141,7 @@ public static int EvenReduceSum(Vector256 accumulator)
3,
2,
1, // tap2
- };
+ ];
#endif
///
diff --git a/src/SharpCompress/Archives/AbstractArchive.cs b/src/SharpCompress/Archives/AbstractArchive.cs
index bf7fb9edb..992da114e 100644
--- a/src/SharpCompress/Archives/AbstractArchive.cs
+++ b/src/SharpCompress/Archives/AbstractArchive.cs
@@ -37,8 +37,8 @@ internal AbstractArchive(ArchiveType type)
{
Type = type;
ReaderOptions = new();
- _lazyVolumes = new LazyReadOnlyCollection(Enumerable.Empty());
- _lazyEntries = new LazyReadOnlyCollection(Enumerable.Empty());
+ _lazyVolumes = new LazyReadOnlyCollection([]);
+ _lazyEntries = new LazyReadOnlyCollection([]);
}
public ArchiveType Type { get; }
diff --git a/src/SharpCompress/Archives/AbstractWritableArchive.cs b/src/SharpCompress/Archives/AbstractWritableArchive.cs
index 082b96312..dc3ebcbe9 100644
--- a/src/SharpCompress/Archives/AbstractWritableArchive.cs
+++ b/src/SharpCompress/Archives/AbstractWritableArchive.cs
@@ -31,10 +31,10 @@ public void Dispose()
}
}
- private readonly List newEntries = new();
- private readonly List removedEntries = new();
+ private readonly List newEntries = [];
+ private readonly List removedEntries = [];
- private readonly List modifiedEntries = new();
+ private readonly List modifiedEntries = [];
private bool hasModifications;
private bool pauseRebuilding;
diff --git a/src/SharpCompress/Archives/ArchiveVolumeFactory.cs b/src/SharpCompress/Archives/ArchiveVolumeFactory.cs
index 81a5d4fdd..26d7c6409 100644
--- a/src/SharpCompress/Archives/ArchiveVolumeFactory.cs
+++ b/src/SharpCompress/Archives/ArchiveVolumeFactory.cs
@@ -13,6 +13,7 @@ internal abstract class ArchiveVolumeFactory
//split 001, 002 ...
var m = Regex.Match(part1.Name, @"^(.*\.)([0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
+ {
item = new FileInfo(
Path.Combine(
part1.DirectoryName!,
@@ -22,9 +23,13 @@ internal abstract class ArchiveVolumeFactory
)
)
);
+ }
if (item != null && item.Exists)
+ {
return item;
+ }
+
return null;
}
}
diff --git a/src/SharpCompress/Archives/AutoArchiveFactory.cs b/src/SharpCompress/Archives/AutoArchiveFactory.cs
index 78313df54..6982e108e 100644
--- a/src/SharpCompress/Archives/AutoArchiveFactory.cs
+++ b/src/SharpCompress/Archives/AutoArchiveFactory.cs
@@ -6,7 +6,7 @@
namespace SharpCompress.Archives;
-class AutoArchiveFactory : IArchiveFactory
+internal class AutoArchiveFactory : IArchiveFactory
{
public string Name => nameof(AutoArchiveFactory);
diff --git a/src/SharpCompress/Archives/GZip/GZipArchive.cs b/src/SharpCompress/Archives/GZip/GZipArchive.cs
index 4437ff57d..ca3deb7ae 100644
--- a/src/SharpCompress/Archives/GZip/GZipArchive.cs
+++ b/src/SharpCompress/Archives/GZip/GZipArchive.cs
@@ -166,7 +166,7 @@ protected override GZipArchiveEntry CreateEntryInternal(
bool closeStream
)
{
- if (Entries.Any())
+ if (Entries.Count != 0)
{
throw new InvalidFormatException("Only one entry is allowed in a GZip Archive");
}
diff --git a/src/SharpCompress/Archives/GZip/GZipArchiveEntry.cs b/src/SharpCompress/Archives/GZip/GZipArchiveEntry.cs
index f00e889cb..34b961a09 100644
--- a/src/SharpCompress/Archives/GZip/GZipArchiveEntry.cs
+++ b/src/SharpCompress/Archives/GZip/GZipArchiveEntry.cs
@@ -7,7 +7,10 @@ namespace SharpCompress.Archives.GZip;
public class GZipArchiveEntry : GZipEntry, IArchiveEntry
{
internal GZipArchiveEntry(GZipArchive archive, GZipFilePart? part)
- : base(part) => Archive = archive;
+ : base(part)
+ {
+ Archive = archive;
+ }
public virtual Stream OpenEntryStream()
{
diff --git a/src/SharpCompress/Archives/IArchiveExtensions.cs b/src/SharpCompress/Archives/IArchiveExtensions.cs
index 337e4ccc5..2a7191337 100644
--- a/src/SharpCompress/Archives/IArchiveExtensions.cs
+++ b/src/SharpCompress/Archives/IArchiveExtensions.cs
@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.IO;
-using System.Linq;
using System.Threading;
using SharpCompress.Common;
using SharpCompress.Readers;
diff --git a/src/SharpCompress/Archives/IWriteableArchiveFactory.cs b/src/SharpCompress/Archives/IWriteableArchiveFactory.cs
index 4fae9f558..d7d9f24d3 100644
--- a/src/SharpCompress/Archives/IWriteableArchiveFactory.cs
+++ b/src/SharpCompress/Archives/IWriteableArchiveFactory.cs
@@ -1,20 +1,6 @@
namespace SharpCompress.Archives;
-///
-/// Decorator for used to declare an archive format as able to create writeable archives
-///
-///
-/// Implemented by:
-///
-///
-///
-///
-///
public interface IWriteableArchiveFactory : Factories.IFactory
{
- ///
- /// Creates a new, empty archive, ready to be written.
- ///
- ///
IWritableArchive CreateWriteableArchive();
}
diff --git a/src/SharpCompress/Archives/Rar/FileInfoRarFilePart.cs b/src/SharpCompress/Archives/Rar/FileInfoRarFilePart.cs
index 02d2f769f..84407da8a 100644
--- a/src/SharpCompress/Archives/Rar/FileInfoRarFilePart.cs
+++ b/src/SharpCompress/Archives/Rar/FileInfoRarFilePart.cs
@@ -12,7 +12,10 @@ internal FileInfoRarFilePart(
FileHeader fh,
FileInfo fi
)
- : base(mh, fh, volume.Index, volume.Stream, password) => FileInfo = fi;
+ : base(mh, fh, volume.Index, volume.Stream, password)
+ {
+ FileInfo = fi;
+ }
internal FileInfo FileInfo { get; }
diff --git a/src/SharpCompress/Archives/Rar/RarArchive.cs b/src/SharpCompress/Archives/Rar/RarArchive.cs
index ee4be60e9..59bb77453 100644
--- a/src/SharpCompress/Archives/Rar/RarArchive.cs
+++ b/src/SharpCompress/Archives/Rar/RarArchive.cs
@@ -6,6 +6,7 @@
using SharpCompress.Common.Rar;
using SharpCompress.Common.Rar.Headers;
using SharpCompress.Compressors.Rar;
+using SharpCompress.Compressors.Rar.UnpackV2017;
using SharpCompress.IO;
using SharpCompress.Readers;
using SharpCompress.Readers.Rar;
@@ -15,8 +16,7 @@ namespace SharpCompress.Archives.Rar;
public class RarArchive : AbstractArchive
{
private bool _disposed;
- internal Lazy UnpackV2017 { get; } =
- new(() => new Compressors.Rar.UnpackV2017.Unpack());
+ internal Lazy UnpackV2017 { get; } = new(() => new Unpack());
internal Lazy UnpackV1 { get; } = new(() => new Compressors.Rar.UnpackV1.Unpack());
///
diff --git a/src/SharpCompress/Archives/Rar/RarArchiveEntry.cs b/src/SharpCompress/Archives/Rar/RarArchiveEntry.cs
index 262d7cbe5..40255b18f 100644
--- a/src/SharpCompress/Archives/Rar/RarArchiveEntry.cs
+++ b/src/SharpCompress/Archives/Rar/RarArchiveEntry.cs
@@ -32,7 +32,7 @@ ReaderOptions readerOptions
public IArchive Archive => archive;
- internal override IEnumerable Parts => parts.Cast();
+ internal override IEnumerable Parts => parts;
internal override FileHeader FileHeader => parts.First().FileHeader;
@@ -88,8 +88,10 @@ public bool IsComplete
{
get
{
+#pragma warning disable CA1851
var headers = parts.Select(x => x.FileHeader);
return !headers.First().IsSplitBefore && !headers.Last().IsSplitAfter;
+#pragma warning restore CA1851
}
}
diff --git a/src/SharpCompress/Archives/Rar/RarArchiveEntryFactory.cs b/src/SharpCompress/Archives/Rar/RarArchiveEntryFactory.cs
index ec4ace7c6..21b9c3009 100644
--- a/src/SharpCompress/Archives/Rar/RarArchiveEntryFactory.cs
+++ b/src/SharpCompress/Archives/Rar/RarArchiveEntryFactory.cs
@@ -29,7 +29,7 @@ IEnumerable parts
if (!fp.FileHeader.IsSplitAfter)
{
yield return groupedParts;
- groupedParts = new List();
+ groupedParts = [];
}
}
if (groupedParts.Count > 0)
diff --git a/src/SharpCompress/Archives/Rar/RarArchiveVolumeFactory.cs b/src/SharpCompress/Archives/Rar/RarArchiveVolumeFactory.cs
index f5ce89b8b..1bd98f58f 100644
--- a/src/SharpCompress/Archives/Rar/RarArchiveVolumeFactory.cs
+++ b/src/SharpCompress/Archives/Rar/RarArchiveVolumeFactory.cs
@@ -13,6 +13,7 @@ internal static class RarArchiveVolumeFactory
//new style rar - ..part1 | /part01 | part001 ....
var m = Regex.Match(part1.Name, @"^(.*\.part)([0-9]+)(\.rar)$", RegexOptions.IgnoreCase);
if (m.Success)
+ {
item = new FileInfo(
Path.Combine(
part1.DirectoryName!,
@@ -23,11 +24,13 @@ internal static class RarArchiveVolumeFactory
)
)
);
+ }
else
{
//old style - ...rar, .r00, .r01 ...
m = Regex.Match(part1.Name, @"^(.*\.)([r-z{])(ar|[0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
+ {
item = new FileInfo(
Path.Combine(
part1.DirectoryName!,
@@ -40,12 +43,17 @@ internal static class RarArchiveVolumeFactory
)
)
);
+ }
else //split .001, .002 ....
+ {
return ArchiveVolumeFactory.GetFilePart(index, part1);
+ }
}
if (item != null && item.Exists)
+ {
return item;
+ }
return null; //no more items
}
diff --git a/src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs b/src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs
index 323f07ac7..d0222acbb 100644
--- a/src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs
+++ b/src/SharpCompress/Archives/SevenZip/SevenZipArchive.cs
@@ -113,7 +113,7 @@ private SevenZipArchive(SourceStream sourceStream)
protected override IEnumerable LoadVolumes(SourceStream sourceStream)
{
sourceStream.NotNull("SourceStream is null").LoadAllParts(); //request all streams
- return new SevenZipVolume(sourceStream, ReaderOptions, 0).AsEnumerable(); //simple single volume or split, multivolume not supported
+ return new SevenZipVolume(sourceStream, ReaderOptions).AsEnumerable(); //simple single volume or split, multivolume not supported
}
public static bool IsSevenZipFile(string filePath) => IsSevenZipFile(new FileInfo(filePath));
@@ -139,7 +139,7 @@ IEnumerable volumes
LoadFactory(stream);
if (_database is null)
{
- return Enumerable.Empty();
+ return [];
}
var entries = new SevenZipArchiveEntry[_database._files.Count];
for (var i = 0; i < _database._files.Count; i++)
@@ -186,8 +186,7 @@ public static bool IsSevenZipFile(Stream stream)
}
}
- private static ReadOnlySpan Signature =>
- new byte[] { (byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C };
+ private static ReadOnlySpan Signature => [(byte)'7', (byte)'z', 0xBC, 0xAF, 0x27, 0x1C];
private static bool SignatureMatch(Stream stream)
{
@@ -216,7 +215,10 @@ private sealed class SevenZipReader : AbstractReader this._archive = archive;
+ : base(readerOptions, ArchiveType.SevenZip)
+ {
+ _archive = archive;
+ }
public override SevenZipVolume Volume => _archive.Volumes.Single();
@@ -266,7 +268,10 @@ private class PasswordProvider : IPasswordProvider
{
private readonly string? _password;
- public PasswordProvider(string? password) => _password = password;
+ public PasswordProvider(string? password)
+ {
+ _password = password;
+ }
public string? CryptoGetTextPassword() => _password;
}
diff --git a/src/SharpCompress/Archives/SevenZip/SevenZipArchiveEntry.cs b/src/SharpCompress/Archives/SevenZip/SevenZipArchiveEntry.cs
index 9824ca472..afaf58b4f 100644
--- a/src/SharpCompress/Archives/SevenZip/SevenZipArchiveEntry.cs
+++ b/src/SharpCompress/Archives/SevenZip/SevenZipArchiveEntry.cs
@@ -6,7 +6,10 @@ namespace SharpCompress.Archives.SevenZip;
public class SevenZipArchiveEntry : SevenZipEntry, IArchiveEntry
{
internal SevenZipArchiveEntry(SevenZipArchive archive, SevenZipFilePart part)
- : base(part) => Archive = archive;
+ : base(part)
+ {
+ Archive = archive;
+ }
public Stream OpenEntryStream() => FilePart.GetCompressedStream();
diff --git a/src/SharpCompress/Archives/Tar/TarArchiveEntry.cs b/src/SharpCompress/Archives/Tar/TarArchiveEntry.cs
index 770a71095..7cb08a0fc 100644
--- a/src/SharpCompress/Archives/Tar/TarArchiveEntry.cs
+++ b/src/SharpCompress/Archives/Tar/TarArchiveEntry.cs
@@ -8,7 +8,10 @@ namespace SharpCompress.Archives.Tar;
public class TarArchiveEntry : TarEntry, IArchiveEntry
{
internal TarArchiveEntry(TarArchive archive, TarFilePart? part, CompressionType compressionType)
- : base(part, compressionType) => Archive = archive;
+ : base(part, compressionType)
+ {
+ Archive = archive;
+ }
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();
diff --git a/src/SharpCompress/Archives/Zip/ZipArchive.cs b/src/SharpCompress/Archives/Zip/ZipArchive.cs
index a75b40954..c0dbfa25d 100644
--- a/src/SharpCompress/Archives/Zip/ZipArchive.cs
+++ b/src/SharpCompress/Archives/Zip/ZipArchive.cs
@@ -28,13 +28,14 @@ public class ZipArchive : AbstractWritableArchive
/// Constructor with a SourceStream able to handle FileInfo and Streams.
///
///
- ///
internal ZipArchive(SourceStream sourceStream)
- : base(ArchiveType.Zip, sourceStream) =>
+ : base(ArchiveType.Zip, sourceStream)
+ {
headerFactory = new SeekableZipHeaderFactory(
sourceStream.ReaderOptions.Password,
sourceStream.ReaderOptions.ArchiveEncoding
);
+ }
///
/// Constructor expects a filepath to an existing file.
@@ -224,7 +225,7 @@ protected override IEnumerable LoadVolumes(SourceStream stream)
var streams = stream.Streams.ToList();
var idx = 0;
- if (streams.Count() > 1) //test part 2 - true = multipart not split
+ if (streams.Count > 1) //test part 2 - true = multipart not split
{
streams[1].Position += 4; //skip the POST_DATA_DESCRIPTOR to prevent an exception
var isZip = IsZipFile(streams[1], ReaderOptions.Password, ReaderOptions.BufferSize);
@@ -287,7 +288,7 @@ protected override IEnumerable LoadEntries(IEnumerable();
+ var bytes = ((DirectoryEndHeader)h).Comment ?? [];
vols.Last().Comment = ReaderOptions.ArchiveEncoding.Decode(bytes);
yield break;
}
diff --git a/src/SharpCompress/Archives/Zip/ZipArchiveEntry.cs b/src/SharpCompress/Archives/Zip/ZipArchiveEntry.cs
index 4c980f910..3b7f9110d 100644
--- a/src/SharpCompress/Archives/Zip/ZipArchiveEntry.cs
+++ b/src/SharpCompress/Archives/Zip/ZipArchiveEntry.cs
@@ -7,7 +7,10 @@ namespace SharpCompress.Archives.Zip;
public class ZipArchiveEntry : ZipEntry, IArchiveEntry
{
internal ZipArchiveEntry(ZipArchive archive, SeekableZipFilePart? part)
- : base(part) => Archive = archive;
+ : base(part)
+ {
+ Archive = archive;
+ }
public virtual Stream OpenEntryStream() => Parts.Single().GetCompressedStream().NotNull();
diff --git a/src/SharpCompress/Archives/Zip/ZipArchiveVolumeFactory.cs b/src/SharpCompress/Archives/Zip/ZipArchiveVolumeFactory.cs
index 1b2f093d1..b484ea70c 100644
--- a/src/SharpCompress/Archives/Zip/ZipArchiveVolumeFactory.cs
+++ b/src/SharpCompress/Archives/Zip/ZipArchiveVolumeFactory.cs
@@ -8,12 +8,13 @@ internal static class ZipArchiveVolumeFactory
{
internal static FileInfo? GetFilePart(int index, FileInfo part1) //base the name on the first part
{
- FileInfo? item = null;
+ FileInfo item;
//load files with zip/zipx first. Swapped to end once loaded in ZipArchive
//new style .zip, z01.. | .zipx, zx01 - if the numbers go beyond 99 then they use 100 ...1000 etc
var m = Regex.Match(part1.Name, @"^(.*\.)(zipx?|zx?[0-9]+)$", RegexOptions.IgnoreCase);
if (m.Success)
+ {
item = new FileInfo(
Path.Combine(
part1.DirectoryName!,
@@ -24,11 +25,16 @@ internal static class ZipArchiveVolumeFactory
)
)
);
+ }
else //split - 001, 002 ...
+ {
return ArchiveVolumeFactory.GetFilePart(index, part1);
+ }
- if (item != null && item.Exists)
+ if (item.Exists)
+ {
return item;
+ }
return null; //no more items
}
diff --git a/src/SharpCompress/Common/Arc/ArcEntry.cs b/src/SharpCompress/Common/Arc/ArcEntry.cs
index a67f10d11..a11c20480 100644
--- a/src/SharpCompress/Common/Arc/ArcEntry.cs
+++ b/src/SharpCompress/Common/Arc/ArcEntry.cs
@@ -1,60 +1,55 @@
using System;
using System.Collections.Generic;
-using System.IO;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
-using SharpCompress.Common.GZip;
-using SharpCompress.Common.Tar;
-
-namespace SharpCompress.Common.Arc
+
+namespace SharpCompress.Common.Arc;
+
+public class ArcEntry : Entry
{
- public class ArcEntry : Entry
- {
- private readonly ArcFilePart? _filePart;
+ private readonly ArcFilePart? _filePart;
- internal ArcEntry(ArcFilePart? filePart)
- {
- _filePart = filePart;
- }
+ internal ArcEntry(ArcFilePart? filePart)
+ {
+ _filePart = filePart;
+ }
- public override long Crc
+ public override long Crc
+ {
+ get
{
- get
+ if (_filePart == null)
{
- if (_filePart == null)
- {
- return 0;
- }
- return _filePart.Header.Crc16;
+ return 0;
}
+ return _filePart.Header.Crc16;
}
+ }
- public override string? Key => _filePart?.Header.Name;
+ public override string? Key => _filePart?.Header.Name;
- public override string? LinkTarget => null;
+ public override string? LinkTarget => null;
- public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0;
+ public override long CompressedSize => _filePart?.Header.CompressedSize ?? 0;
- public override CompressionType CompressionType =>
- _filePart?.Header.CompressionMethod ?? CompressionType.Unknown;
+ public override CompressionType CompressionType =>
+ _filePart?.Header.CompressionMethod ?? CompressionType.Unknown;
- public override long Size => throw new NotImplementedException();
+#pragma warning disable CA1065
+ public override long Size => throw new NotImplementedException();
+#pragma warning restore CA1065
- public override DateTime? LastModifiedTime => null;
+ public override DateTime? LastModifiedTime => null;
- public override DateTime? CreatedTime => null;
+ public override DateTime? CreatedTime => null;
- public override DateTime? LastAccessedTime => null;
+ public override DateTime? LastAccessedTime => null;
- public override DateTime? ArchivedTime => null;
+ public override DateTime? ArchivedTime => null;
- public override bool IsEncrypted => false;
+ public override bool IsEncrypted => false;
- public override bool IsDirectory => false;
+ public override bool IsDirectory => false;
- public override bool IsSplitAfter => false;
+ public override bool IsSplitAfter => false;
- internal override IEnumerable Parts => _filePart.Empty();
- }
+ internal override IEnumerable Parts => _filePart.Empty();
}
diff --git a/src/SharpCompress/Common/Arc/ArcEntryHeader.cs b/src/SharpCompress/Common/Arc/ArcEntryHeader.cs
index 137b190a6..52ddac8ca 100644
--- a/src/SharpCompress/Common/Arc/ArcEntryHeader.cs
+++ b/src/SharpCompress/Common/Arc/ArcEntryHeader.cs
@@ -1,76 +1,70 @@
using System;
using System.IO;
-using System.Linq;
using System.Text;
-namespace SharpCompress.Common.Arc
+namespace SharpCompress.Common.Arc;
+
+public class ArcEntryHeader
{
- public class ArcEntryHeader
- {
- public ArchiveEncoding ArchiveEncoding { get; }
- public CompressionType CompressionMethod { get; private set; }
- public string? Name { get; private set; }
- public long CompressedSize { get; private set; }
- public DateTime DateTime { get; private set; }
- public int Crc16 { get; private set; }
- public long OriginalSize { get; private set; }
- public long DataStartPosition { get; private set; }
+ public ArchiveEncoding ArchiveEncoding { get; }
+ public CompressionType CompressionMethod { get; private set; }
+ public string? Name { get; private set; }
+ public long CompressedSize { get; private set; }
+ public DateTime DateTime { get; private set; }
+ public int Crc16 { get; private set; }
+ public long OriginalSize { get; private set; }
+ public long DataStartPosition { get; private set; }
- public ArcEntryHeader(ArchiveEncoding archiveEncoding)
- {
- this.ArchiveEncoding = archiveEncoding;
- }
+ public ArcEntryHeader(ArchiveEncoding archiveEncoding)
+ {
+ ArchiveEncoding = archiveEncoding;
+ }
- public ArcEntryHeader? ReadHeader(Stream stream)
+ public ArcEntryHeader? ReadHeader(Stream stream)
+ {
+ byte[] headerBytes = new byte[29];
+ if (stream.Read(headerBytes, 0, headerBytes.Length) != headerBytes.Length)
{
- byte[] headerBytes = new byte[29];
- if (stream.Read(headerBytes, 0, headerBytes.Length) != headerBytes.Length)
- {
- return null;
- }
- DataStartPosition = stream.Position;
- return LoadFrom(headerBytes);
+ return null;
}
+ DataStartPosition = stream.Position;
+ return LoadFrom(headerBytes);
+ }
- public ArcEntryHeader LoadFrom(byte[] headerBytes)
- {
- CompressionMethod = GetCompressionType(headerBytes[1]);
+ public ArcEntryHeader LoadFrom(byte[] headerBytes)
+ {
+ CompressionMethod = GetCompressionType(headerBytes[1]);
- // Read name
- int nameEnd = Array.IndexOf(headerBytes, (byte)0, 1); // Find null terminator
- Name = Encoding.UTF8.GetString(headerBytes, 2, nameEnd > 0 ? nameEnd - 2 : 12);
+ // Read name
+ int nameEnd = Array.IndexOf(headerBytes, (byte)0, 1); // Find null terminator
+ Name = Encoding.UTF8.GetString(headerBytes, 2, nameEnd > 0 ? nameEnd - 2 : 12);
- int offset = 15;
- CompressedSize = BitConverter.ToUInt32(headerBytes, offset);
- offset += 4;
- uint rawDateTime = BitConverter.ToUInt32(headerBytes, offset);
- DateTime = ConvertToDateTime(rawDateTime);
- offset += 4;
- Crc16 = BitConverter.ToUInt16(headerBytes, offset);
- offset += 2;
- OriginalSize = BitConverter.ToUInt32(headerBytes, offset);
- return this;
- }
+ int offset = 15;
+ CompressedSize = BitConverter.ToUInt32(headerBytes, offset);
+ offset += 4;
+ uint rawDateTime = BitConverter.ToUInt32(headerBytes, offset);
+ DateTime = ConvertToDateTime(rawDateTime);
+ offset += 4;
+ Crc16 = BitConverter.ToUInt16(headerBytes, offset);
+ offset += 2;
+ OriginalSize = BitConverter.ToUInt32(headerBytes, offset);
+ return this;
+ }
- private CompressionType GetCompressionType(byte value)
+ private CompressionType GetCompressionType(byte value) =>
+ value switch
{
- return value switch
- {
- 1 or 2 => CompressionType.None,
- 3 => CompressionType.RLE90,
- 4 => CompressionType.Squeezed,
- 5 or 6 or 7 or 8 => CompressionType.Crunched,
- 9 => CompressionType.Squashed,
- 10 => CompressionType.Crushed,
- 11 => CompressionType.Distilled,
- _ => CompressionType.Unknown,
- };
- }
+ 1 or 2 => CompressionType.None,
+ 3 => CompressionType.RLE90,
+ 4 => CompressionType.Squeezed,
+ 5 or 6 or 7 or 8 => CompressionType.Crunched,
+ 9 => CompressionType.Squashed,
+ 10 => CompressionType.Crushed,
+ 11 => CompressionType.Distilled,
+ _ => CompressionType.Unknown,
+ };
- public static DateTime ConvertToDateTime(long rawDateTime)
- {
- // Convert Unix timestamp to DateTime (UTC)
- return DateTimeOffset.FromUnixTimeSeconds(rawDateTime).UtcDateTime;
- }
- }
+ public static DateTime ConvertToDateTime(long rawDateTime) =>
+ // Convert Unix timestamp to DateTime (UTC)
+ DateTimeOffset.FromUnixTimeSeconds(rawDateTime).UtcDateTime;
}
diff --git a/src/SharpCompress/Common/Arc/ArcFilePart.cs b/src/SharpCompress/Common/Arc/ArcFilePart.cs
index d1ff2cfc5..10dfdea10 100644
--- a/src/SharpCompress/Common/Arc/ArcFilePart.cs
+++ b/src/SharpCompress/Common/Arc/ArcFilePart.cs
@@ -1,75 +1,59 @@
using System;
-using System.Collections.Generic;
using System.IO;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
-using SharpCompress.Common.GZip;
-using SharpCompress.Common.Tar;
-using SharpCompress.Common.Tar.Headers;
-using SharpCompress.Common.Zip.Headers;
-using SharpCompress.Compressors.Lzw;
+using SharpCompress.Compressors.ArcLzw;
using SharpCompress.Compressors.RLE90;
using SharpCompress.Compressors.Squeezed;
using SharpCompress.IO;
-namespace SharpCompress.Common.Arc
+namespace SharpCompress.Common.Arc;
+
+public class ArcFilePart : FilePart
{
- public class ArcFilePart : FilePart
- {
- private readonly Stream? _stream;
+ private readonly Stream? _stream;
- internal ArcFilePart(ArcEntryHeader localArcHeader, Stream? seekableStream)
- : base(localArcHeader.ArchiveEncoding)
- {
- _stream = seekableStream;
- Header = localArcHeader;
- }
+ internal ArcFilePart(ArcEntryHeader localArcHeader, Stream? seekableStream)
+ : base(localArcHeader.ArchiveEncoding)
+ {
+ _stream = seekableStream;
+ Header = localArcHeader;
+ }
- internal ArcEntryHeader Header { get; set; }
+ internal ArcEntryHeader Header { get; set; }
- internal override string? FilePartName => Header.Name;
+ internal override string? FilePartName => Header.Name;
- internal override Stream GetCompressedStream()
+ internal override Stream GetCompressedStream()
+ {
+ if (_stream != null)
{
- if (_stream != null)
+ Stream compressedStream;
+ switch (Header.CompressionMethod)
{
- Stream compressedStream;
- switch (Header.CompressionMethod)
- {
- case CompressionType.None:
- compressedStream = new ReadOnlySubStream(
- _stream,
- Header.DataStartPosition,
- Header.CompressedSize
- );
- break;
- case CompressionType.RLE90:
- compressedStream = new RunLength90Stream(
- _stream,
- (int)Header.CompressedSize
- );
- break;
- case CompressionType.Squeezed:
- compressedStream = new SqueezeStream(_stream, (int)Header.CompressedSize);
- break;
- case CompressionType.Crunched:
- compressedStream = new ArcLzwStream(
- _stream,
- (int)Header.CompressedSize,
- true
- );
- break;
- default:
- throw new NotSupportedException(
- "CompressionMethod: " + Header.CompressionMethod
- );
- }
- return compressedStream;
+ case CompressionType.None:
+ compressedStream = new ReadOnlySubStream(
+ _stream,
+ Header.DataStartPosition,
+ Header.CompressedSize
+ );
+ break;
+ case CompressionType.RLE90:
+ compressedStream = new RunLength90Stream(_stream, (int)Header.CompressedSize);
+ break;
+ case CompressionType.Squeezed:
+ compressedStream = new SqueezeStream(_stream, (int)Header.CompressedSize);
+ break;
+ case CompressionType.Crunched:
+ compressedStream = new ArcLzwStream(_stream, (int)Header.CompressedSize);
+ break;
+ default:
+ throw new NotSupportedException(
+ "CompressionMethod: " + Header.CompressionMethod
+ );
}
- return _stream.NotNull();
+ return compressedStream;
}
-
- internal override Stream? GetRawStream() => _stream;
+ return _stream.NotNull();
}
+
+ internal override Stream? GetRawStream() => _stream;
}
diff --git a/src/SharpCompress/Common/Arc/ArcVolume.cs b/src/SharpCompress/Common/Arc/ArcVolume.cs
index 8ebd11ea9..2137454a7 100644
--- a/src/SharpCompress/Common/Arc/ArcVolume.cs
+++ b/src/SharpCompress/Common/Arc/ArcVolume.cs
@@ -1,16 +1,10 @@
-using System;
-using System.Collections.Generic;
using System.IO;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
using SharpCompress.Readers;
-namespace SharpCompress.Common.Arc
+namespace SharpCompress.Common.Arc;
+
+public class ArcVolume : Volume
{
- public class ArcVolume : Volume
- {
- public ArcVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
- : base(stream, readerOptions, index) { }
- }
+ public ArcVolume(Stream stream, ReaderOptions readerOptions, int index = 0)
+ : base(stream, readerOptions, index) { }
}
diff --git a/src/SharpCompress/Common/ArchiveEncoding.cs b/src/SharpCompress/Common/ArchiveEncoding.cs
index 3701a93bf..6079c7351 100644
--- a/src/SharpCompress/Common/ArchiveEncoding.cs
+++ b/src/SharpCompress/Common/ArchiveEncoding.cs
@@ -36,7 +36,10 @@ public ArchiveEncoding(Encoding def, Encoding password)
}
#if !NETFRAMEWORK
- static ArchiveEncoding() => Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
+ static ArchiveEncoding()
+ {
+ Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
+ }
#endif
public string Decode(byte[] bytes) => Decode(bytes, 0, bytes.Length);
diff --git a/src/SharpCompress/Common/ArchiveExtractionEventArgs.cs b/src/SharpCompress/Common/ArchiveExtractionEventArgs.cs
index 808177489..bd5d04a8b 100644
--- a/src/SharpCompress/Common/ArchiveExtractionEventArgs.cs
+++ b/src/SharpCompress/Common/ArchiveExtractionEventArgs.cs
@@ -4,7 +4,10 @@ namespace SharpCompress.Common;
public class ArchiveExtractionEventArgs : EventArgs
{
- internal ArchiveExtractionEventArgs(T entry) => Item = entry;
+ internal ArchiveExtractionEventArgs(T entry)
+ {
+ Item = entry;
+ }
public T Item { get; }
}
diff --git a/src/SharpCompress/Common/EntryStream.cs b/src/SharpCompress/Common/EntryStream.cs
index a0fe736a4..d1d8097f9 100644
--- a/src/SharpCompress/Common/EntryStream.cs
+++ b/src/SharpCompress/Common/EntryStream.cs
@@ -1,6 +1,5 @@
using System;
using System.IO;
-using System.IO.Compression;
using SharpCompress.IO;
using SharpCompress.Readers;
diff --git a/src/SharpCompress/Common/FilePart.cs b/src/SharpCompress/Common/FilePart.cs
index 54e3c9f9c..9bcff7a8b 100644
--- a/src/SharpCompress/Common/FilePart.cs
+++ b/src/SharpCompress/Common/FilePart.cs
@@ -4,7 +4,10 @@ namespace SharpCompress.Common;
public abstract class FilePart
{
- protected FilePart(ArchiveEncoding archiveEncoding) => ArchiveEncoding = archiveEncoding;
+ protected FilePart(ArchiveEncoding archiveEncoding)
+ {
+ ArchiveEncoding = archiveEncoding;
+ }
internal ArchiveEncoding ArchiveEncoding { get; }
diff --git a/src/SharpCompress/Common/GZip/GZipEntry.cs b/src/SharpCompress/Common/GZip/GZipEntry.cs
index 9a551d07f..465e89256 100644
--- a/src/SharpCompress/Common/GZip/GZipEntry.cs
+++ b/src/SharpCompress/Common/GZip/GZipEntry.cs
@@ -8,7 +8,10 @@ public class GZipEntry : Entry
{
private readonly GZipFilePart? _filePart;
- internal GZipEntry(GZipFilePart? filePart) => _filePart = filePart;
+ internal GZipEntry(GZipFilePart? filePart)
+ {
+ _filePart = filePart;
+ }
public override CompressionType CompressionType => CompressionType.GZip;
diff --git a/src/SharpCompress/Common/GZip/GZipFilePart.cs b/src/SharpCompress/Common/GZip/GZipFilePart.cs
index 4a1c95155..635d9b1b6 100644
--- a/src/SharpCompress/Common/GZip/GZipFilePart.cs
+++ b/src/SharpCompress/Common/GZip/GZipFilePart.cs
@@ -37,7 +37,7 @@ internal GZipFilePart(Stream stream, ArchiveEncoding archiveEncoding)
internal override string? FilePartName => _name;
internal override Stream GetCompressedStream() =>
- new DeflateStream(_stream, CompressionMode.Decompress, CompressionLevel.Default);
+ new DeflateStream(_stream, CompressionMode.Decompress);
internal override Stream GetRawStream() => _stream;
diff --git a/src/SharpCompress/Common/GZip/GZipVolume.cs b/src/SharpCompress/Common/GZip/GZipVolume.cs
index 600ba8e39..ea2c7e4fc 100644
--- a/src/SharpCompress/Common/GZip/GZipVolume.cs
+++ b/src/SharpCompress/Common/GZip/GZipVolume.cs
@@ -9,7 +9,10 @@ public GZipVolume(Stream stream, ReaderOptions? options, int index)
: base(stream, options, index) { }
public GZipVolume(FileInfo fileInfo, ReaderOptions options)
- : base(fileInfo.OpenRead(), options) => options.LeaveStreamOpen = false;
+ : base(fileInfo.OpenRead(), options)
+ {
+ options.LeaveStreamOpen = false;
+ }
public override bool IsFirstVolume => true;
diff --git a/src/SharpCompress/Common/Rar/CryptKey3.cs b/src/SharpCompress/Common/Rar/CryptKey3.cs
index 93e451b1e..254c692fb 100644
--- a/src/SharpCompress/Common/Rar/CryptKey3.cs
+++ b/src/SharpCompress/Common/Rar/CryptKey3.cs
@@ -12,7 +12,10 @@ internal class CryptKey3 : ICryptKey
private string _password;
- public CryptKey3(string password) => _password = password ?? "";
+ public CryptKey3(string password)
+ {
+ _password = password ?? "";
+ }
public ICryptoTransform Transformer(byte[] salt)
{
diff --git a/src/SharpCompress/Common/Rar/CryptKey5.cs b/src/SharpCompress/Common/Rar/CryptKey5.cs
index 90778c5af..e808d9fe3 100644
--- a/src/SharpCompress/Common/Rar/CryptKey5.cs
+++ b/src/SharpCompress/Common/Rar/CryptKey5.cs
@@ -8,14 +8,14 @@ namespace SharpCompress.Common.Rar;
internal class CryptKey5 : ICryptKey
{
- const int AES_256 = 256;
- const int DERIVED_KEY_LENGTH = 0x10;
- const int SHA256_DIGEST_SIZE = 32;
+ private const int AES_256 = 256;
+ private const int DERIVED_KEY_LENGTH = 0x10;
+ private const int SHA256_DIGEST_SIZE = 32;
private string _password;
private Rar5CryptoInfo _cryptoInfo;
- private byte[] _pswCheck = { };
- private byte[] _hashKey = { };
+ private byte[] _pswCheck = [];
+ private byte[] _hashKey = [];
public CryptKey5(string? password, Rar5CryptoInfo rar5CryptoInfo)
{
diff --git a/src/SharpCompress/Common/Rar/Headers/MarkHeader.cs b/src/SharpCompress/Common/Rar/Headers/MarkHeader.cs
index 9ca0f9b21..d359aef4f 100644
--- a/src/SharpCompress/Common/Rar/Headers/MarkHeader.cs
+++ b/src/SharpCompress/Common/Rar/Headers/MarkHeader.cs
@@ -11,7 +11,10 @@ internal class MarkHeader : IRarHeader
public bool IsRar5 { get; }
- private MarkHeader(bool isRar5) => IsRar5 = isRar5;
+ private MarkHeader(bool isRar5)
+ {
+ IsRar5 = isRar5;
+ }
public HeaderType HeaderType => HeaderType.Mark;
diff --git a/src/SharpCompress/Common/Rar/Headers/NewSubHeaderType.cs b/src/SharpCompress/Common/Rar/Headers/NewSubHeaderType.cs
index b45f98bbb..6c11bb517 100644
--- a/src/SharpCompress/Common/Rar/Headers/NewSubHeaderType.cs
+++ b/src/SharpCompress/Common/Rar/Headers/NewSubHeaderType.cs
@@ -42,4 +42,8 @@ internal bool Equals(byte[] bytes)
}
public bool Equals(NewSubHeaderType? other) => other is not null && Equals(other._bytes);
+
+ public override bool Equals(object? obj) => obj is NewSubHeaderType other && Equals(other);
+
+ public override int GetHashCode() => _bytes.GetHashCode();
}
diff --git a/src/SharpCompress/Common/Rar/Rar5CryptoInfo.cs b/src/SharpCompress/Common/Rar/Rar5CryptoInfo.cs
index 7b8edff9b..c48e0ef92 100644
--- a/src/SharpCompress/Common/Rar/Rar5CryptoInfo.cs
+++ b/src/SharpCompress/Common/Rar/Rar5CryptoInfo.cs
@@ -49,9 +49,9 @@ public void ReadInitV(MarkingBinaryReader reader) =>
public int LG2Count = 0;
- public byte[] InitV = { };
+ public byte[] InitV = [];
- public byte[] Salt = { };
+ public byte[] Salt = [];
- public byte[] PswCheck = { };
+ public byte[] PswCheck = [];
}
diff --git a/src/SharpCompress/Common/Rar/RarCryptoBinaryReader.cs b/src/SharpCompress/Common/Rar/RarCryptoBinaryReader.cs
index 6e44286b2..58845a9a1 100644
--- a/src/SharpCompress/Common/Rar/RarCryptoBinaryReader.cs
+++ b/src/SharpCompress/Common/Rar/RarCryptoBinaryReader.cs
@@ -22,7 +22,10 @@ public RarCryptoBinaryReader(Stream stream, ICryptKey cryptKey)
}
public RarCryptoBinaryReader(Stream stream, ICryptKey cryptKey, byte[] salt)
- : base(stream) => _rijndael = new BlockTransformer(cryptKey.Transformer(salt));
+ : base(stream)
+ {
+ _rijndael = new BlockTransformer(cryptKey.Transformer(salt));
+ }
// track read count ourselves rather than using the underlying stream since we buffer
public override long CurrentReadByteCount
diff --git a/src/SharpCompress/Common/Rar/RarVolume.cs b/src/SharpCompress/Common/Rar/RarVolume.cs
index 4bb2f84a4..dc9edb159 100644
--- a/src/SharpCompress/Common/Rar/RarVolume.cs
+++ b/src/SharpCompress/Common/Rar/RarVolume.cs
@@ -18,7 +18,10 @@ public abstract class RarVolume : Volume
private int _maxCompressionAlgorithm;
internal RarVolume(StreamingMode mode, Stream stream, ReaderOptions options, int index)
- : base(stream, options, index) => _headerFactory = new RarHeaderFactory(mode, options);
+ : base(stream, options, index)
+ {
+ _headerFactory = new RarHeaderFactory(mode, options);
+ }
private ArchiveHeader? ArchiveHeader { get; set; }
diff --git a/src/SharpCompress/Common/SevenZip/ArchiveDatabase.cs b/src/SharpCompress/Common/SevenZip/ArchiveDatabase.cs
index 4bac08ada..946daae3b 100644
--- a/src/SharpCompress/Common/SevenZip/ArchiveDatabase.cs
+++ b/src/SharpCompress/Common/SevenZip/ArchiveDatabase.cs
@@ -15,20 +15,22 @@ internal class ArchiveDatabase
internal long _startPositionAfterHeader;
internal long _dataStartPosition;
- internal List _packSizes = new();
- internal List _packCrCs = new();
- internal List _folders = new();
+ internal List _packSizes = [];
+ internal List _packCrCs = [];
+ internal List _folders = [];
internal List _numUnpackStreamsVector;
- internal List _files = new();
+ internal List _files = [];
- internal List _packStreamStartPositions = new();
- internal List _folderStartFileIndex = new();
- internal List _fileIndexToFolderIndexMap = new();
+ internal List _packStreamStartPositions = [];
+ internal List _folderStartFileIndex = [];
+ internal List _fileIndexToFolderIndexMap = [];
internal IPasswordProvider PasswordProvider { get; }
- public ArchiveDatabase(IPasswordProvider passwordProvider) =>
+ public ArchiveDatabase(IPasswordProvider passwordProvider)
+ {
PasswordProvider = passwordProvider;
+ }
internal void Clear()
{
diff --git a/src/SharpCompress/Common/SevenZip/ArchiveReader.cs b/src/SharpCompress/Common/SevenZip/ArchiveReader.cs
index 288a72989..2ec07e5b7 100644
--- a/src/SharpCompress/Common/SevenZip/ArchiveReader.cs
+++ b/src/SharpCompress/Common/SevenZip/ArchiveReader.cs
@@ -1387,7 +1387,7 @@ internal class CExtractFolderInfo
{
internal int _fileIndex;
internal int _folderIndex;
- internal List _extractStatuses = new();
+ internal List _extractStatuses = [];
internal CExtractFolderInfo(int fileIndex, int folderIndex)
{
diff --git a/src/SharpCompress/Common/SevenZip/CFileItem.cs b/src/SharpCompress/Common/SevenZip/CFileItem.cs
index 254a444f1..35077feff 100644
--- a/src/SharpCompress/Common/SevenZip/CFileItem.cs
+++ b/src/SharpCompress/Common/SevenZip/CFileItem.cs
@@ -22,5 +22,8 @@ internal class CFileItem
public long? StartPos { get; internal set; }
public bool IsAnti { get; internal set; }
- internal CFileItem() => HasStream = true;
+ internal CFileItem()
+ {
+ HasStream = true;
+ }
}
diff --git a/src/SharpCompress/Common/SevenZip/CFolder.cs b/src/SharpCompress/Common/SevenZip/CFolder.cs
index 8b0123da1..076a51d1d 100644
--- a/src/SharpCompress/Common/SevenZip/CFolder.cs
+++ b/src/SharpCompress/Common/SevenZip/CFolder.cs
@@ -6,11 +6,11 @@ namespace SharpCompress.Common.SevenZip;
internal class CFolder
{
- internal List _coders = new();
- internal List _bindPairs = new();
- internal List _packStreams = new();
+ internal List _coders = [];
+ internal List _bindPairs = [];
+ internal List _packStreams = [];
internal int _firstPackStreamId;
- internal List _unpackSizes = new();
+ internal List _unpackSizes = [];
internal uint? _unpackCrc;
internal bool UnpackCrcDefined => _unpackCrc != null;
diff --git a/src/SharpCompress/Common/SevenZip/CMethodId.cs b/src/SharpCompress/Common/SevenZip/CMethodId.cs
index 8494aad53..ec140a75b 100644
--- a/src/SharpCompress/Common/SevenZip/CMethodId.cs
+++ b/src/SharpCompress/Common/SevenZip/CMethodId.cs
@@ -1,6 +1,8 @@
-namespace SharpCompress.Common.SevenZip;
+using System;
-internal readonly struct CMethodId
+namespace SharpCompress.Common.SevenZip;
+
+internal readonly struct CMethodId : IEquatable
{
public const ulong K_COPY_ID = 0;
public const ulong K_LZMA_ID = 0x030101;
@@ -14,7 +16,10 @@ internal readonly struct CMethodId
public readonly ulong _id;
- public CMethodId(ulong id) => _id = id;
+ public CMethodId(ulong id)
+ {
+ _id = id;
+ }
public override int GetHashCode() => _id.GetHashCode();
diff --git a/src/SharpCompress/Common/SevenZip/SevenZipEntry.cs b/src/SharpCompress/Common/SevenZip/SevenZipEntry.cs
index 79df43a07..3f2434089 100644
--- a/src/SharpCompress/Common/SevenZip/SevenZipEntry.cs
+++ b/src/SharpCompress/Common/SevenZip/SevenZipEntry.cs
@@ -5,7 +5,10 @@ namespace SharpCompress.Common.SevenZip;
public class SevenZipEntry : Entry
{
- internal SevenZipEntry(SevenZipFilePart filePart) => FilePart = filePart;
+ internal SevenZipEntry(SevenZipFilePart filePart)
+ {
+ FilePart = filePart;
+ }
internal SevenZipFilePart FilePart { get; }
diff --git a/src/SharpCompress/Common/Tar/Headers/TarHeader.cs b/src/SharpCompress/Common/Tar/Headers/TarHeader.cs
index 1a04740ff..6f1592dbb 100644
--- a/src/SharpCompress/Common/Tar/Headers/TarHeader.cs
+++ b/src/SharpCompress/Common/Tar/Headers/TarHeader.cs
@@ -9,7 +9,10 @@ internal sealed class TarHeader
{
internal static readonly DateTime EPOCH = new(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
- public TarHeader(ArchiveEncoding archiveEncoding) => ArchiveEncoding = archiveEncoding;
+ public TarHeader(ArchiveEncoding archiveEncoding)
+ {
+ ArchiveEncoding = archiveEncoding;
+ }
internal string? Name { get; set; }
internal string? LinkName { get; set; }
@@ -306,7 +309,7 @@ private static long ReadAsciiInt64(byte[] buffer, int offset, int count)
}
private static readonly byte[] eightSpaces =
- {
+ [
(byte)' ',
(byte)' ',
(byte)' ',
@@ -315,7 +318,7 @@ private static long ReadAsciiInt64(byte[] buffer, int offset, int count)
(byte)' ',
(byte)' ',
(byte)' ',
- };
+ ];
internal static bool checkChecksum(byte[] buf)
{
diff --git a/src/SharpCompress/Common/Volume.cs b/src/SharpCompress/Common/Volume.cs
index 54dc49953..0c4e58a10 100644
--- a/src/SharpCompress/Common/Volume.cs
+++ b/src/SharpCompress/Common/Volume.cs
@@ -21,7 +21,9 @@ internal Volume(Stream stream, ReaderOptions? readerOptions, int index = 0)
}
if (stream is IStreamStack ss)
+ {
ss.SetBuffer(ReaderOptions.BufferSize, true);
+ }
_actualStream = stream;
}
diff --git a/src/SharpCompress/Common/Zip/Headers/ZipFileEntry.cs b/src/SharpCompress/Common/Zip/Headers/ZipFileEntry.cs
index bd36b0659..586d63398 100644
--- a/src/SharpCompress/Common/Zip/Headers/ZipFileEntry.cs
+++ b/src/SharpCompress/Common/Zip/Headers/ZipFileEntry.cs
@@ -10,7 +10,7 @@ internal abstract class ZipFileEntry : ZipHeader
protected ZipFileEntry(ZipHeaderType type, ArchiveEncoding archiveEncoding)
: base(type)
{
- Extra = new List();
+ Extra = [];
ArchiveEncoding = archiveEncoding;
}
diff --git a/src/SharpCompress/Common/Zip/PkwareTraditionalEncryptionData.cs b/src/SharpCompress/Common/Zip/PkwareTraditionalEncryptionData.cs
index 7b5178579..79d3e3a86 100644
--- a/src/SharpCompress/Common/Zip/PkwareTraditionalEncryptionData.cs
+++ b/src/SharpCompress/Common/Zip/PkwareTraditionalEncryptionData.cs
@@ -7,7 +7,7 @@ namespace SharpCompress.Common.Zip;
internal class PkwareTraditionalEncryptionData
{
private static readonly CRC32 CRC32 = new();
- private readonly uint[] _keys = { 0x12345678, 0x23456789, 0x34567890 };
+ private readonly uint[] _keys = [0x12345678, 0x23456789, 0x34567890];
private readonly ArchiveEncoding _archiveEncoding;
private PkwareTraditionalEncryptionData(string password, ArchiveEncoding archiveEncoding)
diff --git a/src/SharpCompress/Common/Zip/SeekableZipFilePart.cs b/src/SharpCompress/Common/Zip/SeekableZipFilePart.cs
index e75727112..78ff830ef 100644
--- a/src/SharpCompress/Common/Zip/SeekableZipFilePart.cs
+++ b/src/SharpCompress/Common/Zip/SeekableZipFilePart.cs
@@ -13,7 +13,10 @@ internal SeekableZipFilePart(
DirectoryEntryHeader header,
Stream stream
)
- : base(header, stream) => _headerFactory = headerFactory;
+ : base(header, stream)
+ {
+ _headerFactory = headerFactory;
+ }
internal override Stream GetCompressedStream()
{
diff --git a/src/SharpCompress/Common/Zip/SeekableZipHeaderFactory.cs b/src/SharpCompress/Common/Zip/SeekableZipHeaderFactory.cs
index 005f64801..f283e0f89 100644
--- a/src/SharpCompress/Common/Zip/SeekableZipHeaderFactory.cs
+++ b/src/SharpCompress/Common/Zip/SeekableZipHeaderFactory.cs
@@ -113,7 +113,7 @@ private static void SeekBackToHeader(Stream stream, BinaryReader reader)
? (int)stream.Length
: MAX_SEARCH_LENGTH_FOR_EOCD;
// We search for marker in reverse to find the first occurance
- byte[] needle = { 0x06, 0x05, 0x4b, 0x50 };
+ byte[] needle = [0x06, 0x05, 0x4b, 0x50];
stream.Seek(-len, SeekOrigin.End);
diff --git a/src/SharpCompress/Common/Zip/StreamingZipFilePart.cs b/src/SharpCompress/Common/Zip/StreamingZipFilePart.cs
index 5464a9cc8..76d7c5426 100644
--- a/src/SharpCompress/Common/Zip/StreamingZipFilePart.cs
+++ b/src/SharpCompress/Common/Zip/StreamingZipFilePart.cs
@@ -47,9 +47,9 @@ internal BinaryReader FixStreamedFileLocation(ref SharpCompressStream rewindable
// If we had TotalIn / TotalOut we could have used them
Header.CompressedSize = _decompressionStream.Position;
- if (_decompressionStream is DeflateStream deflateStream)
+ if (_decompressionStream is DeflateStream)
{
- ((IStreamStack)rewindableStream).StackSeek(0);
+ rewindableStream.StackSeek(0);
}
Skipped = true;
diff --git a/src/SharpCompress/Common/Zip/StreamingZipHeaderFactory.cs b/src/SharpCompress/Common/Zip/StreamingZipHeaderFactory.cs
index 70b61bae2..f8d620c46 100644
--- a/src/SharpCompress/Common/Zip/StreamingZipHeaderFactory.cs
+++ b/src/SharpCompress/Common/Zip/StreamingZipHeaderFactory.cs
@@ -16,7 +16,10 @@ internal StreamingZipHeaderFactory(
ArchiveEncoding archiveEncoding,
IEnumerable? entries
)
- : base(StreamingMode.Streaming, password, archiveEncoding) => _entries = entries;
+ : base(StreamingMode.Streaming, password, archiveEncoding)
+ {
+ _entries = entries;
+ }
internal IEnumerable ReadStreamHeader(Stream stream)
{
@@ -98,7 +101,9 @@ internal IEnumerable ReadStreamHeader(Stream stream)
else if (_lastEntryHeader != null && _lastEntryHeader.IsZip64)
{
if (_lastEntryHeader.Part is null)
+ {
continue;
+ }
//reader = ((StreamingZipFilePart)_lastEntryHeader.Part).FixStreamedFileLocation(
// ref rewindableStream
@@ -170,7 +175,7 @@ internal IEnumerable ReadStreamHeader(Stream stream)
&& local_header.CompressedSize == 0
&& local_header.UncompressedSize == 0
&& local_header.Crc == 0
- && local_header.IsDirectory == false
+ && !local_header.IsDirectory
);
if (dir_header != null)
@@ -188,7 +193,7 @@ internal IEnumerable ReadStreamHeader(Stream stream)
else if (local_header.Flags.HasFlag(HeaderFlags.UsePostDataDescriptor))
{
var nextHeaderBytes = reader.ReadUInt32();
- ((IStreamStack)rewindableStream).Rewind(sizeof(uint));
+ rewindableStream.Rewind(sizeof(uint));
// Check if next data is PostDataDescriptor, streamed file with 0 length
header.HasData = !IsHeader(nextHeaderBytes);
diff --git a/src/SharpCompress/Compressors/ADC/ADCStream.cs b/src/SharpCompress/Compressors/ADC/ADCStream.cs
index 70e8fd8b3..bdb9a32f3 100644
--- a/src/SharpCompress/Compressors/ADC/ADCStream.cs
+++ b/src/SharpCompress/Compressors/ADC/ADCStream.cs
@@ -152,11 +152,9 @@ public override int Read(byte[] buffer, int offset, int count)
throw new ArgumentOutOfRangeException(nameof(count));
}
- var size = -1;
-
if (_outBuffer is null)
{
- size = ADCBase.Decompress(_stream, out _outBuffer);
+ var _ = ADCBase.Decompress(_stream, out _outBuffer);
_outPosition = 0;
}
@@ -172,7 +170,7 @@ public override int Read(byte[] buffer, int offset, int count)
copied += piece;
_position += piece;
toCopy -= piece;
- size = ADCBase.Decompress(_stream, out _outBuffer);
+ int size = ADCBase.Decompress(_stream, out _outBuffer);
_outPosition = 0;
if (size == 0 || _outBuffer is null || _outBuffer.Length == 0)
{
diff --git a/src/SharpCompress/Compressors/ArcLzw/ArcLzwStream.cs b/src/SharpCompress/Compressors/ArcLzw/ArcLzwStream.cs
index be19b3fec..f2d2e62c3 100644
--- a/src/SharpCompress/Compressors/ArcLzw/ArcLzwStream.cs
+++ b/src/SharpCompress/Compressors/ArcLzw/ArcLzwStream.cs
@@ -3,9 +3,10 @@
using System.IO;
using System.Linq;
using SharpCompress.Compressors.RLE90;
-using SharpCompress.Compressors.Squeezed;
using SharpCompress.IO;
+namespace SharpCompress.Compressors.ArcLzw;
+
public partial class ArcLzwStream : Stream, IStreamStack
{
#if DEBUG_STREAMS
@@ -69,7 +70,7 @@ public ArcLzwStream(Stream stream, int compressedSize, bool useCrunched = true)
maxcodemax = 0;
}
- private ushort? GetCode(BitReader reader)
+ private ushort? GetCode(ArcLzwStream.BitReader reader)
{
if (clearFlag || freeEnt > maxcode)
{
@@ -113,7 +114,7 @@ public List Decompress(byte[] input, bool useCrunched)
suffix[i] = (byte)i;
}
- var reader = new BitReader(input);
+ var reader = new ArcLzwStream.BitReader(input);
freeEnt = FIRST;
if (GetCode(reader) is ushort old)
@@ -129,7 +130,7 @@ public List Decompress(byte[] input, bool useCrunched)
{
Array.Clear(prefix, 0, prefix.Length);
clearFlag = true;
- freeEnt = (ushort)(FIRST - 1);
+ freeEnt = FIRST - 1;
if (GetCode(reader) is ushort c)
{
@@ -180,7 +181,9 @@ public List Decompress(byte[] input, bool useCrunched)
public override bool CanRead => true;
public override bool CanSeek => false;
public override bool CanWrite => false;
+#pragma warning disable CA1065
public override long Length => throw new NotImplementedException();
+#pragma warning restore CA1065
public override long Position
{
get => _stream.Position;
diff --git a/src/SharpCompress/Compressors/ArcLzw/BitReader.cs b/src/SharpCompress/Compressors/ArcLzw/BitReader.cs
index 414e5e0a5..5c2a9cc85 100644
--- a/src/SharpCompress/Compressors/ArcLzw/BitReader.cs
+++ b/src/SharpCompress/Compressors/ArcLzw/BitReader.cs
@@ -1,5 +1,7 @@
using System;
+namespace SharpCompress.Compressors.ArcLzw;
+
public partial class ArcLzwStream
{
public class BitReader
@@ -18,13 +20,17 @@ public BitReader(byte[] inputData)
public int? ReadBits(int bitCount)
{
if (bitCount <= 0 || bitCount > 16)
+ {
throw new ArgumentOutOfRangeException(
nameof(bitCount),
"Bit count must be between 1 and 16"
);
+ }
if (bytePosition >= data.Length)
+ {
return null;
+ }
int result = 0;
int bitsRead = 0;
@@ -32,7 +38,9 @@ public BitReader(byte[] inputData)
while (bitsRead < bitCount)
{
if (bytePosition >= data.Length)
+ {
return null;
+ }
int bitsAvailable = 8 - bitPosition;
int bitsToRead = Math.Min(bitCount - bitsRead, bitsAvailable);
diff --git a/src/SharpCompress/Compressors/BZip2/BZip2Constants.cs b/src/SharpCompress/Compressors/BZip2/BZip2Constants.cs
index 85c63b398..eef5e9645 100644
--- a/src/SharpCompress/Compressors/BZip2/BZip2Constants.cs
+++ b/src/SharpCompress/Compressors/BZip2/BZip2Constants.cs
@@ -43,7 +43,7 @@ internal class BZip2Constants
public const int NUM_OVERSHOOT_BYTES = 20;
public static int[] rNums =
- {
+ [
619,
720,
127,
@@ -556,5 +556,5 @@ internal class BZip2Constants
364,
936,
638,
- };
+ ];
}
diff --git a/src/SharpCompress/Compressors/BZip2/CBZip2InputStream.cs b/src/SharpCompress/Compressors/BZip2/CBZip2InputStream.cs
index abff49735..af2fe34ea 100644
--- a/src/SharpCompress/Compressors/BZip2/CBZip2InputStream.cs
+++ b/src/SharpCompress/Compressors/BZip2/CBZip2InputStream.cs
@@ -60,20 +60,17 @@ int IStreamStack.BufferPosition
void IStreamStack.SetPosition(long position) { }
- private static void Cadvise()
- {
+ private static void Cadvise() =>
+ throw
//System.out.Println("CRC Error");
- throw new InvalidOperationException("BZip2 error");
- }
+ new InvalidOperationException("BZip2 error");
private static void BadBGLengths() => Cadvise();
private static void BitStreamEOF() => Cadvise();
- private static void CompressedStreamEOF()
- {
+ private static void CompressedStreamEOF() =>
throw new InvalidOperationException("BZip2 compressed file ends unexpectedly");
- }
private void MakeMaps()
{
@@ -833,7 +830,7 @@ cache misses.
/*
This loop is hammered during decompression,
hence the unrolling.
-
+
for (j = nextSym-1; j > 0; j--) yy[j] = yy[j-1];
*/
diff --git a/src/SharpCompress/Compressors/BZip2/CBZip2OutputStream.cs b/src/SharpCompress/Compressors/BZip2/CBZip2OutputStream.cs
index 555c6fcb2..e8216fbeb 100644
--- a/src/SharpCompress/Compressors/BZip2/CBZip2OutputStream.cs
+++ b/src/SharpCompress/Compressors/BZip2/CBZip2OutputStream.cs
@@ -1306,8 +1306,7 @@ private void QSort3(int loSt, int hiSt, int dSt)
n = block[zptr[unLo] + d + 1] - med;
if (n == 0)
{
- var temp = 0;
- temp = zptr[unLo];
+ var temp = zptr[unLo];
zptr[unLo] = zptr[ltLo];
zptr[ltLo] = temp;
ltLo++;
@@ -1330,8 +1329,7 @@ private void QSort3(int loSt, int hiSt, int dSt)
n = block[zptr[unHi] + d + 1] - med;
if (n == 0)
{
- var temp = 0;
- temp = zptr[unHi];
+ var temp = zptr[unHi];
zptr[unHi] = zptr[gtHi];
zptr[gtHi] = temp;
gtHi--;
@@ -1843,7 +1841,7 @@ because the number of elems to sort is
*/
private readonly int[] incs =
- {
+ [
1,
4,
13,
@@ -1858,7 +1856,7 @@ because the number of elems to sort is
265720,
797161,
2391484,
- };
+ ];
private void AllocateCompressStructures()
{
diff --git a/src/SharpCompress/Compressors/BZip2/CRC.cs b/src/SharpCompress/Compressors/BZip2/CRC.cs
index 6e8e034a3..1042e2082 100644
--- a/src/SharpCompress/Compressors/BZip2/CRC.cs
+++ b/src/SharpCompress/Compressors/BZip2/CRC.cs
@@ -32,7 +32,7 @@ namespace SharpCompress.Compressors.BZip2;
internal class CRC
{
public static int[] crc32Table =
- {
+ [
0x00000000,
0x04c11db7,
0x09823b6e,
@@ -289,9 +289,12 @@ internal class CRC
unchecked((int)0xb8757bda),
unchecked((int)0xb5365d03),
unchecked((int)0xb1f740b4),
- };
+ ];
- public CRC() => InitialiseCRC();
+ public CRC()
+ {
+ InitialiseCRC();
+ }
internal void InitialiseCRC() => globalCrc = unchecked((int)0xffffffff);
diff --git a/src/SharpCompress/Compressors/Deflate/DeflateManager.cs b/src/SharpCompress/Compressors/Deflate/DeflateManager.cs
index 933f57529..a864d5441 100644
--- a/src/SharpCompress/Compressors/Deflate/DeflateManager.cs
+++ b/src/SharpCompress/Compressors/Deflate/DeflateManager.cs
@@ -77,7 +77,7 @@ internal sealed partial class DeflateManager
{
// extra bits for each length code
internal static readonly int[] ExtraLengthBits =
- {
+ [
0,
0,
0,
@@ -107,11 +107,11 @@ internal sealed partial class DeflateManager
5,
5,
0,
- };
+ ];
// extra bits for each distance code
internal static readonly int[] ExtraDistanceBits =
- {
+ [
0,
0,
0,
@@ -142,7 +142,7 @@ internal sealed partial class DeflateManager
12,
13,
13,
- };
+ ];
internal enum BlockState
{
@@ -202,9 +202,10 @@ DeflateFlavor flavor
public static Config Lookup(CompressionLevel level) => Table[(int)level];
- static Config() =>
- Table = new[]
- {
+ static Config()
+ {
+ Table =
+ [
new Config(0, 0, 0, 0, DeflateFlavor.Store),
new Config(4, 4, 8, 4, DeflateFlavor.Fast),
new Config(4, 5, 16, 8, DeflateFlavor.Fast),
@@ -215,7 +216,8 @@ static Config() =>
new Config(8, 32, 128, 256, DeflateFlavor.Slow),
new Config(32, 128, 258, 1024, DeflateFlavor.Slow),
new Config(32, 258, 258, 4096, DeflateFlavor.Slow),
- };
+ ];
+ }
private static readonly Config[] Table;
}
@@ -223,7 +225,7 @@ static Config() =>
private CompressFunc DeflateFunction;
private static readonly string[] _ErrorMessage =
- {
+ [
"need dictionary",
"stream end",
"",
@@ -234,7 +236,7 @@ static Config() =>
"buffer error",
"incompatible version",
"",
- };
+ ];
// preset dictionary flag in zlib header
private const int PRESET_DICT = 0x20;
diff --git a/src/SharpCompress/Compressors/Deflate/DeflateStream.cs b/src/SharpCompress/Compressors/Deflate/DeflateStream.cs
index 8f268f004..d8a4b7113 100644
--- a/src/SharpCompress/Compressors/Deflate/DeflateStream.cs
+++ b/src/SharpCompress/Compressors/Deflate/DeflateStream.cs
@@ -27,7 +27,6 @@
using System;
using System.IO;
using System.Text;
-using System.Threading;
using SharpCompress.IO;
namespace SharpCompress.Compressors.Deflate;
diff --git a/src/SharpCompress/Compressors/Deflate/InfTree.cs b/src/SharpCompress/Compressors/Deflate/InfTree.cs
index 2ea4cbc48..63890ffe0 100644
--- a/src/SharpCompress/Compressors/Deflate/InfTree.cs
+++ b/src/SharpCompress/Compressors/Deflate/InfTree.cs
@@ -79,7 +79,7 @@ internal sealed class InfTree
//UPGRADE_NOTE: Final was removed from the declaration of 'fixed_tl'. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
internal static readonly int[] fixed_tl =
- {
+ [
96,
7,
256,
@@ -1616,11 +1616,11 @@ internal sealed class InfTree
0,
9,
255,
- };
+ ];
//UPGRADE_NOTE: Final was removed from the declaration of 'fixed_td'. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
internal static readonly int[] fixed_td =
- {
+ [
80,
5,
1,
@@ -1717,12 +1717,12 @@ internal sealed class InfTree
192,
5,
24577,
- };
+ ];
// Tables for deflate from PKZIP's appnote.txt.
//UPGRADE_NOTE: Final was removed from the declaration of 'cplens'. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
internal static readonly int[] cplens =
- {
+ [
3,
4,
5,
@@ -1754,12 +1754,12 @@ internal sealed class InfTree
258,
0,
0,
- };
+ ];
// see note #13 above about 258
//UPGRADE_NOTE: Final was removed from the declaration of 'cplext'. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
internal static readonly int[] cplext =
- {
+ [
0,
0,
0,
@@ -1791,11 +1791,11 @@ internal sealed class InfTree
0,
112,
112,
- };
+ ];
//UPGRADE_NOTE: Final was removed from the declaration of 'cpdist'. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
internal static readonly int[] cpdist =
- {
+ [
1,
2,
3,
@@ -1826,11 +1826,11 @@ internal sealed class InfTree
12289,
16385,
24577,
- };
+ ];
//UPGRADE_NOTE: Final was removed from the declaration of 'cpdext'. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
internal static readonly int[] cpdext =
- {
+ [
0,
0,
0,
@@ -1861,7 +1861,7 @@ internal sealed class InfTree
12,
13,
13,
- };
+ ];
// If BMAX needs to be larger than 16, then h and x[] should be uLong.
internal int[] c; // bit length count table
diff --git a/src/SharpCompress/Compressors/Deflate/Inflate.cs b/src/SharpCompress/Compressors/Deflate/Inflate.cs
index fd1c764ad..999972a45 100644
--- a/src/SharpCompress/Compressors/Deflate/Inflate.cs
+++ b/src/SharpCompress/Compressors/Deflate/Inflate.cs
@@ -74,7 +74,7 @@ internal sealed class InflateBlocks
// Table for deflate from PKZIP's appnote.txt.
internal static readonly int[] border =
- {
+ [
16,
17,
18,
@@ -94,7 +94,7 @@ internal sealed class InflateBlocks
14,
1,
15,
- };
+ ];
internal ZlibCodec _codec; // pointer back to this zlib stream
internal int[] bb = new int[1]; // bit length tree depth
@@ -825,7 +825,7 @@ internal static class InternalInflateConstants
{
// And'ing with mask[n] masks the lower n bits
internal static readonly int[] InflateMask =
- {
+ [
0x00000000,
0x00000001,
0x00000003,
@@ -843,7 +843,7 @@ internal static class InternalInflateConstants
0x00003fff,
0x00007fff,
0x0000ffff,
- };
+ ];
}
internal sealed class InflateCodes
@@ -1628,7 +1628,7 @@ internal sealed class InflateManager
private const int PRESET_DICT = 0x20;
private const int Z_DEFLATED = 8;
- private static readonly byte[] mark = { 0, 0, 0xff, 0xff };
+ private static readonly byte[] mark = [0, 0, 0xff, 0xff];
internal ZlibCodec _codec; // pointer back to this zlib stream
internal InflateBlocks blocks; // current inflate_blocks state
@@ -1651,8 +1651,10 @@ internal sealed class InflateManager
public InflateManager() { }
- public InflateManager(bool expectRfc1950HeaderBytes) =>
+ public InflateManager(bool expectRfc1950HeaderBytes)
+ {
HandleRfc1950HeaderBytes = expectRfc1950HeaderBytes;
+ }
internal bool HandleRfc1950HeaderBytes { get; set; } = true;
diff --git a/src/SharpCompress/Compressors/Deflate/Tree.cs b/src/SharpCompress/Compressors/Deflate/Tree.cs
index e5c9b52af..c600473e8 100644
--- a/src/SharpCompress/Compressors/Deflate/Tree.cs
+++ b/src/SharpCompress/Compressors/Deflate/Tree.cs
@@ -76,7 +76,7 @@ private sealed class Tree
private static readonly int HEAP_SIZE = ((2 * InternalConstants.L_CODES) + 1);
internal static readonly sbyte[] bl_order =
- {
+ [
16,
17,
18,
@@ -96,7 +96,7 @@ private sealed class Tree
14,
1,
15,
- };
+ ];
// The lengths of the bit length codes are sent in order of decreasing
// probability, to avoid transmitting the lengths for unused bit
@@ -106,7 +106,7 @@ private sealed class Tree
//internal const int DIST_CODE_LEN = 512;
private static readonly sbyte[] _dist_code =
- {
+ [
0,
1,
2,
@@ -619,10 +619,10 @@ private sealed class Tree
29,
29,
29,
- };
+ ];
internal static readonly sbyte[] LengthCode =
- {
+ [
0,
1,
2,
@@ -879,10 +879,10 @@ private sealed class Tree
27,
27,
28,
- };
+ ];
internal static readonly int[] LengthBase =
- {
+ [
0,
1,
2,
@@ -912,10 +912,10 @@ private sealed class Tree
192,
224,
0,
- };
+ ];
internal static readonly int[] DistanceBase =
- {
+ [
0,
1,
2,
@@ -946,7 +946,7 @@ private sealed class Tree
12288,
16384,
24576,
- };
+ ];
internal short[] dyn_tree; // the dynamic tree
internal int max_code; // largest code with non zero frequency
diff --git a/src/SharpCompress/Compressors/Deflate/Zlib.cs b/src/SharpCompress/Compressors/Deflate/Zlib.cs
index 9ba751d88..d5d0d51dd 100644
--- a/src/SharpCompress/Compressors/Deflate/Zlib.cs
+++ b/src/SharpCompress/Compressors/Deflate/Zlib.cs
@@ -283,7 +283,7 @@ internal static class InternalConstants
internal sealed class StaticTree
{
internal static readonly short[] lengthAndLiteralsTreeCodes =
- {
+ [
12,
8,
140,
@@ -860,10 +860,10 @@ internal sealed class StaticTree
8,
227,
8,
- };
+ ];
internal static readonly short[] distTreeCodes =
- {
+ [
0,
5,
16,
@@ -924,11 +924,11 @@ internal sealed class StaticTree
5,
23,
5,
- };
+ ];
// extra bits for each bit length code
internal static readonly int[] extra_blbits =
- {
+ [
0,
0,
0,
@@ -948,7 +948,7 @@ internal sealed class StaticTree
2,
3,
7,
- };
+ ];
internal static readonly StaticTree Literals;
internal static readonly StaticTree Distances;
diff --git a/src/SharpCompress/Compressors/Deflate/ZlibBaseStream.cs b/src/SharpCompress/Compressors/Deflate/ZlibBaseStream.cs
index 155a3556a..8ec2d38be 100644
--- a/src/SharpCompress/Compressors/Deflate/ZlibBaseStream.cs
+++ b/src/SharpCompress/Compressors/Deflate/ZlibBaseStream.cs
@@ -386,7 +386,7 @@ public override void Flush()
{
_stream.Flush();
//rewind the buffer
- ((IStreamStack)this).Rewind(z.AvailableBytesIn); //unused
+ this.Rewind(z.AvailableBytesIn); //unused
z.AvailableBytesIn = 0;
}
@@ -671,7 +671,7 @@ public override Int32 Read(Byte[] buffer, Int32 offset, Int32 count)
if (rc == ZlibConstants.Z_STREAM_END && z.AvailableBytesIn != 0 && !_wantCompress)
{
//rewind the buffer
- ((IStreamStack)this).Rewind(z.AvailableBytesIn); //unused
+ this.Rewind(z.AvailableBytesIn); //unused
z.AvailableBytesIn = 0;
}
diff --git a/src/SharpCompress/Compressors/Deflate64/DeflateInput.cs b/src/SharpCompress/Compressors/Deflate64/DeflateInput.cs
index 304a2a78e..f1b9b9add 100644
--- a/src/SharpCompress/Compressors/Deflate64/DeflateInput.cs
+++ b/src/SharpCompress/Compressors/Deflate64/DeflateInput.cs
@@ -8,7 +8,10 @@ namespace SharpCompress.Compressors.Deflate64;
internal sealed class DeflateInput
{
- public DeflateInput(byte[] buffer) => Buffer = buffer;
+ public DeflateInput(byte[] buffer)
+ {
+ Buffer = buffer;
+ }
public byte[] Buffer { get; }
public int Count { get; set; }
diff --git a/src/SharpCompress/Compressors/Deflate64/FastEncoderStatus.cs b/src/SharpCompress/Compressors/Deflate64/FastEncoderStatus.cs
index bc8214a31..f4c7160fe 100644
--- a/src/SharpCompress/Compressors/Deflate64/FastEncoderStatus.cs
+++ b/src/SharpCompress/Compressors/Deflate64/FastEncoderStatus.cs
@@ -12,8 +12,7 @@ internal static class FastEncoderStatics
// static information for encoding, DO NOT MODIFY
internal static ReadOnlySpan FAST_ENCODER_TREE_STRUCTURE_DATA =>
- new byte[]
- {
+ [
0xec,
0xbd,
0x07,
@@ -112,11 +111,10 @@ internal static class FastEncoderStatics
0x7c,
0x1f,
0x3f,
- };
+ ];
internal static ReadOnlySpan B_FINAL_FAST_ENCODER_TREE_STRUCTURE_DATA =>
- new byte[]
- {
+ [
0xed,
0xbd,
0x07,
@@ -215,7 +213,7 @@ internal static class FastEncoderStatics
0x7c,
0x1f,
0x3f,
- };
+ ];
// Output a currentMatch with length matchLen (>= MIN_MATCH) and displacement matchPos
//
@@ -248,7 +246,7 @@ internal static class FastEncoderStatics
// and the rest is the code bits.
internal static readonly uint[] FAST_ENCODER_LITERAL_CODE_INFO =
- {
+ [
0x0000d7ee,
0x0004d7ee,
0x0002d7ee,
@@ -762,10 +760,10 @@ internal static class FastEncoderStatics
0x003be7f1,
0x003de7f1,
0x000047eb,
- };
+ ];
internal static readonly uint[] FAST_ENCODER_DISTANCE_CODE_INFO =
- {
+ [
0x00000f06,
0x0001ff0a,
0x0003ff0b,
@@ -798,10 +796,10 @@ internal static class FastEncoderStatics
0x000017d5,
0x00000000,
0x00000100,
- };
+ ];
internal static readonly uint[] BIT_MASK =
- {
+ [
0,
1,
3,
@@ -818,9 +816,9 @@ internal static class FastEncoderStatics
8191,
16383,
32767,
- };
+ ];
internal static readonly byte[] EXTRA_LENGTH_BITS =
- {
+ [
0,
0,
0,
@@ -850,9 +848,9 @@ internal static class FastEncoderStatics
5,
5,
0,
- };
+ ];
internal static readonly byte[] EXTRA_DISTANCE_BITS =
- {
+ [
0,
0,
0,
@@ -885,7 +883,7 @@ internal static class FastEncoderStatics
13,
0,
0,
- };
+ ];
internal const int NUM_CHARS = 256;
internal const int NUM_LENGTH_BASE_CODES = 29;
internal const int NUM_DIST_BASE_CODES = 30;
diff --git a/src/SharpCompress/Compressors/Deflate64/InflaterManaged.cs b/src/SharpCompress/Compressors/Deflate64/InflaterManaged.cs
index 5caf257d0..208dfc94f 100644
--- a/src/SharpCompress/Compressors/Deflate64/InflaterManaged.cs
+++ b/src/SharpCompress/Compressors/Deflate64/InflaterManaged.cs
@@ -40,43 +40,12 @@ internal sealed class InflaterManaged
// Extra bits for length code 257 - 285.
private static ReadOnlySpan S_EXTRA_LENGTH_BITS =>
- new byte[]
- {
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 1,
- 1,
- 1,
- 1,
- 2,
- 2,
- 2,
- 2,
- 3,
- 3,
- 3,
- 3,
- 4,
- 4,
- 4,
- 4,
- 5,
- 5,
- 5,
- 5,
- 16,
- };
+ [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 16];
// The base length for length code 257 - 285.
// The formula to get the real length for a length code is lengthBase[code - 257] + (value stored in extraBits)
private static readonly int[] S_LENGTH_BASE =
- {
+ [
3,
4,
5,
@@ -106,12 +75,12 @@ internal sealed class InflaterManaged
195,
227,
3,
- };
+ ];
// The base distance for distance code 0 - 31
// The real distance for a distance code is distanceBasePosition[code] + (value stored in extraBits)
private static readonly int[] S_DISTANCE_BASE_POSITION =
- {
+ [
1,
2,
3,
@@ -144,15 +113,14 @@ internal sealed class InflaterManaged
24577,
32769,
49153,
- };
+ ];
// code lengths for code length alphabet is stored in following order
private static ReadOnlySpan S_CODE_ORDER =>
- new byte[] { 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 };
+ [16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15];
private static ReadOnlySpan S_STATIC_DISTANCE_TREE_TABLE =>
- new byte[]
- {
+ [
0x00,
0x10,
0x08,
@@ -185,7 +153,7 @@ internal sealed class InflaterManaged
0x17,
0x0f,
0x1f,
- };
+ ];
private readonly OutputWindow _output;
private readonly InputBuffer _input;
diff --git a/src/SharpCompress/Compressors/Explode/ExplodeStream.cs b/src/SharpCompress/Compressors/Explode/ExplodeStream.cs
index 4b34b5055..c05f060db 100644
--- a/src/SharpCompress/Compressors/Explode/ExplodeStream.cs
+++ b/src/SharpCompress/Compressors/Explode/ExplodeStream.cs
@@ -70,7 +70,7 @@ HeaderFlags generalPurposeBitFlag
this.DebugConstruct(typeof(ExplodeStream));
#endif
this.compressedSize = (int)compressedSize;
- unCompressedSize = (long)uncompressedSize;
+ unCompressedSize = uncompressedSize;
this.generalPurposeBitFlag = generalPurposeBitFlag;
explode_SetTables();
@@ -86,25 +86,15 @@ protected override void Dispose(bool disposing)
base.Dispose(disposing);
}
- public override void Flush()
- {
- throw new NotImplementedException();
- }
+ public override void Flush() => throw new NotImplementedException();
- public override long Seek(long offset, SeekOrigin origin)
- {
+ public override long Seek(long offset, SeekOrigin origin) =>
throw new NotImplementedException();
- }
- public override void SetLength(long value)
- {
- throw new NotImplementedException();
- }
+ public override void SetLength(long value) => throw new NotImplementedException();
- public override void Write(byte[] buffer, int offset, int count)
- {
+ public override void Write(byte[] buffer, int offset, int count) =>
throw new NotImplementedException();
- }
public override bool CanRead => true;
public override bool CanSeek => false;
@@ -116,8 +106,8 @@ public override long Position
set { }
}
- static uint[] mask_bits = new uint[]
- {
+ static uint[] mask_bits =
+ [
0x0000,
0x0001,
0x0003,
@@ -135,11 +125,11 @@ public override long Position
0x3fff,
0x7fff,
0xffff,
- };
+ ];
/* Tables for length and distance */
- static int[] cplen2 = new int[]
- {
+ static int[] cplen2 =
+ [
2,
3,
4,
@@ -204,10 +194,10 @@ public override long Position
63,
64,
65,
- };
+ ];
- static int[] cplen3 = new int[]
- {
+ static int[] cplen3 =
+ [
3,
4,
5,
@@ -272,10 +262,10 @@ public override long Position
64,
65,
66,
- };
+ ];
- static int[] extra = new int[]
- {
+ static int[] extra =
+ [
0,
0,
0,
@@ -340,10 +330,10 @@ public override long Position
0,
0,
8,
- };
+ ];
- static int[] cpdist4 = new int[]
- {
+ static int[] cpdist4 =
+ [
1,
65,
129,
@@ -408,10 +398,10 @@ public override long Position
3905,
3969,
4033,
- };
+ ];
- static int[] cpdist8 = new int[]
- {
+ static int[] cpdist8 =
+ [
1,
129,
257,
@@ -476,7 +466,7 @@ public override long Position
7809,
7937,
8065,
- };
+ ];
private int get_tree(int[] arrBitLengths, int numberExpected)
/* Get the bit lengths for a code representation from the compressed
@@ -492,7 +482,10 @@ Otherwise zero is returned. */
int bitLengthOfCodes = (nextByte & 0xf) + 1; /* bits in code (1..16) */
int numOfCodes = ((nextByte & 0xf0) >> 4) + 1; /* codes with those bits (1..16) */
if (outIndex + numOfCodes > numberExpected)
+ {
return 4; /* don't overflow arrBitLengths[] */
+ }
+
do
{
arrBitLengths[outIndex++] = bitLengthOfCodes;
@@ -516,7 +509,9 @@ private int explode_SetTables()
{
bitsForLiteralCodeTable = 9; /* base table size for literals */
if ((returnCode = get_tree(arrBitLengthsForCodes, 256)) != 0)
+ {
return returnCode;
+ }
if (
(
@@ -531,10 +526,14 @@ ref bitsForLiteralCodeTable
)
) != 0
)
+ {
return returnCode;
+ }
if ((returnCode = get_tree(arrBitLengthsForCodes, 64)) != 0)
+ {
return returnCode;
+ }
if (
(
@@ -549,13 +548,17 @@ ref bitsForLengthCodeTable
)
) != 0
)
+ {
return returnCode;
+ }
}
else
/* No literal tree--minimum match length is 2 */
{
if ((returnCode = get_tree(arrBitLengthsForCodes, 64)) != 0)
+ {
return returnCode;
+ }
hufLiteralCodeTable = null;
@@ -572,11 +575,15 @@ ref bitsForLengthCodeTable
)
) != 0
)
+ {
return returnCode;
+ }
}
if ((returnCode = get_tree(arrBitLengthsForCodes, 64)) != 0)
- return (int)returnCode;
+ {
+ return returnCode;
+ }
if ((generalPurposeBitFlag & HeaderFlags.Bit1) != 0) /* true if 8K */
{
@@ -635,9 +642,14 @@ int DecodeHuft(huftNode[] htab, int bits, uint mask, out huftNode huftPointer, o
DumpBits(huftPointer.NumberOfBitsUsed);
e = huftPointer.NumberOfExtraBits;
if (e <= 32)
+ {
break;
+ }
+
if (e == INVALID_CODE)
+ {
return 1;
+ }
e &= 31;
NeedBits(e);
@@ -690,7 +702,9 @@ public override int Read(byte[] buffer, int offset, int count)
out _
) != 0
)
+ {
throw new Exception("Error decoding literal value");
+ }
nextByte = (byte)huftPointer.Value;
}
@@ -706,7 +720,9 @@ out _
outBytesCount++;
if (windowIndex == WSIZE)
+ {
windowIndex = 0;
+ }
continue;
}
@@ -725,7 +741,9 @@ out _
out _
) != 0
)
+ {
throw new Exception("Error decoding distance high bits");
+ }
distance = windowIndex - (distance + huftPointer.Value); /* construct offset */
@@ -739,7 +757,9 @@ out _
out int extraBitLength
) != 0
)
+ {
throw new Exception("Error decoding coded length");
+ }
length = huftPointer.Value;
@@ -751,7 +771,9 @@ out int extraBitLength
}
if (length > (unCompressedSize - outBytesCount))
+ {
length = (int)(unCompressedSize - outBytesCount);
+ }
distance &= WSIZE - 1;
}
@@ -764,10 +786,14 @@ out int extraBitLength
outBytesCount++;
if (distance == WSIZE)
+ {
distance = 0;
+ }
if (windowIndex == WSIZE)
+ {
windowIndex = 0;
+ }
length--;
}
diff --git a/src/SharpCompress/Compressors/Explode/HuftTree.cs b/src/SharpCompress/Compressors/Explode/HuftTree.cs
index d216cfc79..050947575 100644
--- a/src/SharpCompress/Compressors/Explode/HuftTree.cs
+++ b/src/SharpCompress/Compressors/Explode/HuftTree.cs
@@ -46,7 +46,9 @@ so that no bits beyond that code are fetched when that code is
int[] arrBitLengthCount = new int[BMAX + 1];
for (int i = 0; i < BMAX + 1; i++)
+ {
arrBitLengthCount[i] = 0;
+ }
int pIndex = 0;
int counterCurrentCode = numberOfCodes;
@@ -64,20 +66,32 @@ so that no bits beyond that code are fetched when that code is
/* Find minimum and maximum length, bound *outBitsForTable by those */
int counter;
for (counter = 1; counter <= BMAX; counter++)
+ {
if (arrBitLengthCount[counter] != 0)
+ {
break;
+ }
+ }
int numberOfBitsInCurrentCode = counter; /* minimum code length */
if (outBitsForTable < counter)
+ {
outBitsForTable = counter;
+ }
for (counterCurrentCode = BMAX; counterCurrentCode != 0; counterCurrentCode--)
+ {
if (arrBitLengthCount[counterCurrentCode] != 0)
+ {
break;
+ }
+ }
int maximumCodeLength = counterCurrentCode; /* maximum code length */
if (outBitsForTable > counterCurrentCode)
+ {
outBitsForTable = counterCurrentCode;
+ }
/* Adjust last length count to fill out codes, if needed */
int numberOfDummyCodesAdded;
@@ -86,11 +100,17 @@ so that no bits beyond that code are fetched when that code is
counter < counterCurrentCode;
counter++, numberOfDummyCodesAdded <<= 1
)
+ {
if ((numberOfDummyCodesAdded -= arrBitLengthCount[counter]) < 0)
+ {
return 2; /* bad input: more codes than bits */
+ }
+ }
if ((numberOfDummyCodesAdded -= arrBitLengthCount[counterCurrentCode]) < 0)
+ {
return 2;
+ }
arrBitLengthCount[counterCurrentCode] += numberOfDummyCodesAdded;
@@ -108,14 +128,18 @@ so that no bits beyond that code are fetched when that code is
/* Make a table of values in order of bit lengths */
int[] arrValuesInOrderOfBitLength = new int[N_MAX];
for (int i = 0; i < N_MAX; i++)
+ {
arrValuesInOrderOfBitLength[i] = 0;
+ }
pIndex = 0;
counterCurrentCode = 0;
do
{
if ((counter = arrBitLengthForCodes[pIndex++]) != 0)
+ {
arrValuesInOrderOfBitLength[bitOffset[counter]++] = counterCurrentCode;
+ }
} while (++counterCurrentCode < numberOfCodes);
numberOfCodes = bitOffset[maximumCodeLength]; /* set numberOfCodes to length of v */
@@ -165,7 +189,10 @@ so that no bits beyond that code are fetched when that code is
while (++counter < numberOfEntriesInCurrentTable) /* try smaller tables up to z bits */
{
if ((fBitCounter1 <<= 1) <= arrBitLengthCount[++xIndex])
+ {
break; /* enough codes to use up j bits */
+ }
+
fBitCounter1 -= arrBitLengthCount[xIndex]; /* else deduct codes from patterns */
}
}
@@ -173,7 +200,9 @@ so that no bits beyond that code are fetched when that code is
bitsBeforeThisTable + counter > lengthOfEOBcode
&& bitsBeforeThisTable < lengthOfEOBcode
)
+ {
counter = lengthOfEOBcode - bitsBeforeThisTable; /* make EOB code end at table */
+ }
numberOfEntriesInCurrentTable = 1 << counter; /* table entries for j-bit table */
arrLX[stackOfBitsPerTable + tableLevel] = counter; /* set table size in stack */
@@ -216,7 +245,9 @@ so that no bits beyond that code are fetched when that code is
};
if (pIndex >= numberOfCodes)
+ {
vHuft1.NumberOfExtraBits = INVALID_CODE; /* out of values--invalid code */
+ }
else if (arrValuesInOrderOfBitLength[pIndex] < numberOfSimpleValueCodes)
{
vHuft1.NumberOfExtraBits = (
@@ -241,7 +272,9 @@ so that no bits beyond that code are fetched when that code is
counter < numberOfEntriesInCurrentTable;
counter += fBitCounter2
)
+ {
pointerToCurrentTable[counter] = vHuft1;
+ }
/* backwards increment the k-bit code i */
for (
@@ -249,14 +282,19 @@ so that no bits beyond that code are fetched when that code is
(counterCurrentCode & counter) != 0;
counter >>= 1
)
+ {
counterCurrentCode ^= counter;
+ }
+
counterCurrentCode ^= counter;
/* backup over finished tables */
while (
(counterCurrentCode & ((1 << bitsBeforeThisTable) - 1)) != bitOffset[tableLevel]
)
+ {
bitsBeforeThisTable -= arrLX[stackOfBitsPerTable + (--tableLevel)];
+ }
}
}
diff --git a/src/SharpCompress/Compressors/Filters/BCJFilter.cs b/src/SharpCompress/Compressors/Filters/BCJFilter.cs
index e7f00c98d..2928a570a 100644
--- a/src/SharpCompress/Compressors/Filters/BCJFilter.cs
+++ b/src/SharpCompress/Compressors/Filters/BCJFilter.cs
@@ -5,7 +5,7 @@ namespace SharpCompress.Compressors.Filters;
internal class BCJFilter : Filter
{
private static readonly bool[] MASK_TO_ALLOWED_STATUS =
- {
+ [
true,
true,
true,
@@ -14,15 +14,18 @@ internal class BCJFilter : Filter
false,
false,
false,
- };
+ ];
- private static readonly int[] MASK_TO_BIT_NUMBER = { 0, 1, 2, 2, 3, 3, 3, 3 };
+ private static readonly int[] MASK_TO_BIT_NUMBER = [0, 1, 2, 2, 3, 3, 3, 3];
private int _pos;
private int _prevMask;
public BCJFilter(bool isEncoder, Stream baseStream)
- : base(isEncoder, baseStream, 5) => _pos = 5;
+ : base(isEncoder, baseStream, 5)
+ {
+ _pos = 5;
+ }
private static bool Test86MsByte(byte b) => b == 0x00 || b == 0xFF;
diff --git a/src/SharpCompress/Compressors/Filters/BCJFilterARM.cs b/src/SharpCompress/Compressors/Filters/BCJFilterARM.cs
index 5d861621d..e25ab0efb 100644
--- a/src/SharpCompress/Compressors/Filters/BCJFilterARM.cs
+++ b/src/SharpCompress/Compressors/Filters/BCJFilterARM.cs
@@ -7,7 +7,10 @@ internal class BCJFilterARM : Filter
private int _pos;
public BCJFilterARM(bool isEncoder, Stream baseStream)
- : base(isEncoder, baseStream, 8) => _pos = 8;
+ : base(isEncoder, baseStream, 8)
+ {
+ _pos = 8;
+ }
protected override int Transform(byte[] buffer, int offset, int count)
{
diff --git a/src/SharpCompress/Compressors/Filters/BCJFilterARM64.cs b/src/SharpCompress/Compressors/Filters/BCJFilterARM64.cs
index 24f5ab177..55d969fa9 100644
--- a/src/SharpCompress/Compressors/Filters/BCJFilterARM64.cs
+++ b/src/SharpCompress/Compressors/Filters/BCJFilterARM64.cs
@@ -9,7 +9,10 @@ internal class BCJFilterARM64 : Filter
private int _pos;
public BCJFilterARM64(bool isEncoder, Stream baseStream)
- : base(isEncoder, baseStream, 8) => _pos = 0;
+ : base(isEncoder, baseStream, 8)
+ {
+ _pos = 0;
+ }
protected override int Transform(byte[] buffer, int offset, int count)
{
@@ -30,7 +33,9 @@ protected override int Transform(byte[] buffer, int offset, int count)
pc >>= 2;
if (!_isEncoder)
+ {
pc = 0U - pc;
+ }
instr |= (src + pc) & 0x03FFFFFF;
BinaryPrimitives.WriteUInt32LittleEndian(new Span(buffer, i, 4), instr);
@@ -40,13 +45,17 @@ protected override int Transform(byte[] buffer, int offset, int count)
uint src = ((instr >> 29) & 3) | ((instr >> 3) & 0x001FFFFC);
if (((src + 0x00020000) & 0x001C0000) != 0)
+ {
continue;
+ }
instr &= 0x9000001F;
pc >>= 12;
if (!_isEncoder)
+ {
pc = 0U - pc;
+ }
uint dest = src + pc;
instr |= (dest & 3) << 29;
diff --git a/src/SharpCompress/Compressors/Filters/BCJFilterARMT.cs b/src/SharpCompress/Compressors/Filters/BCJFilterARMT.cs
index db5b36f2c..486a528c2 100644
--- a/src/SharpCompress/Compressors/Filters/BCJFilterARMT.cs
+++ b/src/SharpCompress/Compressors/Filters/BCJFilterARMT.cs
@@ -7,7 +7,10 @@ internal class BCJFilterARMT : Filter
private int _pos;
public BCJFilterARMT(bool isEncoder, Stream baseStream)
- : base(isEncoder, baseStream, 4) => _pos = 4;
+ : base(isEncoder, baseStream, 4)
+ {
+ _pos = 4;
+ }
protected override int Transform(byte[] buffer, int offset, int count)
{
@@ -27,9 +30,13 @@ protected override int Transform(byte[] buffer, int offset, int count)
int dest;
if (_isEncoder)
+ {
dest = src + (_pos + i - offset);
+ }
else
+ {
dest = src - (_pos + i - offset);
+ }
dest >>>= 1;
buffer[i + 1] = (byte)(0xF0 | ((dest >>> 19) & 0x07));
diff --git a/src/SharpCompress/Compressors/Filters/BCJFilterIA64.cs b/src/SharpCompress/Compressors/Filters/BCJFilterIA64.cs
index 44323ae73..83d0f8fe3 100644
--- a/src/SharpCompress/Compressors/Filters/BCJFilterIA64.cs
+++ b/src/SharpCompress/Compressors/Filters/BCJFilterIA64.cs
@@ -7,7 +7,7 @@ internal class BCJFilterIA64 : Filter
private int _pos;
private static readonly int[] BRANCH_TABLE =
- {
+ [
0,
0,
0,
@@ -40,10 +40,13 @@ internal class BCJFilterIA64 : Filter
4,
0,
0,
- };
+ ];
public BCJFilterIA64(bool isEncoder, Stream baseStream)
- : base(isEncoder, baseStream, 16) => _pos = 0;
+ : base(isEncoder, baseStream, 16)
+ {
+ _pos = 0;
+ }
protected override int Transform(byte[] buffer, int offset, int count)
{
@@ -58,7 +61,9 @@ protected override int Transform(byte[] buffer, int offset, int count)
for (int slot = 0, bitPos = 5; slot < 3; ++slot, bitPos += 41)
{
if (((mask >>> slot) & 1) == 0)
+ {
continue;
+ }
var bytePos = bitPos >>> 3;
var bitRes = bitPos & 7;
@@ -72,7 +77,9 @@ protected override int Transform(byte[] buffer, int offset, int count)
var instrNorm = instr >>> bitRes;
if (((instrNorm >>> 37) & 0x0F) != 0x05 || ((instrNorm >>> 9) & 0x07) != 0x00)
+ {
continue;
+ }
var src = (int)((instrNorm >>> 13) & 0x0FFFFF);
src |= ((int)(instrNorm >>> 36) & 1) << 20;
@@ -80,9 +87,13 @@ protected override int Transform(byte[] buffer, int offset, int count)
int dest;
if (_isEncoder)
+ {
dest = src + (_pos + i - offset);
+ }
else
+ {
dest = src - (_pos + i - offset);
+ }
dest >>>= 4;
diff --git a/src/SharpCompress/Compressors/Filters/BCJFilterPPC.cs b/src/SharpCompress/Compressors/Filters/BCJFilterPPC.cs
index ccfa74807..abe014495 100644
--- a/src/SharpCompress/Compressors/Filters/BCJFilterPPC.cs
+++ b/src/SharpCompress/Compressors/Filters/BCJFilterPPC.cs
@@ -7,7 +7,10 @@ internal class BCJFilterPPC : Filter
private int _pos;
public BCJFilterPPC(bool isEncoder, Stream baseStream)
- : base(isEncoder, baseStream, 4) => _pos = 0;
+ : base(isEncoder, baseStream, 4)
+ {
+ _pos = 0;
+ }
protected override int Transform(byte[] buffer, int offset, int count)
{
diff --git a/src/SharpCompress/Compressors/Filters/BCJFilterRISCV.cs b/src/SharpCompress/Compressors/Filters/BCJFilterRISCV.cs
index 67d9cb528..ce1afb227 100644
--- a/src/SharpCompress/Compressors/Filters/BCJFilterRISCV.cs
+++ b/src/SharpCompress/Compressors/Filters/BCJFilterRISCV.cs
@@ -9,7 +9,10 @@ internal class BCJFilterRISCV : Filter
private int _pos;
public BCJFilterRISCV(bool isEncoder, Stream baseStream)
- : base(isEncoder, baseStream, 8) => _pos = 0;
+ : base(isEncoder, baseStream, 8)
+ {
+ _pos = 0;
+ }
private int Decode(byte[] buffer, int offset, int count)
{
@@ -27,7 +30,9 @@ private int Decode(byte[] buffer, int offset, int count)
{
uint b1 = buffer[i + 1];
if ((b1 & 0x0D) != 0)
+ {
continue;
+ }
uint b2 = buffer[i + 2];
uint b3 = buffer[i + 3];
@@ -73,7 +78,7 @@ private int Decode(byte[] buffer, int offset, int count)
else
{
uint inst2_rs1 = inst >> 27;
- if ((uint)(((inst) - 0x3117) << 18) >= ((inst2_rs1) & 0x1D))
+ if (((inst) - 0x3117) << 18 >= ((inst2_rs1) & 0x1D))
{
i += 4 - 2;
continue;
@@ -116,7 +121,9 @@ private int Encode(byte[] buffer, int offset, int count)
{
uint b1 = buffer[i + 1];
if ((b1 & 0x0D) != 0)
+ {
continue;
+ }
uint b2 = buffer[i + 2];
uint b3 = buffer[i + 3];
@@ -168,7 +175,7 @@ private int Encode(byte[] buffer, int offset, int count)
else
{
uint fake_rs1 = inst >> 27;
- if ((uint)(((inst) - 0x3117) << 18) >= ((fake_rs1) & 0x1D))
+ if (((inst) - 0x3117) << 18 >= ((fake_rs1) & 0x1D))
{
i += 4 - 2;
continue;
diff --git a/src/SharpCompress/Compressors/Filters/BCJFilterSPARC.cs b/src/SharpCompress/Compressors/Filters/BCJFilterSPARC.cs
index db7c75beb..a92d1684c 100644
--- a/src/SharpCompress/Compressors/Filters/BCJFilterSPARC.cs
+++ b/src/SharpCompress/Compressors/Filters/BCJFilterSPARC.cs
@@ -7,7 +7,10 @@ internal class BCJFilterSPARC : Filter
private int _pos;
public BCJFilterSPARC(bool isEncoder, Stream baseStream)
- : base(isEncoder, baseStream, 4) => _pos = 0;
+ : base(isEncoder, baseStream, 4)
+ {
+ _pos = 0;
+ }
protected override int Transform(byte[] buffer, int offset, int count)
{
diff --git a/src/SharpCompress/Compressors/Filters/BranchExecFilter.cs b/src/SharpCompress/Compressors/Filters/BranchExecFilter.cs
index df95c838a..5e101de0d 100644
--- a/src/SharpCompress/Compressors/Filters/BranchExecFilter.cs
+++ b/src/SharpCompress/Compressors/Filters/BranchExecFilter.cs
@@ -13,8 +13,9 @@ namespace SharpCompress.Compressors.Filters;
[CLSCompliant(false)]
public sealed class BranchExecFilter
{
- public enum Alignment : int
+ public enum Alignment
{
+ None = 0,
ARCH_x86_ALIGNMENT = 1,
ARCH_PowerPC_ALIGNMENT = 4,
ARCH_IA64_ALIGNMENT = 16,
@@ -47,13 +48,17 @@ public static void X86Converter(byte[] buf, uint ip, ref uint state)
var size = (uint)buf.Length;
if (size <= 4)
+ {
return;
+ }
size -= 4;
for (i = 0; i < size; ++i)
{
if ((buf[i] & 0xFE) != 0xE8)
+ {
continue;
+ }
prev_pos = i - prev_pos;
if (prev_pos > 3)
@@ -89,12 +94,16 @@ public static void X86Converter(byte[] buf, uint ip, ref uint state)
{
dest = src - (pos + (uint)i + 5);
if (prev_mask == 0)
+ {
break;
+ }
j = mask_to_bit_num[prev_mask] * 8u;
b = (byte)(dest >> (24 - (int)j));
if (!X86TestByte(b))
+ {
break;
+ }
src = dest ^ ((1u << (32 - (int)j)) - 1u);
}
diff --git a/src/SharpCompress/Compressors/Filters/DeltaFilter.cs b/src/SharpCompress/Compressors/Filters/DeltaFilter.cs
index a6954116e..5f7593ce9 100644
--- a/src/SharpCompress/Compressors/Filters/DeltaFilter.cs
+++ b/src/SharpCompress/Compressors/Filters/DeltaFilter.cs
@@ -1,36 +1,35 @@
using System.IO;
-namespace SharpCompress.Compressors.Filters
+namespace SharpCompress.Compressors.Filters;
+
+internal class DeltaFilter : Filter
{
- internal class DeltaFilter : Filter
+ private const int DISTANCE_MIN = 1;
+ private const int DISTANCE_MAX = 256;
+ private const int DISTANCE_MASK = DISTANCE_MAX - 1;
+
+ private int _distance;
+ private byte[] _history;
+ private int _position;
+
+ public DeltaFilter(bool isEncoder, Stream baseStream, byte[] info)
+ : base(isEncoder, baseStream, 1)
{
- private const int DISTANCE_MIN = 1;
- private const int DISTANCE_MAX = 256;
- private const int DISTANCE_MASK = DISTANCE_MAX - 1;
+ _distance = info[0];
+ _history = new byte[DISTANCE_MAX];
+ _position = 0;
+ }
- private int _distance;
- private byte[] _history;
- private int _position;
+ protected override int Transform(byte[] buffer, int offset, int count)
+ {
+ var end = offset + count;
- public DeltaFilter(bool isEncoder, Stream baseStream, byte[] info)
- : base(isEncoder, baseStream, 1)
+ for (var i = offset; i < end; i++)
{
- _distance = info[0];
- _history = new byte[DISTANCE_MAX];
- _position = 0;
+ buffer[i] += _history[(_distance + _position--) & DISTANCE_MASK];
+ _history[_position & DISTANCE_MASK] = buffer[i];
}
- protected override int Transform(byte[] buffer, int offset, int count)
- {
- var end = offset + count;
-
- for (var i = offset; i < end; i++)
- {
- buffer[i] += _history[(_distance + _position--) & DISTANCE_MASK];
- _history[_position & DISTANCE_MASK] = buffer[i];
- }
-
- return count;
- }
+ return count;
}
}
diff --git a/src/SharpCompress/Compressors/LZMA/AesDecoderStream.cs b/src/SharpCompress/Compressors/LZMA/AesDecoderStream.cs
index a7a8583d2..2014b223c 100644
--- a/src/SharpCompress/Compressors/LZMA/AesDecoderStream.cs
+++ b/src/SharpCompress/Compressors/LZMA/AesDecoderStream.cs
@@ -177,8 +177,8 @@ private void Init(byte[] info, out int numCyclesPower, out byte[] salt, out byte
if ((bt & 0xC0) == 0)
{
- salt = Array.Empty();
- iv = Array.Empty();
+ salt = [];
+ iv = [];
return;
}
diff --git a/src/SharpCompress/Compressors/LZMA/Bcj2DecoderStream.cs b/src/SharpCompress/Compressors/LZMA/Bcj2DecoderStream.cs
index de2d284f8..20bcc1a13 100644
--- a/src/SharpCompress/Compressors/LZMA/Bcj2DecoderStream.cs
+++ b/src/SharpCompress/Compressors/LZMA/Bcj2DecoderStream.cs
@@ -69,7 +69,10 @@ private class StatusDecoder
private uint _prob;
- public StatusDecoder() => _prob = K_BIT_MODEL_TOTAL / 2;
+ public StatusDecoder()
+ {
+ _prob = K_BIT_MODEL_TOTAL / 2;
+ }
public uint Decode(RangeDecoder decoder)
{
diff --git a/src/SharpCompress/Compressors/LZMA/ICoder.cs b/src/SharpCompress/Compressors/LZMA/ICoder.cs
index 951349796..59ec105f3 100644
--- a/src/SharpCompress/Compressors/LZMA/ICoder.cs
+++ b/src/SharpCompress/Compressors/LZMA/ICoder.cs
@@ -6,7 +6,7 @@ namespace SharpCompress.Compressors.LZMA;
///
/// The exception that is thrown when an error in input stream occurs during decoding.
///
-internal class DataErrorException : Exception
+public class DataErrorException : Exception
{
public DataErrorException()
: base("Data Error") { }
@@ -15,7 +15,7 @@ public DataErrorException()
///
/// The exception that is thrown when the value of an argument is outside the allowable range.
///
-internal class InvalidParamException : Exception
+public class InvalidParamException : Exception
{
public InvalidParamException()
: base("Invalid Parameter") { }
diff --git a/src/SharpCompress/Compressors/LZMA/LZipStream.cs b/src/SharpCompress/Compressors/LZMA/LZipStream.cs
index 26d34d3a1..df454c10e 100644
--- a/src/SharpCompress/Compressors/LZMA/LZipStream.cs
+++ b/src/SharpCompress/Compressors/LZMA/LZipStream.cs
@@ -97,10 +97,7 @@ public void Finish()
_countingWritableSubStream?.Write(intBuf);
//total with headers
- BinaryPrimitives.WriteUInt64LittleEndian(
- intBuf,
- (ulong)compressedCount + (ulong)(6 + 20)
- );
+ BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)compressedCount + (6 + 20));
_countingWritableSubStream?.Write(intBuf);
}
_finished = true;
diff --git a/src/SharpCompress/Compressors/LZMA/Log.cs b/src/SharpCompress/Compressors/LZMA/Log.cs
index c2b5651e4..b9cccc3f7 100644
--- a/src/SharpCompress/Compressors/LZMA/Log.cs
+++ b/src/SharpCompress/Compressors/LZMA/Log.cs
@@ -1,6 +1,5 @@
using System;
using System.Collections.Generic;
-using System.Diagnostics;
namespace SharpCompress.Compressors.LZMA;
@@ -9,7 +8,10 @@ internal static class Log
private static readonly Stack INDENT = new();
private static bool NEEDS_INDENT = true;
- static Log() => INDENT.Push("");
+ static Log()
+ {
+ INDENT.Push("");
+ }
public static void PushIndent(string indent = " ") => INDENT.Push(INDENT.Peek() + indent);
diff --git a/src/SharpCompress/Compressors/LZMA/LzmaEncoder.cs b/src/SharpCompress/Compressors/LZMA/LzmaEncoder.cs
index e9fa1bab9..6db300a87 100644
--- a/src/SharpCompress/Compressors/LZMA/LzmaEncoder.cs
+++ b/src/SharpCompress/Compressors/LZMA/LzmaEncoder.cs
@@ -1709,7 +1709,7 @@ private void FillAlignPrices()
_alignPriceCount = 0;
}
- private static readonly string[] K_MATCH_FINDER_I_DS = { "BT2", "BT4" };
+ private static readonly string[] K_MATCH_FINDER_I_DS = ["BT2", "BT4"];
private static int FindMatchFinder(string s)
{
diff --git a/src/SharpCompress/Compressors/LZMA/LzmaEncoderProperties.cs b/src/SharpCompress/Compressors/LZMA/LzmaEncoderProperties.cs
index 198ba471c..160274b8a 100644
--- a/src/SharpCompress/Compressors/LZMA/LzmaEncoderProperties.cs
+++ b/src/SharpCompress/Compressors/LZMA/LzmaEncoderProperties.cs
@@ -29,8 +29,8 @@ public LzmaEncoderProperties(bool eos, int dictionary, int numFastBytes)
var algorithm = 2;
var mf = "bt4";
- _propIDs = new[]
- {
+ _propIDs =
+ [
CoderPropId.DictionarySize,
CoderPropId.PosStateBits,
CoderPropId.LitContextBits,
@@ -39,9 +39,9 @@ public LzmaEncoderProperties(bool eos, int dictionary, int numFastBytes)
CoderPropId.NumFastBytes,
CoderPropId.MatchFinder,
CoderPropId.EndMarker,
- };
- _properties = new object[]
- {
+ ];
+ _properties =
+ [
dictionary,
posStateBits,
litContextBits,
@@ -50,6 +50,6 @@ public LzmaEncoderProperties(bool eos, int dictionary, int numFastBytes)
numFastBytes,
mf,
eos,
- };
+ ];
}
}
diff --git a/src/SharpCompress/Compressors/Lzw/LzwConstants.cs b/src/SharpCompress/Compressors/Lzw/LzwConstants.cs
index 0325adbbd..d36210e8a 100644
--- a/src/SharpCompress/Compressors/Lzw/LzwConstants.cs
+++ b/src/SharpCompress/Compressors/Lzw/LzwConstants.cs
@@ -1,65 +1,64 @@
-namespace SharpCompress.Compressors.Lzw
+namespace SharpCompress.Compressors.Lzw;
+
+///
+/// This class contains constants used for LZW
+///
+[System.Diagnostics.CodeAnalysis.SuppressMessage(
+ "Naming",
+ "CA1707:Identifiers should not contain underscores",
+ Justification = "kept for backwards compatibility"
+)]
+public sealed class LzwConstants
{
///
- /// This class contains constants used for LZW
+ /// Magic number found at start of LZW header: 0x1f 0x9d
///
- [System.Diagnostics.CodeAnalysis.SuppressMessage(
- "Naming",
- "CA1707:Identifiers should not contain underscores",
- Justification = "kept for backwards compatibility"
- )]
- public sealed class LzwConstants
- {
- ///
- /// Magic number found at start of LZW header: 0x1f 0x9d
- ///
- public const int MAGIC = 0x1f9d;
+ public const int MAGIC = 0x1f9d;
- ///
- /// Maximum number of bits per code
- ///
- public const int MAX_BITS = 16;
+ ///
+ /// Maximum number of bits per code
+ ///
+ public const int MAX_BITS = 16;
- /* 3rd header byte:
- * bit 0..4 Number of compression bits
- * bit 5 Extended header
- * bit 6 Free
- * bit 7 Block mode
- */
+ /* 3rd header byte:
+ * bit 0..4 Number of compression bits
+ * bit 5 Extended header
+ * bit 6 Free
+ * bit 7 Block mode
+ */
- ///
- /// Mask for 'number of compression bits'
- ///
- public const int BIT_MASK = 0x1f;
+ ///
+ /// Mask for 'number of compression bits'
+ ///
+ public const int BIT_MASK = 0x1f;
- ///
- /// Indicates the presence of a fourth header byte
- ///
- public const int EXTENDED_MASK = 0x20;
+ ///
+ /// Indicates the presence of a fourth header byte
+ ///
+ public const int EXTENDED_MASK = 0x20;
- //public const int FREE_MASK = 0x40;
+ //public const int FREE_MASK = 0x40;
- ///
- /// Reserved bits
- ///
- public const int RESERVED_MASK = 0x60;
+ ///
+ /// Reserved bits
+ ///
+ public const int RESERVED_MASK = 0x60;
- ///
- /// Block compression: if table is full and compression rate is dropping,
- /// clear the dictionary.
- ///
- public const int BLOCK_MODE_MASK = 0x80;
+ ///
+ /// Block compression: if table is full and compression rate is dropping,
+ /// clear the dictionary.
+ ///
+ public const int BLOCK_MODE_MASK = 0x80;
- ///
- /// LZW file header size (in bytes)
- ///
- public const int HDR_SIZE = 3;
+ ///
+ /// LZW file header size (in bytes)
+ ///
+ public const int HDR_SIZE = 3;
- ///
- /// Initial number of bits per code
- ///
- public const int INIT_BITS = 9;
+ ///
+ /// Initial number of bits per code
+ ///
+ public const int INIT_BITS = 9;
- private LzwConstants() { }
- }
+ private LzwConstants() { }
}
diff --git a/src/SharpCompress/Compressors/Lzw/LzwStream.cs b/src/SharpCompress/Compressors/Lzw/LzwStream.cs
index 71132dac0..fb9a0ec7b 100644
--- a/src/SharpCompress/Compressors/Lzw/LzwStream.cs
+++ b/src/SharpCompress/Compressors/Lzw/LzwStream.cs
@@ -3,622 +3,611 @@
using SharpCompress.Common;
using SharpCompress.IO;
-namespace SharpCompress.Compressors.Lzw
+namespace SharpCompress.Compressors.Lzw;
+
+///
+/// This filter stream is used to decompress a LZW format stream.
+/// Specifically, a stream that uses the LZC compression method.
+/// This file format is usually associated with the .Z file extension.
+///
+/// See http://en.wikipedia.org/wiki/Compress
+/// See http://wiki.wxwidgets.org/Development:_Z_File_Format
+///
+/// The file header consists of 3 (or optionally 4) bytes. The first two bytes
+/// contain the magic marker "0x1f 0x9d", followed by a byte of flags.
+///
+/// Based on Java code by Ronald Tschalar, which in turn was based on the unlzw.c
+/// code in the gzip package.
+///
+/// This sample shows how to unzip a compressed file
+///
+/// using System;
+/// using System.IO;
+///
+/// using ICSharpCode.SharpZipLib.Core;
+/// using ICSharpCode.SharpZipLib.LZW;
+///
+/// class MainClass
+/// {
+/// public static void Main(string[] args)
+/// {
+/// using (Stream inStream = new LzwInputStream(File.OpenRead(args[0])))
+/// using (FileStream outStream = File.Create(Path.GetFileNameWithoutExtension(args[0]))) {
+/// byte[] buffer = new byte[4096];
+/// StreamUtils.Copy(inStream, outStream, buffer);
+/// // OR
+/// inStream.Read(buffer, 0, buffer.Length);
+/// // now do something with the buffer
+/// }
+/// }
+/// }
+///
+///
+public class LzwStream : Stream, IStreamStack
{
- ///
- /// This filter stream is used to decompress a LZW format stream.
- /// Specifically, a stream that uses the LZC compression method.
- /// This file format is usually associated with the .Z file extension.
- ///
- /// See http://en.wikipedia.org/wiki/Compress
- /// See http://wiki.wxwidgets.org/Development:_Z_File_Format
- ///
- /// The file header consists of 3 (or optionally 4) bytes. The first two bytes
- /// contain the magic marker "0x1f 0x9d", followed by a byte of flags.
- ///
- /// Based on Java code by Ronald Tschalar, which in turn was based on the unlzw.c
- /// code in the gzip package.
- ///
- /// This sample shows how to unzip a compressed file
- ///
- /// using System;
- /// using System.IO;
- ///
- /// using ICSharpCode.SharpZipLib.Core;
- /// using ICSharpCode.SharpZipLib.LZW;
- ///
- /// class MainClass
- /// {
- /// public static void Main(string[] args)
- /// {
- /// using (Stream inStream = new LzwInputStream(File.OpenRead(args[0])))
- /// using (FileStream outStream = File.Create(Path.GetFileNameWithoutExtension(args[0]))) {
- /// byte[] buffer = new byte[4096];
- /// StreamUtils.Copy(inStream, outStream, buffer);
- /// // OR
- /// inStream.Read(buffer, 0, buffer.Length);
- /// // now do something with the buffer
- /// }
- /// }
- /// }
- ///
- ///
- public class LzwStream : Stream, IStreamStack
- {
#if DEBUG_STREAMS
- long IStreamStack.InstanceId { get; set; }
+ long IStreamStack.InstanceId { get; set; }
#endif
- int IStreamStack.DefaultBufferSize { get; set; }
+ int IStreamStack.DefaultBufferSize { get; set; }
- Stream IStreamStack.BaseStream() => baseInputStream;
+ Stream IStreamStack.BaseStream() => baseInputStream;
- int IStreamStack.BufferSize
+ int IStreamStack.BufferSize
+ {
+ get => 0;
+ set { }
+ }
+ int IStreamStack.BufferPosition
+ {
+ get => 0;
+ set { }
+ }
+
+ void IStreamStack.SetPosition(long position) { }
+
+ public static bool IsLzwStream(Stream stream)
+ {
+ try
{
- get => 0;
- set { }
+ byte[] hdr = new byte[LzwConstants.HDR_SIZE];
+
+ int result = stream.Read(hdr, 0, hdr.Length);
+
+ // Check the magic marker
+ if (result < 0)
+ {
+ throw new IncompleteArchiveException("Failed to read LZW header");
+ }
+
+ if (hdr[0] != (LzwConstants.MAGIC >> 8) || hdr[1] != (LzwConstants.MAGIC & 0xff))
+ {
+ throw new IncompleteArchiveException(
+ String.Format(
+ "Wrong LZW header. Magic bytes don't match. 0x{0:x2} 0x{1:x2}",
+ hdr[0],
+ hdr[1]
+ )
+ );
+ }
}
- int IStreamStack.BufferPosition
+ catch (Exception)
{
- get => 0;
- set { }
+ return false;
}
+ return true;
+ }
+
+ ///
+ /// Gets or sets a flag indicating ownership of underlying stream.
+ /// When the flag is true will close the underlying stream also.
+ ///
+ /// The default value is true.
+ public bool IsStreamOwner { get; set; } = false;
- void IStreamStack.SetPosition(long position) { }
+ ///
+ /// Creates a LzwInputStream
+ ///
+ ///
+ /// The stream to read compressed data from (baseInputStream LZW format)
+ ///
+ public LzwStream(Stream baseInputStream)
+ {
+ this.baseInputStream = baseInputStream;
+#if DEBUG_STREAMS
+ this.DebugConstruct(typeof(LzwStream));
+#endif
+ }
- public static bool IsLzwStream(Stream stream)
+ ///
+ /// See
+ ///
+ ///
+ public override int ReadByte()
+ {
+ int b = Read(one, 0, 1);
+ if (b == 1)
{
- try
- {
- byte[] hdr = new byte[LzwConstants.HDR_SIZE];
+ return (one[0] & 0xff);
+ }
- int result = stream.Read(hdr, 0, hdr.Length);
+ return -1;
+ }
- // Check the magic marker
- if (result < 0)
- throw new IncompleteArchiveException("Failed to read LZW header");
+ ///
+ /// Reads decompressed data into the provided buffer byte array
+ ///
+ ///
+ /// The array to read and decompress data into
+ ///
+ ///
+ /// The offset indicating where the data should be placed
+ ///
+ ///
+ /// The number of bytes to decompress
+ ///
+ /// The number of bytes read. Zero signals the end of stream
+ public override int Read(byte[] buffer, int offset, int count)
+ {
+ if (!headerParsed)
+ {
+ ParseHeader();
+ }
- if (hdr[0] != (LzwConstants.MAGIC >> 8) || hdr[1] != (LzwConstants.MAGIC & 0xff))
- {
- throw new IncompleteArchiveException(
- String.Format(
- "Wrong LZW header. Magic bytes don't match. 0x{0:x2} 0x{1:x2}",
- hdr[0],
- hdr[1]
- )
- );
- }
- }
- catch (Exception)
- {
- return false;
- }
- return true;
+ if (eof)
+ {
+ return 0;
}
- ///
- /// Gets or sets a flag indicating ownership of underlying stream.
- /// When the flag is true will close the underlying stream also.
- ///
- /// The default value is true.
- public bool IsStreamOwner { get; set; } = false;
-
- ///
- /// Creates a LzwInputStream
- ///
- ///
- /// The stream to read compressed data from (baseInputStream LZW format)
- ///
- public LzwStream(Stream baseInputStream)
+ int start = offset;
+
+ /* Using local copies of various variables speeds things up by as
+ * much as 30% in Java! Performance not tested in C#.
+ */
+ int[] lTabPrefix = tabPrefix;
+ byte[] lTabSuffix = tabSuffix;
+ byte[] lStack = stack;
+ int lNBits = nBits;
+ int lMaxCode = maxCode;
+ int lMaxMaxCode = maxMaxCode;
+ int lBitMask = bitMask;
+ int lOldCode = oldCode;
+ byte lFinChar = finChar;
+ int lStackP = stackP;
+ int lFreeEnt = freeEnt;
+ byte[] lData = data;
+ int lBitPos = bitPos;
+
+ // empty stack if stuff still left
+ int sSize = lStack.Length - lStackP;
+ if (sSize > 0)
{
- this.baseInputStream = baseInputStream;
-#if DEBUG_STREAMS
- this.DebugConstruct(typeof(LzwStream));
-#endif
+ int num = (sSize >= count) ? count : sSize;
+ Array.Copy(lStack, lStackP, buffer, offset, num);
+ offset += num;
+ count -= num;
+ lStackP += num;
}
- ///
- /// See
- ///
- ///
- public override int ReadByte()
+ if (count == 0)
{
- int b = Read(one, 0, 1);
- if (b == 1)
- return (one[0] & 0xff);
- return -1;
+ stackP = lStackP;
+ return offset - start;
}
- ///
- /// Reads decompressed data into the provided buffer byte array
- ///
- ///
- /// The array to read and decompress data into
- ///
- ///
- /// The offset indicating where the data should be placed
- ///
- ///
- /// The number of bytes to decompress
- ///
- /// The number of bytes read. Zero signals the end of stream
- public override int Read(byte[] buffer, int offset, int count)
+ // loop, filling local buffer until enough data has been decompressed
+ MainLoop:
+ do
{
- if (!headerParsed)
- ParseHeader();
-
- if (eof)
- return 0;
-
- int start = offset;
-
- /* Using local copies of various variables speeds things up by as
- * much as 30% in Java! Performance not tested in C#.
- */
- int[] lTabPrefix = tabPrefix;
- byte[] lTabSuffix = tabSuffix;
- byte[] lStack = stack;
- int lNBits = nBits;
- int lMaxCode = maxCode;
- int lMaxMaxCode = maxMaxCode;
- int lBitMask = bitMask;
- int lOldCode = oldCode;
- byte lFinChar = finChar;
- int lStackP = stackP;
- int lFreeEnt = freeEnt;
- byte[] lData = data;
- int lBitPos = bitPos;
-
- // empty stack if stuff still left
- int sSize = lStack.Length - lStackP;
- if (sSize > 0)
+ if (end < EXTRA)
{
- int num = (sSize >= count) ? count : sSize;
- Array.Copy(lStack, lStackP, buffer, offset, num);
- offset += num;
- count -= num;
- lStackP += num;
+ Fill();
}
- if (count == 0)
- {
- stackP = lStackP;
- return offset - start;
- }
+ int bitIn = (got > 0) ? (end - end % lNBits) << 3 : (end << 3) - (lNBits - 1);
- // loop, filling local buffer until enough data has been decompressed
- MainLoop:
- do
+ while (lBitPos < bitIn)
{
- if (end < EXTRA)
+ #region A
+
+ // handle 1-byte reads correctly
+ if (count == 0)
{
- Fill();
+ nBits = lNBits;
+ maxCode = lMaxCode;
+ maxMaxCode = lMaxMaxCode;
+ bitMask = lBitMask;
+ oldCode = lOldCode;
+ finChar = lFinChar;
+ stackP = lStackP;
+ freeEnt = lFreeEnt;
+ bitPos = lBitPos;
+
+ return offset - start;
}
- int bitIn = (got > 0) ? (end - end % lNBits) << 3 : (end << 3) - (lNBits - 1);
-
- while (lBitPos < bitIn)
+ // check for code-width expansion
+ if (lFreeEnt > lMaxCode)
{
- #region A
+ int nBytes = lNBits << 3;
+ lBitPos = (lBitPos - 1) + nBytes - (lBitPos - 1 + nBytes) % nBytes;
- // handle 1-byte reads correctly
- if (count == 0)
- {
- nBits = lNBits;
- maxCode = lMaxCode;
- maxMaxCode = lMaxMaxCode;
- bitMask = lBitMask;
- oldCode = lOldCode;
- finChar = lFinChar;
- stackP = lStackP;
- freeEnt = lFreeEnt;
- bitPos = lBitPos;
-
- return offset - start;
- }
-
- // check for code-width expansion
- if (lFreeEnt > lMaxCode)
- {
- int nBytes = lNBits << 3;
- lBitPos = (lBitPos - 1) + nBytes - (lBitPos - 1 + nBytes) % nBytes;
+ lNBits++;
+ lMaxCode = (lNBits == maxBits) ? lMaxMaxCode : (1 << lNBits) - 1;
- lNBits++;
- lMaxCode = (lNBits == maxBits) ? lMaxMaxCode : (1 << lNBits) - 1;
-
- lBitMask = (1 << lNBits) - 1;
- lBitPos = ResetBuf(lBitPos);
- goto MainLoop;
- }
+ lBitMask = (1 << lNBits) - 1;
+ lBitPos = ResetBuf(lBitPos);
+ goto MainLoop;
+ }
- #endregion A
+ #endregion A
- #region B
+ #region B
- // read next code
- int pos = lBitPos >> 3;
- int code =
+ // read next code
+ int pos = lBitPos >> 3;
+ int code =
+ (
(
- (
- (lData[pos] & 0xFF)
- | ((lData[pos + 1] & 0xFF) << 8)
- | ((lData[pos + 2] & 0xFF) << 16)
- ) >> (lBitPos & 0x7)
- ) & lBitMask;
+ (lData[pos] & 0xFF)
+ | ((lData[pos + 1] & 0xFF) << 8)
+ | ((lData[pos + 2] & 0xFF) << 16)
+ ) >> (lBitPos & 0x7)
+ ) & lBitMask;
- lBitPos += lNBits;
+ lBitPos += lNBits;
- // handle first iteration
- if (lOldCode == -1)
- {
- if (code >= 256)
- throw new IncompleteArchiveException(
- "corrupt input: " + code + " > 255"
- );
-
- lFinChar = (byte)(lOldCode = code);
- buffer[offset++] = lFinChar;
- count--;
- continue;
- }
-
- // handle CLEAR code
- if (code == TBL_CLEAR && blockMode)
+ // handle first iteration
+ if (lOldCode == -1)
+ {
+ if (code >= 256)
{
- Array.Copy(zeros, 0, lTabPrefix, 0, zeros.Length);
- lFreeEnt = TBL_FIRST - 1;
-
- int nBytes = lNBits << 3;
- lBitPos = (lBitPos - 1) + nBytes - (lBitPos - 1 + nBytes) % nBytes;
- lNBits = LzwConstants.INIT_BITS;
- lMaxCode = (1 << lNBits) - 1;
- lBitMask = lMaxCode;
-
- // Code tables reset
-
- lBitPos = ResetBuf(lBitPos);
- goto MainLoop;
+ throw new IncompleteArchiveException("corrupt input: " + code + " > 255");
}
- #endregion B
+ lFinChar = (byte)(lOldCode = code);
+ buffer[offset++] = lFinChar;
+ count--;
+ continue;
+ }
- #region C
+ // handle CLEAR code
+ if (code == TBL_CLEAR && blockMode)
+ {
+ Array.Copy(zeros, 0, lTabPrefix, 0, zeros.Length);
+ lFreeEnt = TBL_FIRST - 1;
- // setup
- int inCode = code;
- lStackP = lStack.Length;
+ int nBytes = lNBits << 3;
+ lBitPos = (lBitPos - 1) + nBytes - (lBitPos - 1 + nBytes) % nBytes;
+ lNBits = LzwConstants.INIT_BITS;
+ lMaxCode = (1 << lNBits) - 1;
+ lBitMask = lMaxCode;
- // Handle KwK case
- if (code >= lFreeEnt)
- {
- if (code > lFreeEnt)
- {
- throw new IncompleteArchiveException(
- "corrupt input: code=" + code + ", freeEnt=" + lFreeEnt
- );
- }
-
- lStack[--lStackP] = lFinChar;
- code = lOldCode;
- }
+ // Code tables reset
- // Generate output characters in reverse order
- while (code >= 256)
- {
- lStack[--lStackP] = lTabSuffix[code];
- code = lTabPrefix[code];
- }
-
- lFinChar = lTabSuffix[code];
- buffer[offset++] = lFinChar;
- count--;
+ lBitPos = ResetBuf(lBitPos);
+ goto MainLoop;
+ }
- // And put them out in forward order
- sSize = lStack.Length - lStackP;
- int num = (sSize >= count) ? count : sSize;
- Array.Copy(lStack, lStackP, buffer, offset, num);
- offset += num;
- count -= num;
- lStackP += num;
+ #endregion B
- #endregion C
+ #region C
- #region D
+ // setup
+ int inCode = code;
+ lStackP = lStack.Length;
- // generate new entry in table
- if (lFreeEnt < lMaxMaxCode)
+ // Handle KwK case
+ if (code >= lFreeEnt)
+ {
+ if (code > lFreeEnt)
{
- lTabPrefix[lFreeEnt] = lOldCode;
- lTabSuffix[lFreeEnt] = lFinChar;
- lFreeEnt++;
+ throw new IncompleteArchiveException(
+ "corrupt input: code=" + code + ", freeEnt=" + lFreeEnt
+ );
}
- // Remember previous code
- lOldCode = inCode;
+ lStack[--lStackP] = lFinChar;
+ code = lOldCode;
+ }
- // if output buffer full, then return
- if (count == 0)
- {
- nBits = lNBits;
- maxCode = lMaxCode;
- bitMask = lBitMask;
- oldCode = lOldCode;
- finChar = lFinChar;
- stackP = lStackP;
- freeEnt = lFreeEnt;
- bitPos = lBitPos;
-
- return offset - start;
- }
+ // Generate output characters in reverse order
+ while (code >= 256)
+ {
+ lStack[--lStackP] = lTabSuffix[code];
+ code = lTabPrefix[code];
+ }
- #endregion D
- } // while
+ lFinChar = lTabSuffix[code];
+ buffer[offset++] = lFinChar;
+ count--;
- lBitPos = ResetBuf(lBitPos);
- } while (got > 0); // do..while
+ // And put them out in forward order
+ sSize = lStack.Length - lStackP;
+ int num = (sSize >= count) ? count : sSize;
+ Array.Copy(lStack, lStackP, buffer, offset, num);
+ offset += num;
+ count -= num;
+ lStackP += num;
- nBits = lNBits;
- maxCode = lMaxCode;
- bitMask = lBitMask;
- oldCode = lOldCode;
- finChar = lFinChar;
- stackP = lStackP;
- freeEnt = lFreeEnt;
- bitPos = lBitPos;
+ #endregion C
- eof = true;
- return offset - start;
- }
+ #region D
- ///
- /// Moves the unread data in the buffer to the beginning and resets
- /// the pointers.
- ///
- ///
- ///
- private int ResetBuf(int bitPosition)
- {
- int pos = bitPosition >> 3;
- Array.Copy(data, pos, data, 0, end - pos);
- end -= pos;
- return 0;
- }
+ // generate new entry in table
+ if (lFreeEnt < lMaxMaxCode)
+ {
+ lTabPrefix[lFreeEnt] = lOldCode;
+ lTabSuffix[lFreeEnt] = lFinChar;
+ lFreeEnt++;
+ }
- private void Fill()
- {
- got = baseInputStream.Read(data, end, data.Length - 1 - end);
- if (got > 0)
- {
- end += got;
- }
- }
+ // Remember previous code
+ lOldCode = inCode;
- private void ParseHeader()
- {
- headerParsed = true;
+ // if output buffer full, then return
+ if (count == 0)
+ {
+ nBits = lNBits;
+ maxCode = lMaxCode;
+ bitMask = lBitMask;
+ oldCode = lOldCode;
+ finChar = lFinChar;
+ stackP = lStackP;
+ freeEnt = lFreeEnt;
+ bitPos = lBitPos;
+
+ return offset - start;
+ }
- byte[] hdr = new byte[LzwConstants.HDR_SIZE];
+ #endregion D
+ } // while
- int result = baseInputStream.Read(hdr, 0, hdr.Length);
+ lBitPos = ResetBuf(lBitPos);
+ } while (got > 0); // do..while
- // Check the magic marker
- if (result < 0)
- throw new IncompleteArchiveException("Failed to read LZW header");
+ nBits = lNBits;
+ maxCode = lMaxCode;
+ bitMask = lBitMask;
+ oldCode = lOldCode;
+ finChar = lFinChar;
+ stackP = lStackP;
+ freeEnt = lFreeEnt;
+ bitPos = lBitPos;
- if (hdr[0] != (LzwConstants.MAGIC >> 8) || hdr[1] != (LzwConstants.MAGIC & 0xff))
- {
- throw new IncompleteArchiveException(
- String.Format(
- "Wrong LZW header. Magic bytes don't match. 0x{0:x2} 0x{1:x2}",
- hdr[0],
- hdr[1]
- )
- );
- }
+ eof = true;
+ return offset - start;
+ }
- // Check the 3rd header byte
- blockMode = (hdr[2] & LzwConstants.BLOCK_MODE_MASK) > 0;
- maxBits = hdr[2] & LzwConstants.BIT_MASK;
+ ///
+ /// Moves the unread data in the buffer to the beginning and resets
+ /// the pointers.
+ ///
+ ///
+ ///
+ private int ResetBuf(int bitPosition)
+ {
+ int pos = bitPosition >> 3;
+ Array.Copy(data, pos, data, 0, end - pos);
+ end -= pos;
+ return 0;
+ }
- if (maxBits > LzwConstants.MAX_BITS)
- {
- throw new ArchiveException(
- "Stream compressed with "
- + maxBits
- + " bits, but decompression can only handle "
- + LzwConstants.MAX_BITS
- + " bits."
- );
- }
+ private void Fill()
+ {
+ got = baseInputStream.Read(data, end, data.Length - 1 - end);
+ if (got > 0)
+ {
+ end += got;
+ }
+ }
- if ((hdr[2] & LzwConstants.RESERVED_MASK) > 0)
- {
- throw new ArchiveException("Unsupported bits set in the header.");
- }
+ private void ParseHeader()
+ {
+ headerParsed = true;
- // Initialize variables
- maxMaxCode = 1 << maxBits;
- nBits = LzwConstants.INIT_BITS;
- maxCode = (1 << nBits) - 1;
- bitMask = maxCode;
- oldCode = -1;
- finChar = 0;
- freeEnt = blockMode ? TBL_FIRST : 256;
-
- tabPrefix = new int[1 << maxBits];
- tabSuffix = new byte[1 << maxBits];
- stack = new byte[1 << maxBits];
- stackP = stack.Length;
-
- for (int idx = 255; idx >= 0; idx--)
- tabSuffix[idx] = (byte)idx;
- }
+ byte[] hdr = new byte[LzwConstants.HDR_SIZE];
- #region Stream Overrides
+ int result = baseInputStream.Read(hdr, 0, hdr.Length);
- ///
- /// Gets a value indicating whether the current stream supports reading
- ///
- public override bool CanRead
+ // Check the magic marker
+ if (result < 0)
{
- get { return baseInputStream.CanRead; }
+ throw new IncompleteArchiveException("Failed to read LZW header");
}
- ///
- /// Gets a value of false indicating seeking is not supported for this stream.
- ///
- public override bool CanSeek
+ if (hdr[0] != (LzwConstants.MAGIC >> 8) || hdr[1] != (LzwConstants.MAGIC & 0xff))
{
- get { return false; }
+ throw new IncompleteArchiveException(
+ String.Format(
+ "Wrong LZW header. Magic bytes don't match. 0x{0:x2} 0x{1:x2}",
+ hdr[0],
+ hdr[1]
+ )
+ );
}
- ///
- /// Gets a value of false indicating that this stream is not writeable.
- ///
- public override bool CanWrite
- {
- get { return false; }
- }
+ // Check the 3rd header byte
+ blockMode = (hdr[2] & LzwConstants.BLOCK_MODE_MASK) > 0;
+ maxBits = hdr[2] & LzwConstants.BIT_MASK;
- ///
- /// A value representing the length of the stream in bytes.
- ///
- public override long Length
+ if (maxBits > LzwConstants.MAX_BITS)
{
- get { return got; }
+ throw new ArchiveException(
+ "Stream compressed with "
+ + maxBits
+ + " bits, but decompression can only handle "
+ + LzwConstants.MAX_BITS
+ + " bits."
+ );
}
- ///
- /// The current position within the stream.
- /// Throws a NotSupportedException when attempting to set the position
- ///
- /// Attempting to set the position
- public override long Position
+ if ((hdr[2] & LzwConstants.RESERVED_MASK) > 0)
{
- get { return baseInputStream.Position; }
- set { throw new NotSupportedException("InflaterInputStream Position not supported"); }
+ throw new ArchiveException("Unsupported bits set in the header.");
}
- ///
- /// Flushes the baseInputStream
- ///
- public override void Flush()
+ // Initialize variables
+ maxMaxCode = 1 << maxBits;
+ nBits = LzwConstants.INIT_BITS;
+ maxCode = (1 << nBits) - 1;
+ bitMask = maxCode;
+ oldCode = -1;
+ finChar = 0;
+ freeEnt = blockMode ? TBL_FIRST : 256;
+
+ tabPrefix = new int[1 << maxBits];
+ tabSuffix = new byte[1 << maxBits];
+ stack = new byte[1 << maxBits];
+ stackP = stack.Length;
+
+ for (int idx = 255; idx >= 0; idx--)
{
- baseInputStream.Flush();
+ tabSuffix[idx] = (byte)idx;
}
+ }
- ///
- /// Sets the position within the current stream
- /// Always throws a NotSupportedException
- ///
- /// The relative offset to seek to.
- /// The defining where to seek from.
- /// The new position in the stream.
- /// Any access
- public override long Seek(long offset, SeekOrigin origin)
- {
- throw new NotSupportedException("Seek not supported");
- }
+ #region Stream Overrides
- ///
- /// Set the length of the current stream
- /// Always throws a NotSupportedException
- ///
- /// The new length value for the stream.
- /// Any access
- public override void SetLength(long value)
- {
- throw new NotSupportedException("InflaterInputStream SetLength not supported");
- }
+ ///
+ /// Gets a value indicating whether the current stream supports reading
+ ///
+ public override bool CanRead => baseInputStream.CanRead;
- ///
- /// Writes a sequence of bytes to stream and advances the current position
- /// This method always throws a NotSupportedException
- ///
- /// The buffer containing data to write.
- /// The offset of the first byte to write.
- /// The number of bytes to write.
- /// Any access
- public override void Write(byte[] buffer, int offset, int count)
- {
- throw new NotSupportedException("InflaterInputStream Write not supported");
- }
+ ///
+ /// Gets a value of false indicating seeking is not supported for this stream.
+ ///
+ public override bool CanSeek => false;
- ///
- /// Writes one byte to the current stream and advances the current position
- /// Always throws a NotSupportedException
- ///
- /// The byte to write.
- /// Any access
- public override void WriteByte(byte value)
- {
- throw new NotSupportedException("InflaterInputStream WriteByte not supported");
- }
+ ///
+ /// Gets a value of false indicating that this stream is not writeable.
+ ///
+ public override bool CanWrite => false;
+
+ ///
+ /// A value representing the length of the stream in bytes.
+ ///
+ public override long Length => got;
+
+ ///
+ /// The current position within the stream.
+ /// Throws a NotSupportedException when attempting to set the position
+ ///
+ /// Attempting to set the position
+ public override long Position
+ {
+ get => baseInputStream.Position;
+ set => throw new NotSupportedException("InflaterInputStream Position not supported");
+ }
+
+ ///
+ /// Flushes the baseInputStream
+ ///
+ public override void Flush() => baseInputStream.Flush();
- ///
- /// Closes the input stream. When
- /// is true the underlying stream is also closed.
- ///
- protected override void Dispose(bool disposing)
+ ///
+ /// Sets the position within the current stream
+ /// Always throws a NotSupportedException
+ ///
+ /// The relative offset to seek to.
+ /// The defining where to seek from.
+ /// The new position in the stream.
+ /// Any access
+ public override long Seek(long offset, SeekOrigin origin) =>
+ throw new NotSupportedException("Seek not supported");
+
+ ///
+ /// Set the length of the current stream
+ /// Always throws a NotSupportedException
+ ///
+ /// The new length value for the stream.
+ /// Any access
+ public override void SetLength(long value) =>
+ throw new NotSupportedException("InflaterInputStream SetLength not supported");
+
+ ///
+ /// Writes a sequence of bytes to stream and advances the current position
+ /// This method always throws a NotSupportedException
+ ///
+ /// The buffer containing data to write.
+ /// The offset of the first byte to write.
+ /// The number of bytes to write.
+ /// Any access
+ public override void Write(byte[] buffer, int offset, int count) =>
+ throw new NotSupportedException("InflaterInputStream Write not supported");
+
+ ///
+ /// Writes one byte to the current stream and advances the current position
+ /// Always throws a NotSupportedException
+ ///
+ /// The byte to write.
+ /// Any access
+ public override void WriteByte(byte value) =>
+ throw new NotSupportedException("InflaterInputStream WriteByte not supported");
+
+ ///
+ /// Closes the input stream. When
+ /// is true the underlying stream is also closed.
+ ///
+ protected override void Dispose(bool disposing)
+ {
+ if (!isClosed)
{
- if (!isClosed)
- {
- isClosed = true;
+ isClosed = true;
#if DEBUG_STREAMS
- this.DebugDispose(typeof(LzwStream));
+ this.DebugDispose(typeof(LzwStream));
#endif
- if (IsStreamOwner)
- {
- baseInputStream.Dispose();
- }
+ if (IsStreamOwner)
+ {
+ baseInputStream.Dispose();
}
}
+ }
- #endregion Stream Overrides
+ #endregion Stream Overrides
- #region Instance Fields
+ #region Instance Fields
- private Stream baseInputStream;
+ private Stream baseInputStream;
- ///
- /// Flag indicating wether this instance has been closed or not.
- ///
- private bool isClosed;
+ ///
+ /// Flag indicating whether this instance has been closed or not.
+ ///
+ private bool isClosed;
- private readonly byte[] one = new byte[1];
- private bool headerParsed;
+ private readonly byte[] one = new byte[1];
+ private bool headerParsed;
- // string table stuff
- private const int TBL_CLEAR = 0x100;
+ // string table stuff
+ private const int TBL_CLEAR = 0x100;
- private const int TBL_FIRST = TBL_CLEAR + 1;
+ private const int TBL_FIRST = TBL_CLEAR + 1;
- private int[] tabPrefix = new int[0]; //
- private byte[] tabSuffix = new byte[0]; //
- private readonly int[] zeros = new int[256];
- private byte[] stack = new byte[0]; //
+ private int[] tabPrefix = []; //
+ private byte[] tabSuffix = []; //
+ private readonly int[] zeros = new int[256];
+ private byte[] stack = []; //
- // various state
- private bool blockMode;
+ // various state
+ private bool blockMode;
- private int nBits;
- private int maxBits;
- private int maxMaxCode;
- private int maxCode;
- private int bitMask;
- private int oldCode;
- private byte finChar;
- private int stackP;
- private int freeEnt;
+ private int nBits;
+ private int maxBits;
+ private int maxMaxCode;
+ private int maxCode;
+ private int bitMask;
+ private int oldCode;
+ private byte finChar;
+ private int stackP;
+ private int freeEnt;
- // input buffer
- private readonly byte[] data = new byte[1024 * 8];
+ // input buffer
+ private readonly byte[] data = new byte[1024 * 8];
- private int bitPos;
- private int end;
- private int got;
- private bool eof;
- private const int EXTRA = 64;
+ private int bitPos;
+ private int end;
+ private int got;
+ private bool eof;
+ private const int EXTRA = 64;
- #endregion Instance Fields
- }
+ #endregion Instance Fields
}
diff --git a/src/SharpCompress/Compressors/PPMd/H/ModelPPM.cs b/src/SharpCompress/Compressors/PPMd/H/ModelPPM.cs
index f02ec8474..67e6b397d 100644
--- a/src/SharpCompress/Compressors/PPMd/H/ModelPPM.cs
+++ b/src/SharpCompress/Compressors/PPMd/H/ModelPPM.cs
@@ -124,7 +124,7 @@ public virtual int HiBitsFlag
private readonly int[][] _binSumm = new int[128][]; // binary SEE-contexts
private static readonly int[] INIT_BIN_ESC =
- {
+ [
0x3CDD,
0x1F3F,
0x59BF,
@@ -133,7 +133,7 @@ public virtual int HiBitsFlag
0x5ABC,
0x6632,
0x6051,
- };
+ ];
// Temp fields
//UPGRADE_NOTE: Final was removed from the declaration of 'tempState1 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
diff --git a/src/SharpCompress/Compressors/PPMd/H/PPMContext.cs b/src/SharpCompress/Compressors/PPMd/H/PPMContext.cs
index e4f98c719..6af3c3454 100644
--- a/src/SharpCompress/Compressors/PPMd/H/PPMContext.cs
+++ b/src/SharpCompress/Compressors/PPMd/H/PPMContext.cs
@@ -60,7 +60,7 @@ public virtual int NumStats
private int _suffix; // pointer ppmcontext
//UPGRADE_NOTE: Final was removed from the declaration of 'ExpEscape'. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
- public static readonly int[] EXP_ESCAPE = { 25, 14, 9, 7, 5, 5, 4, 4, 4, 3, 3, 3, 2, 2, 2, 2 };
+ public static readonly int[] EXP_ESCAPE = [25, 14, 9, 7, 5, 5, 4, 4, 4, 3, 3, 3, 2, 2, 2, 2];
// Temp fields
//UPGRADE_NOTE: Final was removed from the declaration of 'tempState1 '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
@@ -548,5 +548,8 @@ public override string ToString()
return buffer.ToString();
}
- static PpmContext() => UNION_SIZE = Math.Max(FreqData.SIZE, State.SIZE);
+ static PpmContext()
+ {
+ UNION_SIZE = Math.Max(FreqData.SIZE, State.SIZE);
+ }
}
diff --git a/src/SharpCompress/Compressors/PPMd/H/Pointer.cs b/src/SharpCompress/Compressors/PPMd/H/Pointer.cs
index 96cf4c3ec..a9f3cdddf 100644
--- a/src/SharpCompress/Compressors/PPMd/H/Pointer.cs
+++ b/src/SharpCompress/Compressors/PPMd/H/Pointer.cs
@@ -7,7 +7,10 @@ internal abstract class Pointer
/// Initialize the object with the array (may be null)
/// the byte array
///
- internal Pointer(byte[] mem) => Memory = mem;
+ internal Pointer(byte[] mem)
+ {
+ Memory = mem;
+ }
internal byte[] Memory { get; private set; }
diff --git a/src/SharpCompress/Compressors/PPMd/H/SubAllocator.cs b/src/SharpCompress/Compressors/PPMd/H/SubAllocator.cs
index e507f3c30..8b0722587 100644
--- a/src/SharpCompress/Compressors/PPMd/H/SubAllocator.cs
+++ b/src/SharpCompress/Compressors/PPMd/H/SubAllocator.cs
@@ -77,7 +77,10 @@ public virtual int UnitsStart
private RarMemBlock _tempRarMemBlock2;
private RarMemBlock _tempRarMemBlock3;
- public SubAllocator() => Clean();
+ public SubAllocator()
+ {
+ Clean();
+ }
public virtual void Clean() => _subAllocatorSize = 0;
@@ -438,5 +441,8 @@ public override string ToString()
return buffer.ToString();
}
- static SubAllocator() => UNIT_SIZE = Math.Max(PpmContext.SIZE, RarMemBlock.SIZE);
+ static SubAllocator()
+ {
+ UNIT_SIZE = Math.Max(PpmContext.SIZE, RarMemBlock.SIZE);
+ }
}
diff --git a/src/SharpCompress/Compressors/PPMd/I1/Allocator.cs b/src/SharpCompress/Compressors/PPMd/I1/Allocator.cs
index c4d8aa4b7..eff201853 100644
--- a/src/SharpCompress/Compressors/PPMd/I1/Allocator.cs
+++ b/src/SharpCompress/Compressors/PPMd/I1/Allocator.cs
@@ -89,7 +89,10 @@ static Allocator()
#region Public Methods
- public Allocator() => _memoryNodes = new MemoryNode[INDEX_COUNT];
+ public Allocator()
+ {
+ _memoryNodes = new MemoryNode[INDEX_COUNT];
+ }
///
/// Initialize or reset the memory allocator (so that the single, large array can be re-used without destroying
diff --git a/src/SharpCompress/Compressors/PPMd/I1/MemoryNode.cs b/src/SharpCompress/Compressors/PPMd/I1/MemoryNode.cs
index c8b411d86..f5b1631b5 100644
--- a/src/SharpCompress/Compressors/PPMd/I1/MemoryNode.cs
+++ b/src/SharpCompress/Compressors/PPMd/I1/MemoryNode.cs
@@ -1,5 +1,7 @@
#nullable disable
+using System;
+
namespace SharpCompress.Compressors.PPMd.I1;
///
@@ -23,7 +25,7 @@ namespace SharpCompress.Compressors.PPMd.I1;
/// Note that is a field rather than a property for performance reasons.
///
///
-internal struct MemoryNode
+internal struct MemoryNode : IEquatable
{
public uint _address;
public byte[] _memory;
@@ -232,6 +234,8 @@ public override bool Equals(object obj)
return base.Equals(obj);
}
+ public readonly bool Equals(MemoryNode other) => other._address == _address;
+
///
/// Returns the hash code for this instance.
///
diff --git a/src/SharpCompress/Compressors/PPMd/I1/Model.cs b/src/SharpCompress/Compressors/PPMd/I1/Model.cs
index cd7f7ac43..10a3f9279 100644
--- a/src/SharpCompress/Compressors/PPMd/I1/Model.cs
+++ b/src/SharpCompress/Compressors/PPMd/I1/Model.cs
@@ -51,7 +51,7 @@ internal partial class Model
private readonly PpmState[] _decodeStates = new PpmState[256];
private static readonly ushort[] INITIAL_BINARY_ESCAPES =
- {
+ [
0x3CDD,
0x1F3F,
0x59BF,
@@ -60,10 +60,10 @@ internal partial class Model
0x5ABC,
0x6632,
0x6051,
- };
+ ];
private static ReadOnlySpan EXPONENTIAL_ESCAPES =>
- new byte[] { 25, 14, 9, 7, 5, 5, 4, 4, 4, 3, 3, 3, 2, 2, 2, 2 };
+ [25, 14, 9, 7, 5, 5, 4, 4, 4, 3, 3, 3, 2, 2, 2, 2];
#region Public Methods
diff --git a/src/SharpCompress/Compressors/PPMd/I1/Pointer.cs b/src/SharpCompress/Compressors/PPMd/I1/Pointer.cs
index d1a895452..a60a10632 100644
--- a/src/SharpCompress/Compressors/PPMd/I1/Pointer.cs
+++ b/src/SharpCompress/Compressors/PPMd/I1/Pointer.cs
@@ -18,7 +18,7 @@ namespace SharpCompress.Compressors.PPMd.I1;
/// Note that is a field rather than a property for performance reasons.
///
///
-internal struct Pointer
+internal struct Pointer : IEquatable
{
public uint _address;
public byte[] _memory;
@@ -349,6 +349,8 @@ public override bool Equals(object obj)
return base.Equals(obj);
}
+ public bool Equals(Pointer other) => other._address == _address;
+
///
/// Returns the hash code for this instance.
///
diff --git a/src/SharpCompress/Compressors/PPMd/I1/PpmContext.cs b/src/SharpCompress/Compressors/PPMd/I1/PpmContext.cs
index 701039c28..58a464a5c 100644
--- a/src/SharpCompress/Compressors/PPMd/I1/PpmContext.cs
+++ b/src/SharpCompress/Compressors/PPMd/I1/PpmContext.cs
@@ -1,5 +1,7 @@
#nullable disable
+using System;
+
namespace SharpCompress.Compressors.PPMd.I1;
///
@@ -17,7 +19,7 @@ internal partial class Model
///
/// The structure which represents the current PPM context. This is 12 bytes in size.
///
- internal struct PpmContext
+ internal struct PpmContext : IEquatable
{
public uint _address;
public byte[] _memory;
@@ -262,6 +264,8 @@ public override bool Equals(object obj)
return base.Equals(obj);
}
+ public bool Equals(PpmContext other) => other._address == _address;
+
///
/// Returns the hash code for this instance.
///
@@ -577,7 +581,6 @@ private void Update2(PpmState state, PpmContext context)
private See2Context MakeEscapeFrequency(PpmContext context)
{
- var numberStatistics = (uint)2 * context.NumberStatistics;
See2Context see2Context;
if (context.NumberStatistics != 0xff)
@@ -585,7 +588,7 @@ private See2Context MakeEscapeFrequency(PpmContext context)
// Note that context.Flags is always in the range 0 .. 28 (this ensures that the index used for the second
// dimension of the see2Contexts array is always in the range 0 .. 31).
- numberStatistics = context.Suffix.NumberStatistics;
+ var numberStatistics = context.Suffix.NumberStatistics;
var index1 = _probabilities[context.NumberStatistics + 2] - 3;
var index2 =
((context.SummaryFrequency > 11 * (context.NumberStatistics + 1)) ? 1 : 0)
diff --git a/src/SharpCompress/Compressors/PPMd/I1/PpmState.cs b/src/SharpCompress/Compressors/PPMd/I1/PpmState.cs
index 02e79a34e..f4c5f8f7d 100644
--- a/src/SharpCompress/Compressors/PPMd/I1/PpmState.cs
+++ b/src/SharpCompress/Compressors/PPMd/I1/PpmState.cs
@@ -1,5 +1,7 @@
#nullable disable
+using System;
+
namespace SharpCompress.Compressors.PPMd.I1;
///
@@ -15,7 +17,7 @@ namespace SharpCompress.Compressors.PPMd.I1;
/// Note that is a field rather than a property for performance reasons.
///
///
-internal struct PpmState
+internal struct PpmState : IEquatable
{
public uint _address;
public byte[] _memory;
@@ -183,6 +185,8 @@ public override bool Equals(object obj)
return base.Equals(obj);
}
+ public bool Equals(PpmState other) => other._address == _address;
+
///
/// Returns the hash code for this instance.
///
diff --git a/src/SharpCompress/Compressors/RLE90/RLE.cs b/src/SharpCompress/Compressors/RLE90/RLE.cs
index 8bc8ee1ba..f16a62004 100644
--- a/src/SharpCompress/Compressors/RLE90/RLE.cs
+++ b/src/SharpCompress/Compressors/RLE90/RLE.cs
@@ -1,52 +1,49 @@
using System.Collections.Generic;
using System.Linq;
-namespace SharpCompress.Compressors.RLE90
+namespace SharpCompress.Compressors.RLE90;
+
+public static class RLE
{
- public static class RLE
+ private const byte DLE = 0x90;
+
+ ///
+ /// Unpacks an RLE compressed buffer.
+ /// Format: char DLE count, where count == 0 -> DLE
+ ///
+ public static List UnpackRLE(byte[] compressedBuffer)
{
- private const byte DLE = 0x90;
+ var result = new List(compressedBuffer.Length * 2); // Optimized initial capacity
+ var countMode = false;
+ byte last = 0;
- ///
- /// Unpacks an RLE compressed buffer.
- /// Format: DLE , where count == 0 -> DLE
- ///
- /// The compressed buffer to unpack.
- /// A list of unpacked bytes.
- public static List UnpackRLE(byte[] compressedBuffer)
+ foreach (var c in compressedBuffer)
{
- var result = new List(compressedBuffer.Length * 2); // Optimized initial capacity
- var countMode = false;
- byte last = 0;
-
- foreach (var c in compressedBuffer)
+ if (!countMode)
+ {
+ if (c == DLE)
+ {
+ countMode = true;
+ }
+ else
+ {
+ result.Add(c);
+ last = c;
+ }
+ }
+ else
{
- if (!countMode)
+ countMode = false;
+ if (c == 0)
{
- if (c == DLE)
- {
- countMode = true;
- }
- else
- {
- result.Add(c);
- last = c;
- }
+ result.Add(DLE);
}
else
{
- countMode = false;
- if (c == 0)
- {
- result.Add(DLE);
- }
- else
- {
- result.AddRange(Enumerable.Repeat(last, c - 1));
- }
+ result.AddRange(Enumerable.Repeat(last, c - 1));
}
}
- return result;
}
+ return result;
}
}
diff --git a/src/SharpCompress/Compressors/RLE90/RunLength90Stream.cs b/src/SharpCompress/Compressors/RLE90/RunLength90Stream.cs
index 09034040a..c44283d0f 100644
--- a/src/SharpCompress/Compressors/RLE90/RunLength90Stream.cs
+++ b/src/SharpCompress/Compressors/RLE90/RunLength90Stream.cs
@@ -1,96 +1,91 @@
using System;
-using System.Collections.Generic;
using System.IO;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
using SharpCompress.IO;
-namespace SharpCompress.Compressors.RLE90
+namespace SharpCompress.Compressors.RLE90;
+
+public class RunLength90Stream : Stream, IStreamStack
{
- public class RunLength90Stream : Stream, IStreamStack
- {
#if DEBUG_STREAMS
- long IStreamStack.InstanceId { get; set; }
+ long IStreamStack.InstanceId { get; set; }
#endif
- int IStreamStack.DefaultBufferSize { get; set; }
+ int IStreamStack.DefaultBufferSize { get; set; }
- Stream IStreamStack.BaseStream() => _stream;
+ Stream IStreamStack.BaseStream() => _stream;
- int IStreamStack.BufferSize
- {
- get => 0;
- set { }
- }
- int IStreamStack.BufferPosition
- {
- get => 0;
- set { }
- }
+ int IStreamStack.BufferSize
+ {
+ get => 0;
+ set { }
+ }
+ int IStreamStack.BufferPosition
+ {
+ get => 0;
+ set { }
+ }
- void IStreamStack.SetPosition(long position) { }
+ void IStreamStack.SetPosition(long position) { }
- private readonly Stream _stream;
- private const byte DLE = 0x90;
- private int _compressedSize;
- private bool _processed = false;
+ private readonly Stream _stream;
+ private const byte DLE = 0x90;
+ private int _compressedSize;
+ private bool _processed = false;
- public RunLength90Stream(Stream stream, int compressedSize)
- {
- _stream = stream;
- _compressedSize = compressedSize;
+ public RunLength90Stream(Stream stream, int compressedSize)
+ {
+ _stream = stream;
+ _compressedSize = compressedSize;
#if DEBUG_STREAMS
- this.DebugConstruct(typeof(RunLength90Stream));
+ this.DebugConstruct(typeof(RunLength90Stream));
#endif
- }
+ }
- protected override void Dispose(bool disposing)
- {
+ protected override void Dispose(bool disposing)
+ {
#if DEBUG_STREAMS
- this.DebugDispose(typeof(RunLength90Stream));
+ this.DebugDispose(typeof(RunLength90Stream));
#endif
- base.Dispose(disposing);
- }
+ base.Dispose(disposing);
+ }
- public override bool CanRead => true;
+ public override bool CanRead => true;
- public override bool CanSeek => false;
+ public override bool CanSeek => false;
- public override bool CanWrite => false;
+ public override bool CanWrite => false;
- public override long Length => throw new NotImplementedException();
+ public override long Length => throw new NotImplementedException();
- public override long Position
- {
- get => _stream.Position;
- set => throw new NotImplementedException();
- }
+ public override long Position
+ {
+ get => _stream.Position;
+ set => throw new NotImplementedException();
+ }
- public override void Flush() => throw new NotImplementedException();
+ public override void Flush() => throw new NotImplementedException();
- public override int Read(byte[] buffer, int offset, int count)
+ public override int Read(byte[] buffer, int offset, int count)
+ {
+ if (_processed)
{
- if (_processed)
- {
- return 0;
- }
- _processed = true;
+ return 0;
+ }
+ _processed = true;
- using var binaryReader = new BinaryReader(_stream);
- byte[] compressedBuffer = binaryReader.ReadBytes(_compressedSize);
+ using var binaryReader = new BinaryReader(_stream);
+ byte[] compressedBuffer = binaryReader.ReadBytes(_compressedSize);
- var unpacked = RLE.UnpackRLE(compressedBuffer);
- unpacked.CopyTo(buffer);
+ var unpacked = RLE.UnpackRLE(compressedBuffer);
+ unpacked.CopyTo(buffer);
- return unpacked.Count;
- }
+ return unpacked.Count;
+ }
- public override long Seek(long offset, SeekOrigin origin) =>
- throw new NotImplementedException();
+ public override long Seek(long offset, SeekOrigin origin) =>
+ throw new NotImplementedException();
- public override void SetLength(long value) => throw new NotImplementedException();
+ public override void SetLength(long value) => throw new NotImplementedException();
- public override void Write(byte[] buffer, int offset, int count) =>
- throw new NotImplementedException();
- }
+ public override void Write(byte[] buffer, int offset, int count) =>
+ throw new NotImplementedException();
}
diff --git a/src/SharpCompress/Compressors/Rar/RarBLAKE2spStream.cs b/src/SharpCompress/Compressors/Rar/RarBLAKE2spStream.cs
index c0aead0db..6f039ceee 100644
--- a/src/SharpCompress/Compressors/Rar/RarBLAKE2spStream.cs
+++ b/src/SharpCompress/Compressors/Rar/RarBLAKE2spStream.cs
@@ -39,7 +39,7 @@ void IStreamStack.SetPosition(long position) { }
const uint BLAKE2S_INIT_IV_SIZE = 8;
static readonly UInt32[] k_BLAKE2S_IV =
- {
+ [
0x6A09E667U,
0xBB67AE85U,
0x3C6EF372U,
@@ -48,21 +48,21 @@ void IStreamStack.SetPosition(long position) { }
0x9B05688CU,
0x1F83D9ABU,
0x5BE0CD19U,
- };
+ ];
static readonly byte[][] k_BLAKE2S_Sigma =
- {
- new byte[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 },
- new byte[] { 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 },
- new byte[] { 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 },
- new byte[] { 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 },
- new byte[] { 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 },
- new byte[] { 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 },
- new byte[] { 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 },
- new byte[] { 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 },
- new byte[] { 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 },
- new byte[] { 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 },
- };
+ [
+ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
+ [14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3],
+ [11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4],
+ [7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8],
+ [9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13],
+ [2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9],
+ [12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11],
+ [13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10],
+ [6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5],
+ [10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0],
+ ];
internal class BLAKE2S
{
@@ -101,7 +101,7 @@ public BLAKE2SP()
BLAKE2SP _blake2sp;
- byte[] _hash = { };
+ byte[] _hash = [];
public RarBLAKE2spStream(
IRarUnpack unpack,
diff --git a/src/SharpCompress/Compressors/Rar/UnpackV1/Decode/AudioVariables.cs b/src/SharpCompress/Compressors/Rar/UnpackV1/Decode/AudioVariables.cs
index ade62f559..d53d777ff 100644
--- a/src/SharpCompress/Compressors/Rar/UnpackV1/Decode/AudioVariables.cs
+++ b/src/SharpCompress/Compressors/Rar/UnpackV1/Decode/AudioVariables.cs
@@ -2,7 +2,10 @@ namespace SharpCompress.Compressors.Rar.UnpackV1.Decode;
internal class AudioVariables
{
- internal AudioVariables() => Dif = new int[11];
+ internal AudioVariables()
+ {
+ Dif = new int[11];
+ }
internal int[] Dif { get; }
internal int ByteCount { get; set; }
diff --git a/src/SharpCompress/Compressors/Rar/UnpackV1/Unpack.cs b/src/SharpCompress/Compressors/Rar/UnpackV1/Unpack.cs
index c4e6d1088..0eb69b4dc 100644
--- a/src/SharpCompress/Compressors/Rar/UnpackV1/Unpack.cs
+++ b/src/SharpCompress/Compressors/Rar/UnpackV1/Unpack.cs
@@ -18,9 +18,11 @@ internal sealed partial class Unpack : BitInput, IRarUnpack, IDisposable
private readonly BitInput Inp;
private bool disposed;
- public Unpack() =>
+ public Unpack()
+ {
// to ease in porting Unpack50.cs
Inp = this;
+ }
public void Dispose()
{
@@ -72,14 +74,14 @@ public int Char
private readonly RarVM rarVM = new();
// Filters code, one entry per filter
- private readonly List filters = new();
+ private readonly List filters = [];
// Filters stack, several entrances of same filter are possible
- private readonly List prgStack = new();
+ private readonly List prgStack = [];
// lengths of preceding blocks, one length per filter. Used to reduce size
// required to write block length if lengths are repeating
- private readonly List oldFilterLengths = new();
+ private readonly List oldFilterLengths = [];
private int lastFilter;
@@ -100,7 +102,7 @@ public int Char
private int lowDistRepCount;
private static readonly int[] DBitLengthCounts =
- {
+ [
4,
2,
2,
@@ -120,7 +122,7 @@ public int Char
14,
0,
12,
- };
+ ];
private FileHeader fileHeader;
diff --git a/src/SharpCompress/Compressors/Rar/UnpackV1/Unpack15.cs b/src/SharpCompress/Compressors/Rar/UnpackV1/Unpack15.cs
index 4c6e38943..d457cfb33 100644
--- a/src/SharpCompress/Compressors/Rar/UnpackV1/Unpack15.cs
+++ b/src/SharpCompress/Compressors/Rar/UnpackV1/Unpack15.cs
@@ -77,7 +77,7 @@ internal partial class Unpack
private const int STARTL1 = 2;
private static readonly int[] DecL1 =
- {
+ [
0x8000,
0xa000,
0xc000,
@@ -89,14 +89,14 @@ internal partial class Unpack
0xf200,
0xf200,
0xffff,
- };
+ ];
- private static readonly int[] PosL1 = { 0, 0, 0, 2, 3, 5, 7, 11, 16, 20, 24, 32, 32 };
+ private static readonly int[] PosL1 = [0, 0, 0, 2, 3, 5, 7, 11, 16, 20, 24, 32, 32];
private const int STARTL2 = 3;
private static readonly int[] DecL2 =
- {
+ [
0xa000,
0xc000,
0xd000,
@@ -107,14 +107,14 @@ internal partial class Unpack
0xf200,
0xf240,
0xffff,
- };
+ ];
- private static readonly int[] PosL2 = { 0, 0, 0, 0, 5, 7, 9, 13, 18, 22, 26, 34, 36 };
+ private static readonly int[] PosL2 = [0, 0, 0, 0, 5, 7, 9, 13, 18, 22, 26, 34, 36];
private const int STARTHF0 = 4;
private static readonly int[] DecHf0 =
- {
+ [
0x8000,
0xc000,
0xe000,
@@ -124,14 +124,14 @@ internal partial class Unpack
0xf200,
0xf200,
0xffff,
- };
+ ];
- private static readonly int[] PosHf0 = { 0, 0, 0, 0, 0, 8, 16, 24, 33, 33, 33, 33, 33 };
+ private static readonly int[] PosHf0 = [0, 0, 0, 0, 0, 8, 16, 24, 33, 33, 33, 33, 33];
private const int STARTHF1 = 5;
private static readonly int[] DecHf1 =
- {
+ [
0x2000,
0xc000,
0xe000,
@@ -140,14 +140,14 @@ internal partial class Unpack
0xf200,
0xf7e0,
0xffff,
- };
+ ];
- private static readonly int[] PosHf1 = { 0, 0, 0, 0, 0, 0, 4, 44, 60, 76, 80, 80, 127 };
+ private static readonly int[] PosHf1 = [0, 0, 0, 0, 0, 0, 4, 44, 60, 76, 80, 80, 127];
private const int STARTHF2 = 5;
private static readonly int[] DecHf2 =
- {
+ [
0x1000,
0x2400,
0x8000,
@@ -156,35 +156,26 @@ internal partial class Unpack
0xffff,
0xffff,
0xffff,
- };
+ ];
- private static readonly int[] PosHf2 = { 0, 0, 0, 0, 0, 0, 2, 7, 53, 117, 233, 0, 0 };
+ private static readonly int[] PosHf2 = [0, 0, 0, 0, 0, 0, 2, 7, 53, 117, 233, 0, 0];
private const int STARTHF3 = 6;
- private static readonly int[] DecHf3 =
- {
- 0x800,
- 0x2400,
- 0xee00,
- 0xfe80,
- 0xffff,
- 0xffff,
- 0xffff,
- };
+ private static readonly int[] DecHf3 = [0x800, 0x2400, 0xee00, 0xfe80, 0xffff, 0xffff, 0xffff];
- private static readonly int[] PosHf3 = { 0, 0, 0, 0, 0, 0, 0, 2, 16, 218, 251, 0, 0 };
+ private static readonly int[] PosHf3 = [0, 0, 0, 0, 0, 0, 0, 2, 16, 218, 251, 0, 0];
private const int STARTHF4 = 8;
- private static readonly int[] DecHf4 = { 0xff00, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff };
+ private static readonly int[] DecHf4 = [0xff00, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff];
- private static readonly int[] PosHf4 = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0, 0, 0 };
+ private static readonly int[] PosHf4 = [0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0, 0, 0];
- private static readonly int[] ShortLen1 = { 1, 3, 4, 4, 5, 6, 7, 8, 8, 4, 4, 5, 6, 6, 4, 0 };
+ private static readonly int[] ShortLen1 = [1, 3, 4, 4, 5, 6, 7, 8, 8, 4, 4, 5, 6, 6, 4, 0];
private static readonly int[] ShortXor1 =
- {
+ [
0,
0xa0,
0xd0,
@@ -200,12 +191,12 @@ internal partial class Unpack
0x98,
0x9c,
0xb0,
- };
+ ];
- private static readonly int[] ShortLen2 = { 2, 3, 3, 3, 4, 4, 5, 6, 6, 4, 4, 5, 6, 6, 4, 0 };
+ private static readonly int[] ShortLen2 = [2, 3, 3, 3, 4, 4, 5, 6, 6, 4, 4, 5, 6, 6, 4, 0];
private static readonly int[] ShortXor2 =
- {
+ [
0,
0x40,
0x60,
@@ -221,7 +212,7 @@ internal partial class Unpack
0x98,
0x9c,
0xb0,
- };
+ ];
private void unpack15(bool solid)
{
diff --git a/src/SharpCompress/Compressors/Rar/UnpackV1/Unpack20.cs b/src/SharpCompress/Compressors/Rar/UnpackV1/Unpack20.cs
index 69a266bb6..d9bbb3193 100644
--- a/src/SharpCompress/Compressors/Rar/UnpackV1/Unpack20.cs
+++ b/src/SharpCompress/Compressors/Rar/UnpackV1/Unpack20.cs
@@ -14,13 +14,13 @@ namespace SharpCompress.Compressors.Rar.UnpackV1;
internal partial class Unpack
{
- private readonly MultDecode[] MD = new[]
- {
+ private readonly MultDecode[] MD =
+ [
new MultDecode(),
new MultDecode(),
new MultDecode(),
new MultDecode(),
- };
+ ];
private readonly byte[] UnpOldTable20 = new byte[PackDef.MC20 * 4];
@@ -42,7 +42,7 @@ internal partial class Unpack
private readonly BitDecode BD = new();
private static readonly int[] LDecode =
- {
+ [
0,
1,
2,
@@ -71,43 +71,13 @@ internal partial class Unpack
160,
192,
224,
- };
+ ];
private static ReadOnlySpan LBits =>
- new byte[]
- {
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 1,
- 1,
- 1,
- 1,
- 2,
- 2,
- 2,
- 2,
- 3,
- 3,
- 3,
- 3,
- 4,
- 4,
- 4,
- 4,
- 5,
- 5,
- 5,
- 5,
- };
+ [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5];
private static readonly int[] DDecode =
- {
+ [
0,
1,
2,
@@ -156,10 +126,10 @@ internal partial class Unpack
851968,
917504,
983040,
- };
+ ];
private static readonly int[] DBits =
- {
+ [
0,
0,
0,
@@ -208,11 +178,11 @@ internal partial class Unpack
16,
16,
16,
- };
+ ];
- private static readonly int[] SDDecode = { 0, 4, 8, 16, 32, 64, 128, 192 };
+ private static readonly int[] SDDecode = [0, 4, 8, 16, 32, 64, 128, 192];
- private static readonly int[] SDBits = { 2, 2, 3, 4, 5, 6, 6, 6 };
+ private static readonly int[] SDBits = [2, 2, 3, 4, 5, 6, 6, 6];
private void unpack20(bool solid)
{
diff --git a/src/SharpCompress/Compressors/Rar/UnpackV1/UnpackFilter.cs b/src/SharpCompress/Compressors/Rar/UnpackV1/UnpackFilter.cs
index 5359ab242..602cd0ae7 100644
--- a/src/SharpCompress/Compressors/Rar/UnpackV1/UnpackFilter.cs
+++ b/src/SharpCompress/Compressors/Rar/UnpackV1/UnpackFilter.cs
@@ -7,7 +7,10 @@ internal class UnpackFilter
public byte Type;
public byte Channels;
- internal UnpackFilter() => Program = new VMPreparedProgram();
+ internal UnpackFilter()
+ {
+ Program = new VMPreparedProgram();
+ }
// TODO uint
internal uint uBlockStart
diff --git a/src/SharpCompress/Compressors/Rar/UnpackV2017/FragmentedWindow.unpack50frag_cpp.cs b/src/SharpCompress/Compressors/Rar/UnpackV2017/FragmentedWindow.unpack50frag_cpp.cs
index a9a6f280f..502a6d1f5 100644
--- a/src/SharpCompress/Compressors/Rar/UnpackV2017/FragmentedWindow.unpack50frag_cpp.cs
+++ b/src/SharpCompress/Compressors/Rar/UnpackV2017/FragmentedWindow.unpack50frag_cpp.cs
@@ -56,11 +56,6 @@ public void Init(size_t WinSize)
while (Size >= MinSize)
{
NewMem = new byte[Size];
- if (NewMem != null)
- {
- break;
- }
-
Size -= Size / 32;
}
diff --git a/src/SharpCompress/Compressors/Rar/UnpackV2017/Unpack.unpack15_cpp.cs b/src/SharpCompress/Compressors/Rar/UnpackV2017/Unpack.unpack15_cpp.cs
index c1886df84..aecc172fa 100644
--- a/src/SharpCompress/Compressors/Rar/UnpackV2017/Unpack.unpack15_cpp.cs
+++ b/src/SharpCompress/Compressors/Rar/UnpackV2017/Unpack.unpack15_cpp.cs
@@ -8,7 +8,7 @@ internal partial class Unpack
private const int STARTL1 = 2;
private static readonly uint[] DecL1 =
- {
+ [
0x8000,
0xa000,
0xc000,
@@ -20,14 +20,14 @@ internal partial class Unpack
0xf200,
0xf200,
0xffff,
- };
+ ];
- private static readonly uint[] PosL1 = { 0, 0, 0, 2, 3, 5, 7, 11, 16, 20, 24, 32, 32 };
+ private static readonly uint[] PosL1 = [0, 0, 0, 2, 3, 5, 7, 11, 16, 20, 24, 32, 32];
private const int STARTL2 = 3;
private static readonly uint[] DecL2 =
- {
+ [
0xa000,
0xc000,
0xd000,
@@ -38,14 +38,14 @@ internal partial class Unpack
0xf200,
0xf240,
0xffff,
- };
+ ];
- private static readonly uint[] PosL2 = { 0, 0, 0, 0, 5, 7, 9, 13, 18, 22, 26, 34, 36 };
+ private static readonly uint[] PosL2 = [0, 0, 0, 0, 5, 7, 9, 13, 18, 22, 26, 34, 36];
private const int STARTHF0 = 4;
private static readonly uint[] DecHf0 =
- {
+ [
0x8000,
0xc000,
0xe000,
@@ -55,14 +55,14 @@ internal partial class Unpack
0xf200,
0xf200,
0xffff,
- };
+ ];
- private static readonly uint[] PosHf0 = { 0, 0, 0, 0, 0, 8, 16, 24, 33, 33, 33, 33, 33 };
+ private static readonly uint[] PosHf0 = [0, 0, 0, 0, 0, 8, 16, 24, 33, 33, 33, 33, 33];
private const int STARTHF1 = 5;
private static readonly uint[] DecHf1 =
- {
+ [
0x2000,
0xc000,
0xe000,
@@ -71,14 +71,14 @@ internal partial class Unpack
0xf200,
0xf7e0,
0xffff,
- };
+ ];
- private static readonly uint[] PosHf1 = { 0, 0, 0, 0, 0, 0, 4, 44, 60, 76, 80, 80, 127 };
+ private static readonly uint[] PosHf1 = [0, 0, 0, 0, 0, 0, 4, 44, 60, 76, 80, 80, 127];
private const int STARTHF2 = 5;
private static readonly uint[] DecHf2 =
- {
+ [
0x1000,
0x2400,
0x8000,
@@ -87,28 +87,19 @@ internal partial class Unpack
0xffff,
0xffff,
0xffff,
- };
+ ];
- private static readonly uint[] PosHf2 = { 0, 0, 0, 0, 0, 0, 2, 7, 53, 117, 233, 0, 0 };
+ private static readonly uint[] PosHf2 = [0, 0, 0, 0, 0, 0, 2, 7, 53, 117, 233, 0, 0];
private const int STARTHF3 = 6;
- private static readonly uint[] DecHf3 =
- {
- 0x800,
- 0x2400,
- 0xee00,
- 0xfe80,
- 0xffff,
- 0xffff,
- 0xffff,
- };
+ private static readonly uint[] DecHf3 = [0x800, 0x2400, 0xee00, 0xfe80, 0xffff, 0xffff, 0xffff];
- private static readonly uint[] PosHf3 = { 0, 0, 0, 0, 0, 0, 0, 2, 16, 218, 251, 0, 0 };
+ private static readonly uint[] PosHf3 = [0, 0, 0, 0, 0, 0, 0, 2, 16, 218, 251, 0, 0];
private const int STARTHF4 = 8;
- private static readonly uint[] DecHf4 = { 0xff00, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff };
- private static readonly uint[] PosHf4 = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0, 0, 0 };
+ private static readonly uint[] DecHf4 = [0xff00, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff];
+ private static readonly uint[] PosHf4 = [0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 0, 0, 0];
private void Unpack15(bool Solid)
{
@@ -208,27 +199,9 @@ private void Unpack15(bool Solid)
internal static class Unpack15Local
{
- public static readonly uint[] ShortLen1 =
- {
- 1,
- 3,
- 4,
- 4,
- 5,
- 6,
- 7,
- 8,
- 8,
- 4,
- 4,
- 5,
- 6,
- 6,
- 4,
- 0,
- };
+ public static readonly uint[] ShortLen1 = [1, 3, 4, 4, 5, 6, 7, 8, 8, 4, 4, 5, 6, 6, 4, 0];
public static readonly uint[] ShortXor1 =
- {
+ [
0,
0xa0,
0xd0,
@@ -244,28 +217,10 @@ internal static class Unpack15Local
0x98,
0x9c,
0xb0,
- };
- public static readonly uint[] ShortLen2 =
- {
- 2,
- 3,
- 3,
- 3,
- 4,
- 4,
- 5,
- 6,
- 6,
- 4,
- 4,
- 5,
- 6,
- 6,
- 4,
- 0,
- };
+ ];
+ public static readonly uint[] ShortLen2 = [2, 3, 3, 3, 4, 4, 5, 6, 6, 4, 4, 5, 6, 6, 4, 0];
public static readonly uint[] ShortXor2 =
- {
+ [
0,
0x40,
0x60,
@@ -281,7 +236,7 @@ internal static class Unpack15Local
0x98,
0x9c,
0xb0,
- };
+ ];
}
private void ShortLZ()
diff --git a/src/SharpCompress/Compressors/Rar/UnpackV2017/Unpack.unpack20_cpp.cs b/src/SharpCompress/Compressors/Rar/UnpackV2017/Unpack.unpack20_cpp.cs
index c5970d518..5f7ca20ba 100644
--- a/src/SharpCompress/Compressors/Rar/UnpackV2017/Unpack.unpack20_cpp.cs
+++ b/src/SharpCompress/Compressors/Rar/UnpackV2017/Unpack.unpack20_cpp.cs
@@ -24,7 +24,7 @@ private void CopyString20(uint Length, uint Distance)
internal static class Unpack20Local
{
public static readonly byte[] LDecode =
- {
+ [
0,
1,
2,
@@ -53,9 +53,9 @@ internal static class Unpack20Local
160,
192,
224,
- };
+ ];
public static readonly byte[] LBits =
- {
+ [
0,
0,
0,
@@ -84,9 +84,9 @@ internal static class Unpack20Local
5,
5,
5,
- };
+ ];
public static readonly uint[] DDecode =
- {
+ [
0,
1,
2,
@@ -135,9 +135,9 @@ internal static class Unpack20Local
851968,
917504,
983040,
- };
+ ];
public static readonly byte[] DBits =
- {
+ [
0,
0,
0,
@@ -186,9 +186,9 @@ internal static class Unpack20Local
16,
16,
16,
- };
- public static readonly byte[] SDDecode = { 0, 4, 8, 16, 32, 64, 128, 192 };
- public static readonly byte[] SDBits = { 2, 2, 3, 4, 5, 6, 6, 6 };
+ ];
+ public static readonly byte[] SDDecode = [0, 4, 8, 16, 32, 64, 128, 192];
+ public static readonly byte[] SDBits = [2, 2, 3, 4, 5, 6, 6, 6];
}
private void Unpack20(bool Solid)
diff --git a/src/SharpCompress/Compressors/Rar/UnpackV2017/unpack_hpp.cs b/src/SharpCompress/Compressors/Rar/UnpackV2017/unpack_hpp.cs
index 9f57af38f..5f9cf76fc 100644
--- a/src/SharpCompress/Compressors/Rar/UnpackV2017/unpack_hpp.cs
+++ b/src/SharpCompress/Compressors/Rar/UnpackV2017/unpack_hpp.cs
@@ -1,4 +1,3 @@
-using System;
using System.Collections.Generic;
using static SharpCompress.Compressors.Rar.UnpackV2017.PackDef;
using static SharpCompress.Compressors.Rar.UnpackV2017.UnpackGlobal;
@@ -264,11 +263,11 @@ internal partial class Unpack
byte *ReadBufMT;
#endif*/
- private byte[] FilterSrcMemory = Array.Empty();
- private byte[] FilterDstMemory = Array.Empty();
+ private byte[] FilterSrcMemory = [];
+ private byte[] FilterDstMemory = [];
// Filters code, one entry per filter.
- private readonly List Filters = new();
+ private readonly List Filters = [];
private readonly uint[] OldDist = new uint[4];
private uint OldDistPtr;
@@ -395,15 +394,15 @@ internal partial class Unpack
private readonly BitInput VMCodeInp = new(true);
// Filters code, one entry per filter.
- private readonly List Filters30 = new();
+ private readonly List Filters30 = [];
// Filters stack, several entrances of same filter are possible.
- private readonly List PrgStack = new();
+ private readonly List PrgStack = [];
// Lengths of preceding data blocks, one length of one last block
// for every filter. Used to reduce the size required to write
// the data block length if lengths are repeating.
- private readonly List OldFilterLengths = new();
+ private readonly List OldFilterLengths = [];
/*#if RarV2017_RAR_SMP
// More than 8 threads are unlikely to provide a noticeable gain
diff --git a/src/SharpCompress/Compressors/Rar/VM/BitInput.cs b/src/SharpCompress/Compressors/Rar/VM/BitInput.cs
index fe349fe74..90dad278e 100644
--- a/src/SharpCompress/Compressors/Rar/VM/BitInput.cs
+++ b/src/SharpCompress/Compressors/Rar/VM/BitInput.cs
@@ -22,7 +22,10 @@ public int InBit
public bool ExternalBuffer;
///
- internal BitInput() => InBuf = new byte[MAX_SIZE];
+ internal BitInput()
+ {
+ InBuf = new byte[MAX_SIZE];
+ }
internal byte[] InBuf { get; }
diff --git a/src/SharpCompress/Compressors/Rar/VM/RarVM.cs b/src/SharpCompress/Compressors/Rar/VM/RarVM.cs
index f1ef38345..b5c205fda 100644
--- a/src/SharpCompress/Compressors/Rar/VM/RarVM.cs
+++ b/src/SharpCompress/Compressors/Rar/VM/RarVM.cs
@@ -40,9 +40,11 @@ internal sealed class RarVM : BitInput
private int IP;
- internal RarVM() =>
+ internal RarVM()
+ {
//InitBlock();
Mem = null;
+ }
internal void init() => Mem ??= new byte[VM_MEMSIZE + 4];
@@ -1108,7 +1110,7 @@ internal static int ReadData(BitInput rarVM)
private VMStandardFilters IsStandardFilter(ReadOnlySpan code, int codeSize)
{
VMStandardFilterSignature[] stdList =
- {
+ [
new(53, 0xad576887, VMStandardFilters.VMSF_E8),
new(57, 0x3cd7e57e, VMStandardFilters.VMSF_E8E9),
new(120, 0x3769893f, VMStandardFilters.VMSF_ITANIUM),
@@ -1116,7 +1118,7 @@ private VMStandardFilters IsStandardFilter(ReadOnlySpan code, int codeSize
new(149, 0x1c2c5dc8, VMStandardFilters.VMSF_RGB),
new(216, 0xbc85e701, VMStandardFilters.VMSF_AUDIO),
new(40, 0x46b9c560, VMStandardFilters.VMSF_UPCASE),
- };
+ ];
var CodeCRC = RarCRC.CheckCrc(0xffffffff, code, 0, code.Length) ^ 0xffffffff;
for (var i = 0; i < stdList.Length; i++)
{
@@ -1199,7 +1201,7 @@ private void ExecuteStandardFilter(VMStandardFilters filterType)
var curPos = 0;
//UPGRADE_NOTE: Final was removed from the declaration of 'Masks '. "ms-help://MS.VSCC.v80/dv_commoner/local/redirect.htm?index='!DefaultContextWindowIndex'&keyword='jlca1003'"
- byte[] Masks = { 4, 4, 6, 6, 0, 0, 7, 7, 4, 4, 0, 0, 4, 4, 0, 0 };
+ byte[] Masks = [4, 4, 6, 6, 0, 0, 7, 7, 4, 4, 0, 0, 4, 4, 0, 0];
fileOffset = Utility.URShift(fileOffset, 4);
while (curPos < dataSize - 21)
diff --git a/src/SharpCompress/Compressors/Rar/VM/VMCmdFlags.cs b/src/SharpCompress/Compressors/Rar/VM/VMCmdFlags.cs
index 9dbafe1bc..d44aef789 100644
--- a/src/SharpCompress/Compressors/Rar/VM/VMCmdFlags.cs
+++ b/src/SharpCompress/Compressors/Rar/VM/VMCmdFlags.cs
@@ -13,7 +13,7 @@ internal class VMCmdFlags
public const byte VMCF_CHFLAGS = 64;
public static byte[] VM_CmdFlags =
- {
+ [
VMCF_OP2 | VMCF_BYTEMODE,
VMCF_OP2 | VMCF_BYTEMODE | VMCF_CHFLAGS,
VMCF_OP2 | VMCF_BYTEMODE | VMCF_CHFLAGS,
@@ -54,5 +54,5 @@ internal class VMCmdFlags
VMCF_OP2 | VMCF_BYTEMODE | VMCF_USEFLAGS | VMCF_CHFLAGS,
VMCF_OP2 | VMCF_BYTEMODE | VMCF_USEFLAGS | VMCF_CHFLAGS,
VMCF_OP0,
- };
+ ];
}
diff --git a/src/SharpCompress/Compressors/Rar/VM/VMPreparedProgram.cs b/src/SharpCompress/Compressors/Rar/VM/VMPreparedProgram.cs
index f1f9e0aaf..4552a5aa1 100644
--- a/src/SharpCompress/Compressors/Rar/VM/VMPreparedProgram.cs
+++ b/src/SharpCompress/Compressors/Rar/VM/VMPreparedProgram.cs
@@ -4,13 +4,13 @@ namespace SharpCompress.Compressors.Rar.VM;
internal class VMPreparedProgram
{
- internal List Commands = new();
- internal List AltCommands = new();
+ internal List Commands = [];
+ internal List AltCommands = [];
public int CommandCount { get; set; }
- internal List GlobalData = new();
- internal List StaticData = new();
+ internal List GlobalData = [];
+ internal List StaticData = [];
// static data contained in DB operators
internal int[] InitR = new int[7];
diff --git a/src/SharpCompress/Compressors/Reduce/ReduceStream.cs b/src/SharpCompress/Compressors/Reduce/ReduceStream.cs
index 8ea0cd3d2..5fcbbefad 100644
--- a/src/SharpCompress/Compressors/Reduce/ReduceStream.cs
+++ b/src/SharpCompress/Compressors/Reduce/ReduceStream.cs
@@ -80,25 +80,15 @@ protected override void Dispose(bool disposing)
base.Dispose(disposing);
}
- public override void Flush()
- {
- throw new NotImplementedException();
- }
+ public override void Flush() => throw new NotImplementedException();
- public override long Seek(long offset, SeekOrigin origin)
- {
+ public override long Seek(long offset, SeekOrigin origin) =>
throw new NotImplementedException();
- }
- public override void SetLength(long value)
- {
- throw new NotImplementedException();
- }
+ public override void SetLength(long value) => throw new NotImplementedException();
- public override void Write(byte[] buffer, int offset, int count)
- {
+ public override void Write(byte[] buffer, int offset, int count) =>
throw new NotImplementedException();
- }
public override bool CanRead => true;
public override bool CanSeek => false;
@@ -113,8 +103,8 @@ public override long Position
private const int RunLengthCode = 144;
private const int WSIZE = 0x4000;
- private readonly uint[] mask_bits = new uint[]
- {
+ private readonly uint[] mask_bits =
+ [
0x0000,
0x0001,
0x0003,
@@ -132,7 +122,7 @@ public override long Position
0x3fff,
0x7fff,
0xffff,
- };
+ ];
private int bitBufferCount;
private ulong bitBuffer;
@@ -140,7 +130,10 @@ public override long Position
private int NEXTBYTE()
{
if (inByteCount == compressedSize)
+ {
return EOF;
+ }
+
inByteCount++;
return inStream.ReadByte();
}
@@ -150,13 +143,13 @@ private void READBITS(int nbits, out byte zdest)
if (nbits > bitBufferCount)
{
int temp;
- while (bitBufferCount <= 8 * (int)(4 - 1) && (temp = NEXTBYTE()) != EOF)
+ while (bitBufferCount <= 8 * (4 - 1) && (temp = NEXTBYTE()) != EOF)
{
bitBuffer |= (ulong)temp << bitBufferCount;
bitBufferCount += 8;
}
}
- zdest = (byte)(bitBuffer & (ulong)mask_bits[nbits]);
+ zdest = (byte)(bitBuffer & mask_bits[nbits]);
bitBuffer >>= nbits;
bitBufferCount -= nbits;
}
@@ -165,7 +158,7 @@ private void READBITS(int nbits, out byte zdest)
private void LoadBitLengthTable()
{
- byte[] bitPos = { 0, 2, 4, 8, 16, 32, 64, 128, 255 };
+ byte[] bitPos = [0, 2, 4, 8, 16, 32, 64, 128, 255];
bitCountTable = new byte[256];
for (byte i = 1; i <= 8; i++)
@@ -229,7 +222,9 @@ public override int Read(byte[] buffer, int offset, int count)
windowsBuffer[windowIndex++] = nextByte;
outBytesCount++;
if (windowIndex == WSIZE)
+ {
windowIndex = 0;
+ }
continue;
}
@@ -241,7 +236,9 @@ public override int Read(byte[] buffer, int offset, int count)
windowsBuffer[windowIndex++] = RunLengthCode;
outBytesCount++;
if (windowIndex == WSIZE)
+ {
windowIndex = 0;
+ }
continue;
}
@@ -268,10 +265,14 @@ public override int Read(byte[] buffer, int offset, int count)
outBytesCount++;
if (distance == WSIZE)
+ {
distance = 0;
+ }
if (windowIndex == WSIZE)
+ {
windowIndex = 0;
+ }
length--;
}
diff --git a/src/SharpCompress/Compressors/Shrink/BitStream.cs b/src/SharpCompress/Compressors/Shrink/BitStream.cs
index 8bb69ead7..e999891e8 100644
--- a/src/SharpCompress/Compressors/Shrink/BitStream.cs
+++ b/src/SharpCompress/Compressors/Shrink/BitStream.cs
@@ -1,79 +1,78 @@
-namespace SharpCompress.Compressors.Shrink
+namespace SharpCompress.Compressors.Shrink;
+
+internal class BitStream
{
- internal class BitStream
- {
- private byte[] _src;
- private int _srcLen;
- private int _byteIdx;
- private int _bitIdx;
- private int _bitsLeft;
- private ulong _bitBuffer;
- private static uint[] _maskBits = new uint[17]
- {
- 0U,
- 1U,
- 3U,
- 7U,
- 15U,
- 31U,
- 63U,
- (uint)sbyte.MaxValue,
- (uint)byte.MaxValue,
- 511U,
- 1023U,
- 2047U,
- 4095U,
- 8191U,
- 16383U,
- (uint)short.MaxValue,
- (uint)ushort.MaxValue,
- };
+ private byte[] _src;
+ private int _srcLen;
+ private int _byteIdx;
+ private int _bitIdx;
+ private int _bitsLeft;
+ private ulong _bitBuffer;
+ private static uint[] _maskBits =
+ [
+ 0U,
+ 1U,
+ 3U,
+ 7U,
+ 15U,
+ 31U,
+ 63U,
+ (uint)sbyte.MaxValue,
+ byte.MaxValue,
+ 511U,
+ 1023U,
+ 2047U,
+ 4095U,
+ 8191U,
+ 16383U,
+ (uint)short.MaxValue,
+ ushort.MaxValue,
+ ];
- public BitStream(byte[] src, int srcLen)
- {
- _src = src;
- _srcLen = srcLen;
- _byteIdx = 0;
- _bitIdx = 0;
- }
+ public BitStream(byte[] src, int srcLen)
+ {
+ _src = src;
+ _srcLen = srcLen;
+ _byteIdx = 0;
+ _bitIdx = 0;
+ }
- public int BytesRead => (_byteIdx << 3) + _bitIdx;
+ public int BytesRead => (_byteIdx << 3) + _bitIdx;
- private int NextByte()
+ private int NextByte()
+ {
+ if (_byteIdx >= _srcLen)
{
- if (_byteIdx >= _srcLen)
- {
- return 0;
- }
-
- return _src[_byteIdx++];
+ return 0;
}
- public int NextBits(int nbits)
+ return _src[_byteIdx++];
+ }
+
+ public int NextBits(int nbits)
+ {
+ var result = 0;
+ if (nbits > _bitsLeft)
{
- var result = 0;
- if (nbits > _bitsLeft)
+ int num;
+ while (_bitsLeft <= 24 && (num = NextByte()) != 1234)
{
- int num;
- while (_bitsLeft <= 24 && (num = NextByte()) != 1234)
- {
- _bitBuffer |= (ulong)num << _bitsLeft;
- _bitsLeft += 8;
- }
+ _bitBuffer |= (ulong)num << _bitsLeft;
+ _bitsLeft += 8;
}
- result = (int)((long)_bitBuffer & (long)_maskBits[nbits]);
- _bitBuffer >>= nbits;
- _bitsLeft -= nbits;
- return result;
}
+ result = (int)((long)_bitBuffer & _maskBits[nbits]);
+ _bitBuffer >>= nbits;
+ _bitsLeft -= nbits;
+ return result;
+ }
- public bool Advance(int count)
+ public bool Advance(int count)
+ {
+ if (_byteIdx > _srcLen)
{
- if (_byteIdx > _srcLen)
- {
- return false;
- }
- return true;
+ return false;
}
+ return true;
}
}
diff --git a/src/SharpCompress/Compressors/Shrink/HwUnshrink.cs b/src/SharpCompress/Compressors/Shrink/HwUnshrink.cs
index 4506ccb16..8c9c1711d 100644
--- a/src/SharpCompress/Compressors/Shrink/HwUnshrink.cs
+++ b/src/SharpCompress/Compressors/Shrink/HwUnshrink.cs
@@ -1,275 +1,297 @@
using System;
-namespace SharpCompress.Compressors.Shrink
+namespace SharpCompress.Compressors.Shrink;
+
+public static class HwUnshrink
{
- public class HwUnshrink
- {
- private const int MIN_CODE_SIZE = 9;
- private const int MAX_CODE_SIZE = 13;
+ private const int MIN_CODE_SIZE = 9;
+ private const int MAX_CODE_SIZE = 13;
+
+ private const ushort MAX_CODE = (ushort)((1U << MAX_CODE_SIZE) - 1);
+ private const ushort INVALID_CODE = ushort.MaxValue;
+ private const ushort CONTROL_CODE = 256;
+ private const ushort INC_CODE_SIZE = 1;
+ private const ushort PARTIAL_CLEAR = 2;
- private const ushort MAX_CODE = (ushort)((1U << MAX_CODE_SIZE) - 1);
- private const ushort INVALID_CODE = ushort.MaxValue;
- private const ushort CONTROL_CODE = 256;
- private const ushort INC_CODE_SIZE = 1;
- private const ushort PARTIAL_CLEAR = 2;
+ private const int HASH_BITS = MAX_CODE_SIZE + 1; // For a load factor of 0.5.
+ private const int HASHTAB_SIZE = 1 << HASH_BITS;
+ private const ushort UNKNOWN_LEN = ushort.MaxValue;
- private const int HASH_BITS = MAX_CODE_SIZE + 1; // For a load factor of 0.5.
- private const int HASHTAB_SIZE = 1 << HASH_BITS;
- private const ushort UNKNOWN_LEN = ushort.MaxValue;
+ private struct CodeTabEntry
+ {
+ public int prefixCode; // INVALID_CODE means the entry is invalid.
+ public byte extByte;
+ public ushort len;
+ public int lastDstPos;
+ }
- private struct CodeTabEntry
+ private static void CodeTabInit(CodeTabEntry[] codeTab)
+ {
+ for (var i = 0; i <= byte.MaxValue; i++)
{
- public int prefixCode; // INVALID_CODE means the entry is invalid.
- public byte extByte;
- public ushort len;
- public int lastDstPos;
+ codeTab[i].prefixCode = (ushort)i;
+ codeTab[i].extByte = (byte)i;
+ codeTab[i].len = 1;
}
- private static void CodeTabInit(CodeTabEntry[] codeTab)
+ for (var i = byte.MaxValue + 1; i <= MAX_CODE; i++)
{
- for (var i = 0; i <= byte.MaxValue; i++)
- {
- codeTab[i].prefixCode = (ushort)i;
- codeTab[i].extByte = (byte)i;
- codeTab[i].len = 1;
- }
-
- for (var i = byte.MaxValue + 1; i <= MAX_CODE; i++)
- {
- codeTab[i].prefixCode = INVALID_CODE;
- }
+ codeTab[i].prefixCode = INVALID_CODE;
}
+ }
- private static void UnshrinkPartialClear(CodeTabEntry[] codeTab, ref CodeQueue queue)
- {
- var isPrefix = new bool[MAX_CODE + 1];
- int codeQueueSize;
+ private static void UnshrinkPartialClear(CodeTabEntry[] codeTab, ref CodeQueue queue)
+ {
+ var isPrefix = new bool[MAX_CODE + 1];
+ int codeQueueSize;
- // Scan for codes that have been used as a prefix.
- for (var i = CONTROL_CODE + 1; i <= MAX_CODE; i++)
+ // Scan for codes that have been used as a prefix.
+ for (var i = CONTROL_CODE + 1; i <= MAX_CODE; i++)
+ {
+ if (codeTab[i].prefixCode != INVALID_CODE)
{
- if (codeTab[i].prefixCode != INVALID_CODE)
- {
- isPrefix[codeTab[i].prefixCode] = true;
- }
+ isPrefix[codeTab[i].prefixCode] = true;
}
-
- // Clear "non-prefix" codes in the table; populate the code queue.
- codeQueueSize = 0;
- for (var i = CONTROL_CODE + 1; i <= MAX_CODE; i++)
- {
- if (!isPrefix[i])
- {
- codeTab[i].prefixCode = INVALID_CODE;
- queue.codes[codeQueueSize++] = (ushort)i;
- }
- }
-
- queue.codes[codeQueueSize] = INVALID_CODE; // End-of-queue marker.
- queue.nextIdx = 0;
}
- private static bool ReadCode(
- BitStream stream,
- ref int codeSize,
- CodeTabEntry[] codeTab,
- ref CodeQueue queue,
- out int nextCode
- )
+ // Clear "non-prefix" codes in the table; populate the code queue.
+ codeQueueSize = 0;
+ for (var i = CONTROL_CODE + 1; i <= MAX_CODE; i++)
{
- int code,
- controlCode;
-
- code = (int)stream.NextBits(codeSize);
- if (!stream.Advance(codeSize))
+ if (!isPrefix[i])
{
- nextCode = INVALID_CODE;
- return false;
+ codeTab[i].prefixCode = INVALID_CODE;
+ queue.codes[codeQueueSize++] = (ushort)i;
}
+ }
- // Handle regular codes (the common case).
- if (code != CONTROL_CODE)
- {
- nextCode = code;
- return true;
- }
+ queue.codes[codeQueueSize] = INVALID_CODE; // End-of-queue marker.
+ queue.nextIdx = 0;
+ }
- // Handle control codes.
- controlCode = (ushort)stream.NextBits(codeSize);
- if (!stream.Advance(codeSize))
- {
- nextCode = INVALID_CODE;
- return true;
- }
+ private static bool ReadCode(
+ BitStream stream,
+ ref int codeSize,
+ CodeTabEntry[] codeTab,
+ ref CodeQueue queue,
+ out int nextCode
+ )
+ {
+ int code,
+ controlCode;
- if (controlCode == INC_CODE_SIZE && codeSize < MAX_CODE_SIZE)
- {
- codeSize++;
- return ReadCode(stream, ref codeSize, codeTab, ref queue, out nextCode);
- }
+ code = stream.NextBits(codeSize);
+ if (!stream.Advance(codeSize))
+ {
+ nextCode = INVALID_CODE;
+ return false;
+ }
- if (controlCode == PARTIAL_CLEAR)
- {
- UnshrinkPartialClear(codeTab, ref queue);
- return ReadCode(stream, ref codeSize, codeTab, ref queue, out nextCode);
- }
+ // Handle regular codes (the common case).
+ if (code != CONTROL_CODE)
+ {
+ nextCode = code;
+ return true;
+ }
+ // Handle control codes.
+ controlCode = (ushort)stream.NextBits(codeSize);
+ if (!stream.Advance(codeSize))
+ {
nextCode = INVALID_CODE;
return true;
}
- private static void CopyFromPrevPos(byte[] dst, int prevPos, int dstPos, int len)
+ if (controlCode == INC_CODE_SIZE && codeSize < MAX_CODE_SIZE)
{
- if (dstPos + len > dst.Length)
- {
- // Not enough room in dst for the sloppy copy below.
- Array.Copy(dst, prevPos, dst, dstPos, len);
- return;
- }
+ codeSize++;
+ return ReadCode(stream, ref codeSize, codeTab, ref queue, out nextCode);
+ }
- if (prevPos + len > dstPos)
- {
- // Benign one-byte overlap possible in the KwKwK case.
- //assert(prevPos + len == dstPos + 1);
- //assert(dst[prevPos] == dst[prevPos + len - 1]);
- }
+ if (controlCode == PARTIAL_CLEAR)
+ {
+ UnshrinkPartialClear(codeTab, ref queue);
+ return ReadCode(stream, ref codeSize, codeTab, ref queue, out nextCode);
+ }
- Buffer.BlockCopy(dst, prevPos, dst, dstPos, len);
+ nextCode = INVALID_CODE;
+ return true;
+ }
+
+ private static void CopyFromPrevPos(byte[] dst, int prevPos, int dstPos, int len)
+ {
+ if (dstPos + len > dst.Length)
+ {
+ // Not enough room in dst for the sloppy copy below.
+ Array.Copy(dst, prevPos, dst, dstPos, len);
+ return;
}
- private static UnshrnkStatus OutputCode(
- int code,
- byte[] dst,
- int dstPos,
- int dstCap,
- int prevCode,
- CodeTabEntry[] codeTab,
- ref CodeQueue queue,
- out byte firstByte,
- out int len
- )
+ if (prevPos + len > dstPos)
+ {
+ // Benign one-byte overlap possible in the KwKwK case.
+ //assert(prevPos + len == dstPos + 1);
+ //assert(dst[prevPos] == dst[prevPos + len - 1]);
+ }
+
+ Buffer.BlockCopy(dst, prevPos, dst, dstPos, len);
+ }
+
+ private static UnshrnkStatus OutputCode(
+ int code,
+ byte[] dst,
+ int dstPos,
+ int dstCap,
+ int prevCode,
+ CodeTabEntry[] codeTab,
+ ref CodeQueue queue,
+ out byte firstByte,
+ out int len
+ )
+ {
+ int prefixCode;
+
+ //assert(code <= MAX_CODE && code != CONTROL_CODE);
+ //assert(dstPos < dstCap);
+ firstByte = 0;
+ if (code <= byte.MaxValue)
{
- int prefixCode;
+ // Output literal byte.
+ firstByte = (byte)code;
+ len = 1;
+ dst[dstPos] = (byte)code;
+ return UnshrnkStatus.Ok;
+ }
- //assert(code <= MAX_CODE && code != CONTROL_CODE);
- //assert(dstPos < dstCap);
+ if (codeTab[code].prefixCode == INVALID_CODE || codeTab[code].prefixCode == code)
+ {
+ // Reject invalid codes. Self-referential codes may exist in the table but cannot be used.
firstByte = 0;
- if (code <= byte.MaxValue)
- {
- // Output literal byte.
- firstByte = (byte)code;
- len = 1;
- dst[dstPos] = (byte)code;
- return UnshrnkStatus.Ok;
- }
+ len = 0;
+ return UnshrnkStatus.Error;
+ }
- if (codeTab[code].prefixCode == INVALID_CODE || codeTab[code].prefixCode == code)
+ if (codeTab[code].len != UNKNOWN_LEN)
+ {
+ // Output string with known length (the common case).
+ if (dstCap - dstPos < codeTab[code].len)
{
- // Reject invalid codes. Self-referential codes may exist in the table but cannot be used.
firstByte = 0;
len = 0;
- return UnshrnkStatus.Error;
+ return UnshrnkStatus.Full;
}
- if (codeTab[code].len != UNKNOWN_LEN)
- {
- // Output string with known length (the common case).
- if (dstCap - dstPos < codeTab[code].len)
- {
- firstByte = 0;
- len = 0;
- return UnshrnkStatus.Full;
- }
+ CopyFromPrevPos(dst, codeTab[code].lastDstPos, dstPos, codeTab[code].len);
+ firstByte = dst[dstPos];
+ len = codeTab[code].len;
+ return UnshrnkStatus.Ok;
+ }
- CopyFromPrevPos(dst, codeTab[code].lastDstPos, dstPos, codeTab[code].len);
- firstByte = dst[dstPos];
- len = codeTab[code].len;
- return UnshrnkStatus.Ok;
- }
+ // Output a string of unknown length.
+ //assert(codeTab[code].len == UNKNOWN_LEN);
+ prefixCode = codeTab[code].prefixCode;
+ // assert(prefixCode > CONTROL_CODE);
- // Output a string of unknown length.
- //assert(codeTab[code].len == UNKNOWN_LEN);
- prefixCode = codeTab[code].prefixCode;
- // assert(prefixCode > CONTROL_CODE);
+ if (prefixCode == queue.codes[queue.nextIdx])
+ {
+ // The prefix code hasn't been added yet, but we were just about to: the KwKwK case.
+ //assert(codeTab[prevCode].prefixCode != INVALID_CODE);
+ codeTab[prefixCode].prefixCode = prevCode;
+ codeTab[prefixCode].extByte = firstByte;
+ codeTab[prefixCode].len = (ushort)(codeTab[prevCode].len + 1);
+ codeTab[prefixCode].lastDstPos = codeTab[prevCode].lastDstPos;
+ dst[dstPos] = firstByte;
+ }
+ else if (codeTab[prefixCode].prefixCode == INVALID_CODE)
+ {
+ // The prefix code is still invalid.
+ firstByte = 0;
+ len = 0;
+ return UnshrnkStatus.Error;
+ }
- if (prefixCode == queue.codes[queue.nextIdx])
- {
- // The prefix code hasn't been added yet, but we were just about to: the KwKwK case.
- //assert(codeTab[prevCode].prefixCode != INVALID_CODE);
- codeTab[prefixCode].prefixCode = prevCode;
- codeTab[prefixCode].extByte = firstByte;
- codeTab[prefixCode].len = (ushort)(codeTab[prevCode].len + 1);
- codeTab[prefixCode].lastDstPos = codeTab[prevCode].lastDstPos;
- dst[dstPos] = firstByte;
- }
- else if (codeTab[prefixCode].prefixCode == INVALID_CODE)
- {
- // The prefix code is still invalid.
- firstByte = 0;
- len = 0;
- return UnshrnkStatus.Error;
- }
+ // Output the prefix string, then the extension byte.
+ len = codeTab[prefixCode].len + 1;
+ if (dstCap - dstPos < len)
+ {
+ firstByte = 0;
+ len = 0;
+ return UnshrnkStatus.Full;
+ }
- // Output the prefix string, then the extension byte.
- len = codeTab[prefixCode].len + 1;
- if (dstCap - dstPos < len)
- {
- firstByte = 0;
- len = 0;
- return UnshrnkStatus.Full;
- }
+ CopyFromPrevPos(dst, codeTab[prefixCode].lastDstPos, dstPos, codeTab[prefixCode].len);
+ dst[dstPos + len - 1] = codeTab[code].extByte;
+ firstByte = dst[dstPos];
- CopyFromPrevPos(dst, codeTab[prefixCode].lastDstPos, dstPos, codeTab[prefixCode].len);
- dst[dstPos + len - 1] = codeTab[code].extByte;
- firstByte = dst[dstPos];
+ // Update the code table now that the string has a length and pos.
+ //assert(prevCode != code);
+ codeTab[code].len = (ushort)len;
+ codeTab[code].lastDstPos = dstPos;
- // Update the code table now that the string has a length and pos.
- //assert(prevCode != code);
- codeTab[code].len = (ushort)len;
- codeTab[code].lastDstPos = dstPos;
+ return UnshrnkStatus.Ok;
+ }
+ public static UnshrnkStatus Unshrink(
+ byte[] src,
+ int srcLen,
+ out int srcUsed,
+ byte[] dst,
+ int dstCap,
+ out int dstUsed
+ )
+ {
+ var codeTab = new CodeTabEntry[HASHTAB_SIZE];
+ var queue = new CodeQueue();
+ var stream = new BitStream(src, srcLen);
+ int codeSize,
+ dstPos,
+ len;
+ int currCode,
+ prevCode,
+ newCode;
+ byte firstByte;
+
+ CodeTabInit(codeTab);
+ CodeQueueInit(ref queue);
+ codeSize = MIN_CODE_SIZE;
+ dstPos = 0;
+
+ // Handle the first code separately since there is no previous code.
+ if (!ReadCode(stream, ref codeSize, codeTab, ref queue, out currCode))
+ {
+ srcUsed = stream.BytesRead;
+ dstUsed = 0;
return UnshrnkStatus.Ok;
}
- public static UnshrnkStatus Unshrink(
- byte[] src,
- int srcLen,
- out int srcUsed,
- byte[] dst,
- int dstCap,
- out int dstUsed
- )
+ //assert(currCode != CONTROL_CODE);
+ if (currCode > byte.MaxValue)
{
- var codeTab = new CodeTabEntry[HASHTAB_SIZE];
- var queue = new CodeQueue();
- var stream = new BitStream(src, srcLen);
- int codeSize,
- dstPos,
- len;
- int currCode,
- prevCode,
- newCode;
- byte firstByte;
+ srcUsed = stream.BytesRead;
+ dstUsed = 0;
+ return UnshrnkStatus.Error; // The first code must be a literal.
+ }
- CodeTabInit(codeTab);
- CodeQueueInit(ref queue);
- codeSize = MIN_CODE_SIZE;
- dstPos = 0;
+ if (dstPos == dstCap)
+ {
+ srcUsed = stream.BytesRead;
+ dstUsed = 0;
+ return UnshrnkStatus.Full;
+ }
- // Handle the first code separately since there is no previous code.
- if (!ReadCode(stream, ref codeSize, codeTab, ref queue, out currCode))
- {
- srcUsed = stream.BytesRead;
- dstUsed = 0;
- return UnshrnkStatus.Ok;
- }
+ firstByte = (byte)currCode;
+ dst[dstPos] = (byte)currCode;
+ codeTab[currCode].lastDstPos = dstPos;
+ dstPos++;
- //assert(currCode != CONTROL_CODE);
- if (currCode > byte.MaxValue)
+ prevCode = currCode;
+ while (ReadCode(stream, ref codeSize, codeTab, ref queue, out currCode))
+ {
+ if (currCode == INVALID_CODE)
{
srcUsed = stream.BytesRead;
dstUsed = 0;
- return UnshrnkStatus.Error; // The first code must be a literal.
+ return UnshrnkStatus.Error;
}
if (dstPos == dstCap)
@@ -279,153 +301,130 @@ out int dstUsed
return UnshrnkStatus.Full;
}
- firstByte = (byte)currCode;
- dst[dstPos] = (byte)currCode;
- codeTab[currCode].lastDstPos = dstPos;
- dstPos++;
-
- prevCode = currCode;
- while (ReadCode(stream, ref codeSize, codeTab, ref queue, out currCode))
+ // Handle KwKwK: next code used before being added.
+ if (currCode == queue.codes[queue.nextIdx])
{
- if (currCode == INVALID_CODE)
+ if (codeTab[prevCode].prefixCode == INVALID_CODE)
{
+ // The previous code is no longer valid.
srcUsed = stream.BytesRead;
dstUsed = 0;
return UnshrnkStatus.Error;
}
- if (dstPos == dstCap)
- {
- srcUsed = stream.BytesRead;
- dstUsed = 0;
- return UnshrnkStatus.Full;
- }
+ // Extend the previous code with its first byte.
+ //assert(currCode != prevCode);
+ codeTab[currCode].prefixCode = prevCode;
+ codeTab[currCode].extByte = firstByte;
+ codeTab[currCode].len = (ushort)(codeTab[prevCode].len + 1);
+ codeTab[currCode].lastDstPos = codeTab[prevCode].lastDstPos;
+ //assert(dstPos < dstCap);
+ dst[dstPos] = firstByte;
+ }
- // Handle KwKwK: next code used before being added.
- if (currCode == queue.codes[queue.nextIdx])
- {
- if (codeTab[prevCode].prefixCode == INVALID_CODE)
- {
- // The previous code is no longer valid.
- srcUsed = stream.BytesRead;
- dstUsed = 0;
- return UnshrnkStatus.Error;
- }
-
- // Extend the previous code with its first byte.
- //assert(currCode != prevCode);
- codeTab[currCode].prefixCode = prevCode;
- codeTab[currCode].extByte = firstByte;
- codeTab[currCode].len = (ushort)(codeTab[prevCode].len + 1);
- codeTab[currCode].lastDstPos = codeTab[prevCode].lastDstPos;
- //assert(dstPos < dstCap);
- dst[dstPos] = firstByte;
- }
+ // Output the string represented by the current code.
+ var status = OutputCode(
+ currCode,
+ dst,
+ dstPos,
+ dstCap,
+ prevCode,
+ codeTab,
+ ref queue,
+ out firstByte,
+ out len
+ );
+ if (status != UnshrnkStatus.Ok)
+ {
+ srcUsed = stream.BytesRead;
+ dstUsed = 0;
+ return status;
+ }
- // Output the string represented by the current code.
- var status = OutputCode(
- currCode,
- dst,
- dstPos,
- dstCap,
- prevCode,
- codeTab,
- ref queue,
- out firstByte,
- out len
- );
- if (status != UnshrnkStatus.Ok)
- {
- srcUsed = stream.BytesRead;
- dstUsed = 0;
- return status;
- }
+ // Verify that the output matches walking the prefixes.
+ var c = currCode;
+ for (var i = 0; i < len; i++)
+ {
+ // assert(codeTab[c].len == len - i);
+ //assert(codeTab[c].extByte == dst[dstPos + len - i - 1]);
+ c = codeTab[c].prefixCode;
+ }
- // Verify that the output matches walking the prefixes.
- var c = currCode;
- for (var i = 0; i < len; i++)
- {
- // assert(codeTab[c].len == len - i);
- //assert(codeTab[c].extByte == dst[dstPos + len - i - 1]);
- c = codeTab[c].prefixCode;
- }
+ // Add a new code to the string table if there's room.
+ // The string is the previous code's string extended with the first byte of the current code's string.
+ newCode = CodeQueueRemoveNext(ref queue);
+ if (newCode != INVALID_CODE)
+ {
+ //assert(codeTab[prevCode].lastDstPos < dstPos);
+ codeTab[newCode].prefixCode = prevCode;
+ codeTab[newCode].extByte = firstByte;
+ codeTab[newCode].len = (ushort)(codeTab[prevCode].len + 1);
+ codeTab[newCode].lastDstPos = codeTab[prevCode].lastDstPos;
- // Add a new code to the string table if there's room.
- // The string is the previous code's string extended with the first byte of the current code's string.
- newCode = CodeQueueRemoveNext(ref queue);
- if (newCode != INVALID_CODE)
+ if (codeTab[prevCode].prefixCode == INVALID_CODE)
{
- //assert(codeTab[prevCode].lastDstPos < dstPos);
- codeTab[newCode].prefixCode = prevCode;
- codeTab[newCode].extByte = firstByte;
- codeTab[newCode].len = (ushort)(codeTab[prevCode].len + 1);
- codeTab[newCode].lastDstPos = codeTab[prevCode].lastDstPos;
-
- if (codeTab[prevCode].prefixCode == INVALID_CODE)
- {
- // prevCode was invalidated in a partial clearing. Until that code is re-used, the
- // string represented by newCode is indeterminate.
- codeTab[newCode].len = UNKNOWN_LEN;
- }
- // If prevCode was invalidated in a partial clearing, it's possible that newCode == prevCode,
- // in which case it will never be used or cleared.
+ // prevCode was invalidated in a partial clearing. Until that code is re-used, the
+ // string represented by newCode is indeterminate.
+ codeTab[newCode].len = UNKNOWN_LEN;
}
-
- codeTab[currCode].lastDstPos = dstPos;
- dstPos += len;
-
- prevCode = currCode;
+ // If prevCode was invalidated in a partial clearing, it's possible that newCode == prevCode,
+ // in which case it will never be used or cleared.
}
- srcUsed = stream.BytesRead;
- dstUsed = dstPos;
+ codeTab[currCode].lastDstPos = dstPos;
+ dstPos += len;
- return UnshrnkStatus.Ok;
+ prevCode = currCode;
}
- public enum UnshrnkStatus
- {
- Ok,
- Full,
- Error,
- }
+ srcUsed = stream.BytesRead;
+ dstUsed = dstPos;
- private struct CodeQueue
- {
- public int nextIdx;
- public ushort[] codes;
- }
+ return UnshrnkStatus.Ok;
+ }
- private static void CodeQueueInit(ref CodeQueue q)
- {
- int codeQueueSize;
- ushort code;
+ public enum UnshrnkStatus
+ {
+ Ok,
+ Full,
+ Error,
+ }
- codeQueueSize = 0;
- q.codes = new ushort[MAX_CODE - CONTROL_CODE + 2];
+ private struct CodeQueue
+ {
+ public int nextIdx;
+ public ushort[] codes;
+ }
- for (code = CONTROL_CODE + 1; code <= MAX_CODE; code++)
- {
- q.codes[codeQueueSize++] = code;
- }
+ private static void CodeQueueInit(ref CodeQueue q)
+ {
+ int codeQueueSize;
+ ushort code;
- //assert(codeQueueSize < q.codes.Length);
- q.codes[codeQueueSize] = INVALID_CODE; // End-of-queue marker.
- q.nextIdx = 0;
+ codeQueueSize = 0;
+ q.codes = new ushort[MAX_CODE - CONTROL_CODE + 2];
+
+ for (code = CONTROL_CODE + 1; code <= MAX_CODE; code++)
+ {
+ q.codes[codeQueueSize++] = code;
}
- private static ushort CodeQueueNext(ref CodeQueue q) =>
- //assert(q.nextIdx < q.codes.Length);
- q.codes[q.nextIdx];
+ //assert(codeQueueSize < q.codes.Length);
+ q.codes[codeQueueSize] = INVALID_CODE; // End-of-queue marker.
+ q.nextIdx = 0;
+ }
+
+ private static ushort CodeQueueNext(ref CodeQueue q) =>
+ //assert(q.nextIdx < q.codes.Length);
+ q.codes[q.nextIdx];
- private static ushort CodeQueueRemoveNext(ref CodeQueue q)
+ private static ushort CodeQueueRemoveNext(ref CodeQueue q)
+ {
+ var code = CodeQueueNext(ref q);
+ if (code != INVALID_CODE)
{
- var code = CodeQueueNext(ref q);
- if (code != INVALID_CODE)
- {
- q.nextIdx++;
- }
- return code;
+ q.nextIdx++;
}
+ return code;
}
}
diff --git a/src/SharpCompress/Compressors/Squeezed/BitReader.cs b/src/SharpCompress/Compressors/Squeezed/BitReader.cs
index 123acfb56..c86c9ec28 100644
--- a/src/SharpCompress/Compressors/Squeezed/BitReader.cs
+++ b/src/SharpCompress/Compressors/Squeezed/BitReader.cs
@@ -1,6 +1,8 @@
using System;
using System.IO;
+namespace SharpCompress.Compressors.Squeezed;
+
public class BitReader
{
private readonly Stream _stream;
@@ -20,7 +22,10 @@ public bool ReadBit()
{
int nextByte = _stream.ReadByte();
if (nextByte == -1)
+ {
throw new EndOfStreamException();
+ }
+
_bitBuffer = nextByte;
_bitCount = 8;
}
diff --git a/src/SharpCompress/Compressors/Squeezed/SqueezedStream.cs b/src/SharpCompress/Compressors/Squeezed/SqueezedStream.cs
index bd9760df9..18ff95209 100644
--- a/src/SharpCompress/Compressors/Squeezed/SqueezedStream.cs
+++ b/src/SharpCompress/Compressors/Squeezed/SqueezedStream.cs
@@ -2,144 +2,141 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
using SharpCompress.Compressors.RLE90;
using SharpCompress.IO;
-namespace SharpCompress.Compressors.Squeezed
+namespace SharpCompress.Compressors.Squeezed;
+
+public class SqueezeStream : Stream, IStreamStack
{
- public class SqueezeStream : Stream, IStreamStack
- {
#if DEBUG_STREAMS
- long IStreamStack.InstanceId { get; set; }
+ long IStreamStack.InstanceId { get; set; }
#endif
- int IStreamStack.DefaultBufferSize { get; set; }
+ int IStreamStack.DefaultBufferSize { get; set; }
- Stream IStreamStack.BaseStream() => _stream;
+ Stream IStreamStack.BaseStream() => _stream;
- int IStreamStack.BufferSize
- {
- get => 0;
- set { }
- }
- int IStreamStack.BufferPosition
- {
- get => 0;
- set { }
- }
+ int IStreamStack.BufferSize
+ {
+ get => 0;
+ set { }
+ }
+ int IStreamStack.BufferPosition
+ {
+ get => 0;
+ set { }
+ }
- void IStreamStack.SetPosition(long position) { }
+ void IStreamStack.SetPosition(long position) { }
- private readonly Stream _stream;
- private readonly int _compressedSize;
- private const int NUMVALS = 257;
- private const int SPEOF = 256;
- private bool _processed = false;
+ private readonly Stream _stream;
+ private readonly int _compressedSize;
+ private const int NUMVALS = 257;
+ private const int SPEOF = 256;
+ private bool _processed = false;
- public SqueezeStream(Stream stream, int compressedSize)
- {
- _stream = stream;
- _compressedSize = compressedSize;
+ public SqueezeStream(Stream stream, int compressedSize)
+ {
+ _stream = stream;
+ _compressedSize = compressedSize;
#if DEBUG_STREAMS
- this.DebugConstruct(typeof(SqueezeStream));
+ this.DebugConstruct(typeof(SqueezeStream));
#endif
- }
+ }
- protected override void Dispose(bool disposing)
- {
+ protected override void Dispose(bool disposing)
+ {
#if DEBUG_STREAMS
- this.DebugDispose(typeof(SqueezeStream));
+ this.DebugDispose(typeof(SqueezeStream));
#endif
- base.Dispose(disposing);
- }
+ base.Dispose(disposing);
+ }
- public override bool CanRead => true;
+ public override bool CanRead => true;
- public override bool CanSeek => false;
+ public override bool CanSeek => false;
- public override bool CanWrite => false;
+ public override bool CanWrite => false;
- public override long Length => throw new NotImplementedException();
+ public override long Length => throw new NotImplementedException();
- public override long Position
- {
- get => _stream.Position;
- set => throw new NotImplementedException();
- }
+ public override long Position
+ {
+ get => _stream.Position;
+ set => throw new NotImplementedException();
+ }
- public override void Flush() => throw new NotImplementedException();
+ public override void Flush() => throw new NotImplementedException();
- public override int Read(byte[] buffer, int offset, int count)
+ public override int Read(byte[] buffer, int offset, int count)
+ {
+ if (_processed)
{
- if (_processed)
- {
- return 0;
- }
- _processed = true;
- using var binaryReader = new BinaryReader(_stream);
+ return 0;
+ }
+ _processed = true;
+ using var binaryReader = new BinaryReader(_stream);
- // Read numnodes (equivalent to convert_u16!(numnodes, buf))
- var numnodes = binaryReader.ReadUInt16();
+ // Read numnodes (equivalent to convert_u16!(numnodes, buf))
+ var numnodes = binaryReader.ReadUInt16();
- // Validation: numnodes should be within bounds
- if (numnodes >= NUMVALS)
- {
- throw new InvalidDataException(
- $"Invalid number of nodes {numnodes} (max {NUMVALS - 1})"
- );
- }
+ // Validation: numnodes should be within bounds
+ if (numnodes >= NUMVALS)
+ {
+ throw new InvalidDataException(
+ $"Invalid number of nodes {numnodes} (max {NUMVALS - 1})"
+ );
+ }
- // Handle the case where no nodes exist
- if (numnodes == 0)
- {
- return 0;
- }
+ // Handle the case where no nodes exist
+ if (numnodes == 0)
+ {
+ return 0;
+ }
- // Build dnode (tree of nodes)
- var dnode = new int[numnodes, 2];
- for (int j = 0; j < numnodes; j++)
- {
- dnode[j, 0] = binaryReader.ReadInt16();
- dnode[j, 1] = binaryReader.ReadInt16();
- }
+ // Build dnode (tree of nodes)
+ var dnode = new int[numnodes, 2];
+ for (int j = 0; j < numnodes; j++)
+ {
+ dnode[j, 0] = binaryReader.ReadInt16();
+ dnode[j, 1] = binaryReader.ReadInt16();
+ }
- // Initialize BitReader for reading bits
- var bitReader = new BitReader(_stream);
- var decoded = new List();
+ // Initialize BitReader for reading bits
+ var bitReader = new BitReader(_stream);
+        var decoded = new List<byte>();
- int i = 0;
- // Decode the buffer using the dnode tree
- while (true)
+ int i = 0;
+ // Decode the buffer using the dnode tree
+ while (true)
+ {
+ i = dnode[i, bitReader.ReadBit() ? 1 : 0];
+ if (i < 0)
{
- i = dnode[i, bitReader.ReadBit() ? 1 : 0];
- if (i < 0)
+ i = (short)-(i + 1);
+ if (i == SPEOF)
{
- i = (short)-(i + 1);
- if (i == SPEOF)
- {
- break;
- }
- else
- {
- decoded.Add((byte)i);
- i = 0;
- }
+ break;
+ }
+ else
+ {
+ decoded.Add((byte)i);
+ i = 0;
}
}
-
- // Unpack the decoded buffer using the RLE class
- var unpacked = RLE.UnpackRLE(decoded.ToArray());
- unpacked.CopyTo(buffer, 0);
- return unpacked.Count();
}
- public override long Seek(long offset, SeekOrigin origin) =>
- throw new NotImplementedException();
+ // Unpack the decoded buffer using the RLE class
+ var unpacked = RLE.UnpackRLE(decoded.ToArray());
+ unpacked.CopyTo(buffer, 0);
+ return unpacked.Count;
+ }
- public override void SetLength(long value) => throw new NotImplementedException();
+ public override long Seek(long offset, SeekOrigin origin) =>
+ throw new NotImplementedException();
- public override void Write(byte[] buffer, int offset, int count) =>
- throw new NotImplementedException();
- }
+ public override void SetLength(long value) => throw new NotImplementedException();
+
+ public override void Write(byte[] buffer, int offset, int count) =>
+ throw new NotImplementedException();
}
diff --git a/src/SharpCompress/Compressors/Xz/Filters/Lzma2Filter.cs b/src/SharpCompress/Compressors/Xz/Filters/Lzma2Filter.cs
index ea078c9d5..46e904898 100644
--- a/src/SharpCompress/Compressors/Xz/Filters/Lzma2Filter.cs
+++ b/src/SharpCompress/Compressors/Xz/Filters/Lzma2Filter.cs
@@ -50,7 +50,7 @@ public override void Init(byte[] properties)
public override void ValidateFilter() { }
public override void SetBaseStream(Stream stream) =>
- BaseStream = new LzmaStream(new[] { _dictionarySize }, stream);
+ BaseStream = new LzmaStream([_dictionarySize], stream);
public override int Read(byte[] buffer, int offset, int count) =>
BaseStream.Read(buffer, offset, count);
diff --git a/src/SharpCompress/Compressors/Xz/XZBlock.cs b/src/SharpCompress/Compressors/Xz/XZBlock.cs
index cdb075ef8..9da955cd9 100644
--- a/src/SharpCompress/Compressors/Xz/XZBlock.cs
+++ b/src/SharpCompress/Compressors/Xz/XZBlock.cs
@@ -99,7 +99,7 @@ private void CheckCrc()
private void ConnectStream()
{
_decomStream = BaseStream;
- while (Filters.Any())
+ while (Filters.Count > 0)
{
var filter = Filters.Pop();
filter.SetBaseStream(_decomStream);
diff --git a/src/SharpCompress/Compressors/Xz/XZFooter.cs b/src/SharpCompress/Compressors/Xz/XZFooter.cs
index 2610ed69b..293b1d663 100644
--- a/src/SharpCompress/Compressors/Xz/XZFooter.cs
+++ b/src/SharpCompress/Compressors/Xz/XZFooter.cs
@@ -22,9 +22,7 @@ public XZFooter(BinaryReader reader)
public static XZFooter FromStream(Stream stream)
{
- var footer = new XZFooter(
- new BinaryReader(SharpCompressStream.Create(stream, leaveOpen: true), Encoding.UTF8)
- );
+ var footer = new XZFooter(new BinaryReader(stream, Encoding.UTF8, true));
footer.Process();
return footer;
}
diff --git a/src/SharpCompress/Compressors/Xz/XZHeader.cs b/src/SharpCompress/Compressors/Xz/XZHeader.cs
index 5df5447dc..00a9f4ecb 100644
--- a/src/SharpCompress/Compressors/Xz/XZHeader.cs
+++ b/src/SharpCompress/Compressors/Xz/XZHeader.cs
@@ -9,18 +9,19 @@ namespace SharpCompress.Compressors.Xz;
public class XZHeader
{
private readonly BinaryReader _reader;
- private readonly byte[] MagicHeader = { 0xFD, 0x37, 0x7A, 0x58, 0x5a, 0x00 };
+ private readonly byte[] MagicHeader = [0xFD, 0x37, 0x7A, 0x58, 0x5a, 0x00];
public CheckType BlockCheckType { get; private set; }
public int BlockCheckSize => 4 << ((((int)BlockCheckType + 2) / 3) - 1);
- public XZHeader(BinaryReader reader) => _reader = reader;
+ public XZHeader(BinaryReader reader)
+ {
+ _reader = reader;
+ }
public static XZHeader FromStream(Stream stream)
{
- var header = new XZHeader(
- new BinaryReader(SharpCompressStream.Create(stream, leaveOpen: true), Encoding.UTF8)
- );
+ var header = new XZHeader(new BinaryReader(stream, Encoding.UTF8, true));
header.Process();
return header;
}
diff --git a/src/SharpCompress/Compressors/Xz/XZIndex.cs b/src/SharpCompress/Compressors/Xz/XZIndex.cs
index 95ad2c619..9c920c5d3 100644
--- a/src/SharpCompress/Compressors/Xz/XZIndex.cs
+++ b/src/SharpCompress/Compressors/Xz/XZIndex.cs
@@ -14,7 +14,7 @@ public class XZIndex
private readonly BinaryReader _reader;
public long StreamStartPosition { get; private set; }
public ulong NumberOfRecords { get; private set; }
- public List Records { get; } = new();
+    public List<XZIndexRecord> Records { get; } = [];
private readonly bool _indexMarkerAlreadyVerified;
@@ -32,7 +32,7 @@ public XZIndex(BinaryReader reader, bool indexMarkerAlreadyVerified)
public static XZIndex FromStream(Stream stream, bool indexMarkerAlreadyVerified)
{
var index = new XZIndex(
- new BinaryReader(SharpCompressStream.Create(stream, leaveOpen: true), Encoding.UTF8),
+ new BinaryReader(stream, Encoding.UTF8, true),
indexMarkerAlreadyVerified
);
index.Process();
@@ -79,7 +79,7 @@ private void SkipPadding()
private void VerifyCrc32()
{
- var crc = _reader.ReadLittleEndianUInt32();
+ var _ = _reader.ReadLittleEndianUInt32();
// TODO verify this matches
}
}
diff --git a/src/SharpCompress/Compressors/Xz/XZReadOnlyStream.cs b/src/SharpCompress/Compressors/Xz/XZReadOnlyStream.cs
index 210478d61..fa540b646 100644
--- a/src/SharpCompress/Compressors/Xz/XZReadOnlyStream.cs
+++ b/src/SharpCompress/Compressors/Xz/XZReadOnlyStream.cs
@@ -26,7 +26,7 @@ int IStreamStack.BufferPosition
void IStreamStack.SetPosition(long position) { }
- public XZReadOnlyStream(Stream stream)
+ protected XZReadOnlyStream(Stream stream)
{
BaseStream = stream;
if (!BaseStream.CanRead)
diff --git a/src/SharpCompress/Compressors/ZStandard/ZStandardStream.cs b/src/SharpCompress/Compressors/ZStandard/ZStandardStream.cs
index 61f7fbb46..5feeb876d 100644
--- a/src/SharpCompress/Compressors/ZStandard/ZStandardStream.cs
+++ b/src/SharpCompress/Compressors/ZStandard/ZStandardStream.cs
@@ -1,9 +1,6 @@
using System;
-using System.Collections.Generic;
using System.IO;
-using System.Linq;
using System.Text;
-using System.Threading.Tasks;
using SharpCompress.IO;
namespace SharpCompress.Compressors.ZStandard;
@@ -34,7 +31,7 @@ void IStreamStack.SetPosition(long position) { }
internal static bool IsZStandard(Stream stream)
{
- var br = new BinaryReader(stream);
+ using var br = new BinaryReader(stream, Encoding.UTF8, true);
var magic = br.ReadUInt32();
if (ZstandardConstants.MAGIC != magic)
{
@@ -59,7 +56,7 @@ public ZStandardStream(Stream baseInputStream)
/// Attempting to set the position
public override long Position
{
- get { return stream.Position; }
- set { throw new NotSupportedException("InflaterInputStream Position not supported"); }
+ get => stream.Position;
+ set => throw new NotSupportedException("InflaterInputStream Position not supported");
}
}
diff --git a/src/SharpCompress/Compressors/ZStandard/ZstandardConstants.cs b/src/SharpCompress/Compressors/ZStandard/ZstandardConstants.cs
index 8025c658e..ac4756d48 100644
--- a/src/SharpCompress/Compressors/ZStandard/ZstandardConstants.cs
+++ b/src/SharpCompress/Compressors/ZStandard/ZstandardConstants.cs
@@ -1,9 +1,3 @@
-using System;
-using System.Collections.Generic;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
-
namespace SharpCompress.Compressors.ZStandard;
internal class ZstandardConstants
diff --git a/src/SharpCompress/Factories/ArcFactory.cs b/src/SharpCompress/Factories/ArcFactory.cs
index b5180afae..707fdeb82 100644
--- a/src/SharpCompress/Factories/ArcFactory.cs
+++ b/src/SharpCompress/Factories/ArcFactory.cs
@@ -1,46 +1,39 @@
-using System;
using System.Collections.Generic;
using System.IO;
-using System.Linq;
-using System.Security.Cryptography;
-using System.Text;
-using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Readers;
using SharpCompress.Readers.Arc;
-using static System.Net.Mime.MediaTypeNames;
-namespace SharpCompress.Factories
-{
- public class ArcFactory : Factory, IReaderFactory
- {
- public override string Name => "Arc";
+namespace SharpCompress.Factories;
- public override ArchiveType? KnownArchiveType => ArchiveType.Arc;
+public class ArcFactory : Factory, IReaderFactory
+{
+ public override string Name => "Arc";
- public override IEnumerable GetSupportedExtensions()
- {
- yield return "arc";
- }
+ public override ArchiveType? KnownArchiveType => ArchiveType.Arc;
- public override bool IsArchive(
- Stream stream,
- string? password = null,
- int bufferSize = ReaderOptions.DefaultBufferSize
- )
- {
- //You may have to use some(paranoid) checks to ensure that you actually are
- //processing an ARC file, since other archivers also adopted the idea of putting
- //a 01Ah byte at offset 0, namely the Hyper archiver. To check if you have a
- //Hyper - archive, check the next two bytes for "HP" or "ST"(or look below for
- //"HYP").Also the ZOO archiver also does put a 01Ah at the start of the file,
- //see the ZOO entry below.
- var bytes = new byte[2];
- stream.Read(bytes, 0, 2);
- return bytes[0] == 0x1A && bytes[1] < 10; //rather thin, but this is all we have
- }
+    public override IEnumerable<string> GetSupportedExtensions()
+ {
+ yield return "arc";
+ }
- public IReader OpenReader(Stream stream, ReaderOptions? options) =>
- ArcReader.Open(stream, options);
+ public override bool IsArchive(
+ Stream stream,
+ string? password = null,
+ int bufferSize = ReaderOptions.DefaultBufferSize
+ )
+ {
+ //You may have to use some(paranoid) checks to ensure that you actually are
+ //processing an ARC file, since other archivers also adopted the idea of putting
+ //a 01Ah byte at offset 0, namely the Hyper archiver. To check if you have a
+ //Hyper - archive, check the next two bytes for "HP" or "ST"(or look below for
+ //"HYP").Also the ZOO archiver also does put a 01Ah at the start of the file,
+ //see the ZOO entry below.
+ var bytes = new byte[2];
+ stream.Read(bytes, 0, 2);
+ return bytes[0] == 0x1A && bytes[1] < 10; //rather thin, but this is all we have
}
+
+ public IReader OpenReader(Stream stream, ReaderOptions? options) =>
+ ArcReader.Open(stream, options);
}
diff --git a/src/SharpCompress/Factories/Factory.cs b/src/SharpCompress/Factories/Factory.cs
index 1b9e23e96..017d6deda 100644
--- a/src/SharpCompress/Factories/Factory.cs
+++ b/src/SharpCompress/Factories/Factory.cs
@@ -20,7 +20,7 @@ static Factory()
RegisterFactory(new ArcFactory());
}
- private static readonly HashSet _factories = new();
+    private static readonly HashSet<Factory> _factories = [];
///
/// Gets the collection of registered .
@@ -78,11 +78,11 @@ out IReader? reader
if (this is IReaderFactory readerFactory)
{
- long pos = ((IStreamStack)stream).GetPosition();
+ long pos = stream.GetPosition();
if (IsArchive(stream, options.Password, options.BufferSize))
{
- ((IStreamStack)stream).StackSeek(pos);
+ stream.StackSeek(pos);
reader = readerFactory.OpenReader(stream, options);
return true;
}
diff --git a/src/SharpCompress/Factories/GZipFactory.cs b/src/SharpCompress/Factories/GZipFactory.cs
index 17f344cf0..eb25a5220 100644
--- a/src/SharpCompress/Factories/GZipFactory.cs
+++ b/src/SharpCompress/Factories/GZipFactory.cs
@@ -83,20 +83,20 @@ out IReader? reader
{
reader = null;
- long pos = ((IStreamStack)rewindableStream).GetPosition();
+ long pos = rewindableStream.GetPosition();
if (GZipArchive.IsGZipFile(rewindableStream))
{
- ((IStreamStack)rewindableStream).StackSeek(pos);
+ rewindableStream.StackSeek(pos);
var testStream = new GZipStream(rewindableStream, CompressionMode.Decompress);
if (TarArchive.IsTarFile(testStream))
{
- ((IStreamStack)rewindableStream).StackSeek(pos);
+ rewindableStream.StackSeek(pos);
reader = new TarReader(rewindableStream, options, CompressionType.GZip);
return true;
}
- ((IStreamStack)rewindableStream).StackSeek(pos);
+ rewindableStream.StackSeek(pos);
reader = OpenReader(rewindableStream, options);
return true;
}
diff --git a/src/SharpCompress/Factories/TarFactory.cs b/src/SharpCompress/Factories/TarFactory.cs
index d32020fd7..12dae1841 100644
--- a/src/SharpCompress/Factories/TarFactory.cs
+++ b/src/SharpCompress/Factories/TarFactory.cs
@@ -166,7 +166,7 @@ out IReader? reader
)
{
reader = null;
- long pos = ((IStreamStack)rewindableStream).GetPosition();
+ long pos = rewindableStream.GetPosition();
TestOption? testedOption = null;
if (!string.IsNullOrWhiteSpace(options.ExtensionHint))
{
@@ -192,7 +192,7 @@ out IReader? reader
{
continue; // Already tested above
}
- ((IStreamStack)rewindableStream).StackSeek(pos);
+ rewindableStream.StackSeek(pos);
reader = TryOption(rewindableStream, options, pos, testOption);
if (reader != null)
{
@@ -212,17 +212,16 @@ TestOption testOption
{
if (testOption.CanHandle(rewindableStream))
{
- ((IStreamStack)rewindableStream).StackSeek(pos);
- var inStream = rewindableStream;
+ rewindableStream.StackSeek(pos);
if (testOption.WrapInSharpCompressStream)
{
- inStream = SharpCompressStream.Create(rewindableStream, leaveOpen: true);
+ rewindableStream = SharpCompressStream.Create(rewindableStream, leaveOpen: true);
}
var testStream = testOption.CreateStream(rewindableStream);
if (TarArchive.IsTarFile(testStream))
{
- ((IStreamStack)rewindableStream).StackSeek(pos);
+ rewindableStream.StackSeek(pos);
return new TarReader(rewindableStream, options, testOption.Type);
}
}
diff --git a/src/SharpCompress/Factories/ZStandardFactory.cs b/src/SharpCompress/Factories/ZStandardFactory.cs
index a5c6d84f2..1e79508c7 100644
--- a/src/SharpCompress/Factories/ZStandardFactory.cs
+++ b/src/SharpCompress/Factories/ZStandardFactory.cs
@@ -1,12 +1,6 @@
-using System;
using System.Collections.Generic;
using System.IO;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
-using SharpCompress.Archives;
using SharpCompress.Compressors.ZStandard;
-using SharpCompress.Readers;
namespace SharpCompress.Factories;
diff --git a/src/SharpCompress/IO/DataDescriptorStream.cs b/src/SharpCompress/IO/DataDescriptorStream.cs
index 235c7a30f..31b7d3b8e 100644
--- a/src/SharpCompress/IO/DataDescriptorStream.cs
+++ b/src/SharpCompress/IO/DataDescriptorStream.cs
@@ -1,5 +1,6 @@
using System;
using System.IO;
+using System.Text;
namespace SharpCompress.IO;
@@ -31,7 +32,7 @@ void IStreamStack.SetPosition(long position) { }
private bool _isDisposed;
private bool _done;
- private static byte[] _dataDescriptorMarker = new byte[] { 0x50, 0x4b, 0x07, 0x08 };
+ private static byte[] _dataDescriptorMarker = [0x50, 0x4b, 0x07, 0x08];
private static long _dataDescriptorSize = 24;
public DataDescriptorStream(Stream stream)
@@ -83,7 +84,7 @@ public override long Position
private bool validate_data_descriptor(Stream stream, long size)
{
- var br = new BinaryReader(stream);
+ using var br = new BinaryReader(stream, Encoding.UTF8, true);
br.ReadUInt32();
br.ReadUInt32(); // CRC32 can be checked if we calculate it
var compressedSize = br.ReadUInt32();
diff --git a/src/SharpCompress/IO/IStreamStack.cs b/src/SharpCompress/IO/IStreamStack.cs
index 56c020c8f..aa0968551 100644
--- a/src/SharpCompress/IO/IStreamStack.cs
+++ b/src/SharpCompress/IO/IStreamStack.cs
@@ -1,364 +1,44 @@
-using System;
-using System.Collections;
-using System.Collections.Generic;
-using System.Diagnostics;
using System.IO;
-using System.Linq;
-using System.Text;
-namespace SharpCompress.IO
-{
- public interface IStreamStack
- {
- ///
- /// Gets or sets the default buffer size to be applied when buffering is enabled for this stream stack.
- /// This value is used by the SetBuffer extension method to configure buffering on the appropriate stream
- /// in the stack hierarchy. A value of 0 indicates no default buffer size is set.
- ///
- int DefaultBufferSize { get; set; }
-
- ///
- /// Returns the immediate underlying stream in the stack.
- ///
- Stream BaseStream();
-
- ///
- /// Gets or sets the size of the buffer if the stream supports buffering; otherwise, returns 0.
- /// This property must not throw.
- ///
- int BufferSize { get; set; }
-
- ///
- /// Gets or sets the current position within the buffer if the stream supports buffering; otherwise, returns 0.
- /// This property must not throw.
- ///
- int BufferPosition { get; set; }
-
- ///
- /// Updates the internal position state of the stream. This should not perform seeking on the underlying stream,
- /// but should update any internal position or buffer state as appropriate for the stream implementation.
- ///
- /// The absolute position to set within the stream stack.
- void SetPosition(long position);
-
-#if DEBUG_STREAMS
- ///
- /// Gets or sets the unique instance identifier for debugging purposes.
- ///
- long InstanceId { get; set; }
-#endif
- }
-
- internal static class StackStreamExtensions
- {
- ///
- /// Gets the logical position of the first buffering stream in the stack, or 0 if none exist.
- ///
- /// The most derived (outermost) stream in the stack.
- /// The position of the first buffering stream, or 0 if not found.
- internal static long GetPosition(this IStreamStack stream)
- {
- IStreamStack? current = stream;
-
- while (current != null)
- {
- if (current.BufferSize != 0 && current is Stream st)
- {
- return st.Position;
- }
- current = current?.BaseStream() as IStreamStack;
- }
- return 0;
- }
-
- ///
- /// Rewinds the buffer of the outermost buffering stream in the stack by the specified count, if supported.
- /// Only the most derived buffering stream is affected.
- ///
- /// The most derived (outermost) stream in the stack.
- /// The number of bytes to rewind within the buffer.
- internal static void Rewind(this IStreamStack stream, int count)
- {
- Stream baseStream = stream.BaseStream();
- Stream thisStream = (Stream)stream;
- IStreamStack? buffStream = null;
- IStreamStack? current = stream;
-
- while (buffStream == null && current != null)
- {
- if (current.BufferSize != 0)
- {
- buffStream = current;
- buffStream.BufferPosition -= Math.Min(buffStream.BufferPosition, count);
- }
- current = current?.BaseStream() as IStreamStack;
- }
- }
-
- ///
- /// Sets the buffer size on the first buffering stream in the stack, or on the outermost stream if none exist.
- /// If is true, sets the buffer size regardless of current value.
- ///
- /// The most derived (outermost) stream in the stack.
- /// The buffer size to set.
- /// If true, forces the buffer size to be set even if already set.
- internal static void SetBuffer(this IStreamStack stream, int bufferSize, bool force)
- {
- if (bufferSize == 0 || stream == null)
- return;
-
- IStreamStack? current = stream;
- IStreamStack defaultBuffer = stream;
- IStreamStack? buffer = null;
-
- // First pass: find the deepest IStreamStack
- while (current != null)
- {
- defaultBuffer = current;
- if (buffer == null && ((current.BufferSize != 0 && bufferSize != 0) || force))
- buffer = current;
- if (defaultBuffer.DefaultBufferSize != 0)
- break;
- current = current.BaseStream() as IStreamStack;
- }
- if (defaultBuffer.DefaultBufferSize == 0)
- defaultBuffer.DefaultBufferSize = bufferSize;
- (buffer ?? stream).BufferSize = bufferSize;
- }
-
- ///
- /// Attempts to set the position in the stream stack. If a buffering stream is present and the position is within its buffer,
- /// BufferPosition is set on the outermost buffering stream and all intermediate streams update their internal state via SetPosition.
- /// If no buffering stream is present, seeks as close to the root stream as possible and updates all intermediate streams' state via SetPosition.
- /// Seeking is never performed if any intermediate stream in the stack is buffering.
- /// Throws if the position cannot be set.
- ///
- ///
- /// The most derived (outermost) stream in the stack. The method traverses up the stack via BaseStream() until a stream can satisfy the buffer or seek request.
- ///
- /// The absolute position to set.
- /// The position that was set.
- internal static long StackSeek(this IStreamStack stream, long position)
- {
- var stack = new List();
- Stream? current = stream as Stream;
- int lastBufferingIndex = -1;
- int firstSeekableIndex = -1;
- Stream? firstSeekableStream = null;
-
- // Traverse the stack, collecting info
- while (current is IStreamStack stackStream)
- {
- stack.Add(stackStream);
- if (stackStream.BufferSize > 0)
- {
- lastBufferingIndex = stack.Count - 1;
- break;
- }
- current = stackStream.BaseStream();
- }
+namespace SharpCompress.IO;
- // Find the first seekable stream (closest to the root)
- if (current != null && current.CanSeek)
- {
- firstSeekableIndex = stack.Count;
- firstSeekableStream = current;
- }
-
- // If any buffering stream exists, try to set BufferPosition on the outermost one
- if (lastBufferingIndex != -1)
- {
- var bufferingStream = stack[lastBufferingIndex];
- if (position >= 0 && position < bufferingStream.BufferSize)
- {
- bufferingStream.BufferPosition = (int)position;
- return position;
- }
- else
- {
- // If position is not in buffer, reset buffer and proceed as non-buffering
- bufferingStream.BufferPosition = 0;
- }
- // Continue to seek as if no buffer is present
- }
-
- // If no buffering, or buffer was reset, seek at the first seekable stream (closest to the root)
- if (firstSeekableStream != null)
- {
- firstSeekableStream.Seek(position, SeekOrigin.Begin);
- return firstSeekableStream.Position;
- }
-
- throw new NotSupportedException(
- "Cannot set position on this stream stack (no seekable or buffering stream supports the requested position)."
- );
- }
-
- ///
- /// Reads bytes from the stream, using the position to observe how much was actually consumed and rewind the buffer to ensure further reads are correct.
- /// This is required to prevent buffered reads from skipping data, while also benefiting from buffering and reduced stream IO reads.
- ///
- /// The stream to read from.
- /// The buffer to read data into.
- /// The offset in the buffer to start writing data.
- /// The maximum number of bytes to read.
- /// Returns the buffering stream found in the stack, or null if none exists.
- /// Returns the number of bytes actually read from the base stream, or -1 if no buffering stream was found.
- /// The number of bytes read into the buffer.
- internal static int Read(
- this IStreamStack stream,
- byte[] buffer,
- int offset,
- int count,
- out IStreamStack? buffStream,
- out int baseReadCount
- )
- {
- Stream baseStream = stream.BaseStream();
- Stream thisStream = (Stream)stream;
- IStreamStack? current = stream;
- buffStream = null;
- baseReadCount = -1;
-
- while (buffStream == null && (current = current?.BaseStream() as IStreamStack) != null)
- {
- if (current.BufferSize != 0)
- {
- buffStream = current;
- }
- }
-
- long buffPos = buffStream == null ? -1 : ((Stream)buffStream).Position;
-
- int read = baseStream.Read(buffer, offset, count); //amount read in to buffer
-
- if (buffPos != -1)
- {
- baseReadCount = (int)(((Stream)buffStream!).Position - buffPos);
- }
- return read;
- }
+public interface IStreamStack
+{
+ ///
+ /// Gets or sets the default buffer size to be applied when buffering is enabled for this stream stack.
+ /// This value is used by the SetBuffer extension method to configure buffering on the appropriate stream
+ /// in the stack hierarchy. A value of 0 indicates no default buffer size is set.
+ ///
+ int DefaultBufferSize { get; set; }
+
+ ///
+ /// Returns the immediate underlying stream in the stack.
+ ///
+ Stream BaseStream();
+
+ ///
+ /// Gets or sets the size of the buffer if the stream supports buffering; otherwise, returns 0.
+ /// This property must not throw.
+ ///
+ int BufferSize { get; set; }
+
+ ///
+ /// Gets or sets the current position within the buffer if the stream supports buffering; otherwise, returns 0.
+ /// This property must not throw.
+ ///
+ int BufferPosition { get; set; }
+
+ ///
+ /// Updates the internal position state of the stream. This should not perform seeking on the underlying stream,
+ /// but should update any internal position or buffer state as appropriate for the stream implementation.
+ ///
+ /// The absolute position to set within the stream stack.
+ void SetPosition(long position);
#if DEBUG_STREAMS
- private static long _instanceCounter = 0;
-
- private static string cleansePos(long pos)
- {
- if (pos < 0)
- return "";
- return "Px" + pos.ToString("x");
- }
-
- ///
- /// Gets or creates a unique instance ID for the stream stack for debugging purposes.
- ///
- /// The stream stack.
- /// Reference to the instance ID field.
- /// Whether this is being called during construction.
- /// The instance ID.
- public static long GetInstanceId(
- this IStreamStack stream,
- ref long instanceId,
- bool construct
- )
- {
- if (instanceId == 0) //will not be equal to 0 when inherited IStackStream types are being used
- instanceId = System.Threading.Interlocked.Increment(ref _instanceCounter);
- return instanceId;
- }
-
- ///
- /// Writes a debug message for stream construction.
- ///
- /// The stream stack.
- /// The type being constructed.
- public static void DebugConstruct(this IStreamStack stream, Type constructing)
- {
- long id = stream.InstanceId;
- stream.InstanceId = GetInstanceId(stream, ref id, true);
- var frame = (new StackTrace()).GetFrame(3);
- string parentInfo =
- frame != null
- ? $"{frame.GetMethod()?.DeclaringType?.Name}.{frame.GetMethod()?.Name}()"
- : "Unknown";
- if (constructing.FullName == stream.GetType().FullName) //don't debug base IStackStream types
- Debug.WriteLine(
- $"{GetStreamStackString(stream, true)} : Constructed by [{parentInfo}]"
- );
- }
-
- ///
- /// Writes a debug message for stream disposal.
- ///
- /// The stream stack.
- /// The type being disposed.
- public static void DebugDispose(this IStreamStack stream, Type constructing)
- {
- var frame = (new StackTrace()).GetFrame(3);
- string parentInfo =
- frame != null
- ? $"{frame.GetMethod()?.DeclaringType?.Name}.{frame.GetMethod()?.Name}()"
- : "Unknown";
- if (constructing.FullName == stream.GetType().FullName) //don't debug base IStackStream types
- Debug.WriteLine($"{GetStreamStackString(stream, false)} : Disposed by [{parentInfo}]");
- }
-
- ///
- /// Writes a debug trace message for the stream.
- ///
- /// The stream stack.
- /// The debug message to write.
- public static void DebugTrace(this IStreamStack stream, string message)
- {
- Debug.WriteLine(
- $"{GetStreamStackString(stream, false)} : [{stream.GetType().Name}]{message}"
- );
- }
-
- ///
- /// Returns the full stream chain as a string, including instance IDs and positions.
- ///
- /// The stream stack to represent.
- /// Whether this is being called during construction.
- /// A string representation of the entire stream stack.
- public static string GetStreamStackString(this IStreamStack stream, bool construct)
- {
- var sb = new StringBuilder();
- Stream? current = stream as Stream;
- while (current != null)
- {
- IStreamStack? sStack = current as IStreamStack;
- string id = sStack != null ? "#" + sStack.InstanceId.ToString() : "";
- string buffSize = sStack != null ? "Bx" + sStack.BufferSize.ToString("x") : "";
- string defBuffSize =
- sStack != null ? "Dx" + sStack.DefaultBufferSize.ToString("x") : "";
-
- if (sb.Length > 0)
- sb.Insert(0, "/");
- try
- {
- sb.Insert(
- 0,
- $"{current.GetType().Name}{id}[{cleansePos(current.Position)}:{buffSize}:{defBuffSize}]"
- );
- }
- catch
- {
- if (current is SharpCompressStream scs)
- sb.Insert(
- 0,
- $"{current.GetType().Name}{id}[{cleansePos(scs.InternalPosition)}:{buffSize}:{defBuffSize}]"
- );
- else
- sb.Insert(0, $"{current.GetType().Name}{id}[:{buffSize}]");
- }
- if (sStack != null)
- current = sStack.BaseStream(); //current may not be a IStreamStack, allow one more loop
- else
- break;
- }
- return sb.ToString();
- }
+ ///
+ /// Gets or sets the unique instance identifier for debugging purposes.
+ ///
+ long InstanceId { get; set; }
#endif
- }
}
diff --git a/src/SharpCompress/IO/ReadOnlySubStream.cs b/src/SharpCompress/IO/ReadOnlySubStream.cs
index ffcd55fd7..142fd2e3e 100644
--- a/src/SharpCompress/IO/ReadOnlySubStream.cs
+++ b/src/SharpCompress/IO/ReadOnlySubStream.cs
@@ -1,5 +1,4 @@
using System;
-using System.Diagnostics;
using System.IO;
namespace SharpCompress.IO;
diff --git a/src/SharpCompress/IO/SharpCompressStream.cs b/src/SharpCompress/IO/SharpCompressStream.cs
index 6d7d5372e..563a01f3e 100644
--- a/src/SharpCompress/IO/SharpCompressStream.cs
+++ b/src/SharpCompress/IO/SharpCompressStream.cs
@@ -1,8 +1,5 @@
using System;
-using System.Diagnostics;
using System.IO;
-using System.Text;
-using System.Threading;
namespace SharpCompress.IO;
@@ -72,7 +69,10 @@ int IStreamStack.BufferPosition
if (_bufferingEnabled)
{
if (value < 0 || value > _bufferedLength)
+ {
throw new ArgumentOutOfRangeException(nameof(value));
+ }
+
_internalPosition = value;
_bufferPosition = value;
ValidateBufferState(); // Add here
@@ -88,8 +88,6 @@ void IStreamStack.SetPosition(long position) { }
private bool _readOnly; //some archive detection requires seek to be disabled to cause it to exception to try the next arc type
- //private bool _isRewound;
- private bool _isDisposed;
private long _internalPosition = 0;
public bool ThrowOnDispose { get; set; }
@@ -112,7 +110,10 @@ stream is SharpCompressStream sc
)
{
if (bufferSize != 0)
+ {
((IStreamStack)stream).SetBuffer(bufferSize, forceBuffer);
+ }
+
return sc;
}
return new SharpCompressStream(stream, leaveOpen, throwOnDispose, bufferSize, forceBuffer);
@@ -131,7 +132,7 @@ public SharpCompressStream(
this.ThrowOnDispose = throwOnDispose;
_readOnly = !Stream.CanSeek;
- ((IStreamStack)this).SetBuffer(bufferSize, forceBuffer);
+ this.SetBuffer(bufferSize, forceBuffer);
try
{
_baseInitialPos = stream.Position;
@@ -146,18 +147,18 @@ public SharpCompressStream(
#endif
}
- internal bool IsRecording { get; private set; }
+ public bool IsDisposed { get; private set; }
protected override void Dispose(bool disposing)
{
#if DEBUG_STREAMS
this.DebugDispose(typeof(SharpCompressStream));
#endif
- if (_isDisposed)
+ if (IsDisposed)
{
return;
}
- _isDisposed = true;
+ IsDisposed = true;
base.Dispose(disposing);
if (this.LeaveOpen)
@@ -182,15 +183,9 @@ protected override void Dispose(bool disposing)
public override bool CanWrite => !_readOnly && Stream.CanWrite;
- public override void Flush()
- {
- Stream.Flush();
- }
+ public override void Flush() => Stream.Flush();
- public override long Length
- {
- get { return Stream.Length; }
- }
+ public override long Length => Stream.Length;
public override long Position
{
@@ -199,13 +194,15 @@ public override long Position
long pos = _internalPosition; // Stream.Position + _bufferStream.Position - _bufferStream.Length;
return pos;
}
- set { Seek(value, SeekOrigin.Begin); }
+ set => Seek(value, SeekOrigin.Begin);
}
public override int Read(byte[] buffer, int offset, int count)
{
if (count == 0)
+ {
return 0;
+ }
if (_bufferingEnabled)
{
@@ -229,7 +226,10 @@ public override int Read(byte[] buffer, int offset, int count)
// If buffer exhausted, refill
int r = Stream.Read(_buffer!, 0, _bufferSize);
if (r == 0)
+ {
return 0;
+ }
+
_bufferedLength = r;
_bufferPosition = 0;
if (_bufferedLength == 0)
@@ -262,7 +262,6 @@ public override long Seek(long offset, SeekOrigin origin)
ValidateBufferState();
}
- long orig = _internalPosition;
long targetPos;
// Calculate the absolute target position based on origin
switch (origin)
@@ -299,10 +298,7 @@ public override long Seek(long offset, SeekOrigin origin)
return _internalPosition;
}
- public override void SetLength(long value)
- {
- throw new NotSupportedException();
- }
+ public override void SetLength(long value) => throw new NotSupportedException();
public override void WriteByte(byte value)
{
diff --git a/src/SharpCompress/IO/SourceStream.cs b/src/SharpCompress/IO/SourceStream.cs
index 74eddec86..02ea7e538 100644
--- a/src/SharpCompress/IO/SourceStream.cs
+++ b/src/SharpCompress/IO/SourceStream.cs
@@ -50,8 +50,8 @@ ReaderOptions options
)
{
ReaderOptions = options;
- _files = new List();
- _streams = new List();
+ _files = [];
+ _streams = [];
IsFileMode = file != null;
IsVolumes = false;
diff --git a/src/SharpCompress/IO/StackStreamExtensions.cs b/src/SharpCompress/IO/StackStreamExtensions.cs
new file mode 100644
index 000000000..9d3ece2b6
--- /dev/null
+++ b/src/SharpCompress/IO/StackStreamExtensions.cs
@@ -0,0 +1,348 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+#if !NETFRAMEWORK && !NETSTANDARD2_0
+using System.Diagnostics;
+using System.Text;
+#endif
+
+namespace SharpCompress.IO;
+
+internal static class StackStreamExtensions
+{
+ ///
+ /// Gets the logical position of the first buffering stream in the stack, or 0 if none exist.
+ ///
+ /// The most derived (outermost) stream in the stack.
+ /// The position of the first buffering stream, or 0 if not found.
+ internal static long GetPosition(this IStreamStack stream)
+ {
+ var current = stream;
+
+ while (current != null)
+ {
+ if (current.BufferSize != 0 && current is Stream st)
+ {
+ return st.Position;
+ }
+ current = current.BaseStream() as IStreamStack;
+ }
+ return 0;
+ }
+
+ ///
+ /// Rewinds the buffer of the outermost buffering stream in the stack by the specified count, if supported.
+ /// Only the most derived buffering stream is affected.
+ ///
+ /// The most derived (outermost) stream in the stack.
+ /// The number of bytes to rewind within the buffer.
+ internal static void Rewind(this IStreamStack stream, int count)
+ {
+ IStreamStack? buffStream = null;
+ var current = stream;
+
+ while (buffStream == null && current != null)
+ {
+ if (current.BufferSize != 0)
+ {
+ buffStream = current;
+ buffStream.BufferPosition -= Math.Min(buffStream.BufferPosition, count);
+ }
+ current = current.BaseStream() as IStreamStack;
+ }
+ }
+
+ ///
+ /// Sets the buffer size on the first buffering stream in the stack, or on the outermost stream if none exist.
+ /// If is true, sets the buffer size regardless of current value.
+ ///
+ /// The most derived (outermost) stream in the stack.
+ /// The buffer size to set.
+ /// If true, forces the buffer size to be set even if already set.
+ internal static void SetBuffer(this IStreamStack stream, int bufferSize, bool force)
+ {
+ if (bufferSize == 0 || stream == null)
+ {
+ return;
+ }
+
+ var current = stream;
+ var defaultBuffer = stream;
+ IStreamStack? buffer = null;
+
+ // First pass: find the deepest IStreamStack
+ while (current != null)
+ {
+ defaultBuffer = current;
+ if (buffer == null && ((current.BufferSize != 0 && bufferSize != 0) || force))
+ {
+ buffer = current;
+ }
+
+ if (defaultBuffer.DefaultBufferSize != 0)
+ {
+ break;
+ }
+
+ current = current.BaseStream() as IStreamStack;
+ }
+ if (defaultBuffer.DefaultBufferSize == 0)
+ {
+ defaultBuffer.DefaultBufferSize = bufferSize;
+ }
+
+ (buffer ?? stream).BufferSize = bufferSize;
+ }
+
+ ///
+ /// Attempts to set the position in the stream stack. If a buffering stream is present and the position is within its buffer,
+ /// BufferPosition is set on the outermost buffering stream and all intermediate streams update their internal state via SetPosition.
+ /// If no buffering stream is present, seeks as close to the root stream as possible and updates all intermediate streams' state via SetPosition.
+ /// Seeking is never performed if any intermediate stream in the stack is buffering.
+ /// Throws if the position cannot be set.
+ ///
+ ///
+ /// The most derived (outermost) stream in the stack. The method traverses up the stack via BaseStream() until a stream can satisfy the buffer or seek request.
+ ///
+ /// The absolute position to set.
+ /// The position that was set.
+ internal static long StackSeek(this IStreamStack stream, long position)
+ {
+ var stack = new List();
+ var current = stream as Stream;
+ var lastBufferingIndex = -1;
+ Stream? firstSeekableStream = null;
+
+ // Traverse the stack, collecting info
+ while (current is IStreamStack stackStream)
+ {
+ stack.Add(stackStream);
+ if (stackStream.BufferSize > 0)
+ {
+ lastBufferingIndex = stack.Count - 1;
+ break;
+ }
+ current = stackStream.BaseStream();
+ }
+
+ // Find the first seekable stream (closest to the root)
+ if (current != null && current.CanSeek)
+ {
+ firstSeekableStream = current;
+ }
+
+ // If any buffering stream exists, try to set BufferPosition on the outermost one
+ if (lastBufferingIndex != -1)
+ {
+ var bufferingStream = stack[lastBufferingIndex];
+ if (position >= 0 && position < bufferingStream.BufferSize)
+ {
+ bufferingStream.BufferPosition = (int)position;
+ return position;
+ }
+ else
+ {
+ // If position is not in buffer, reset buffer and proceed as non-buffering
+ bufferingStream.BufferPosition = 0;
+ }
+ // Continue to seek as if no buffer is present
+ }
+
+ // If no buffering, or buffer was reset, seek at the first seekable stream (closest to the root)
+ if (firstSeekableStream != null)
+ {
+ firstSeekableStream.Seek(position, SeekOrigin.Begin);
+ return firstSeekableStream.Position;
+ }
+
+ throw new NotSupportedException(
+ "Cannot set position on this stream stack (no seekable or buffering stream supports the requested position)."
+ );
+ }
+
+ ///
+ /// Reads bytes from the stream, using the position to observe how much was actually consumed and rewind the buffer to ensure further reads are correct.
+ /// This is required to prevent buffered reads from skipping data, while also benefiting from buffering and reduced stream IO reads.
+ ///
+ /// The stream to read from.
+ /// The buffer to read data into.
+ /// The offset in the buffer to start writing data.
+ /// The maximum number of bytes to read.
+ /// Returns the buffering stream found in the stack, or null if none exists.
+ /// Returns the number of bytes actually read from the base stream, or -1 if no buffering stream was found.
+ /// The number of bytes read into the buffer.
+ internal static int Read(
+ this IStreamStack stream,
+ byte[] buffer,
+ int offset,
+ int count,
+ out IStreamStack? buffStream,
+ out int baseReadCount
+ )
+ {
+ var baseStream = stream.BaseStream();
+ var current = stream;
+ buffStream = null;
+ baseReadCount = -1;
+
+ while (buffStream == null && (current = current?.BaseStream() as IStreamStack) != null)
+ {
+ if (current.BufferSize != 0)
+ {
+ buffStream = current;
+ }
+ }
+
+ var buffPos = buffStream == null ? -1 : ((Stream)buffStream).Position;
+
+ var read = baseStream.Read(buffer, offset, count); //amount read in to buffer
+
+ if (buffPos != -1)
+ {
+ baseReadCount = (int)(((Stream)buffStream!).Position - buffPos);
+ }
+ return read;
+ }
+
+#if DEBUG_STREAMS
+ private static long _instanceCounter = 0;
+
+ private static string cleansePos(long pos)
+ {
+ if (pos < 0)
+ {
+ return "";
+ }
+
+ return "Px" + pos.ToString("x");
+ }
+
+ ///
+ /// Gets or creates a unique instance ID for the stream stack for debugging purposes.
+ ///
+ /// The stream stack.
+ /// Reference to the instance ID field.
+ /// Whether this is being called during construction.
+ /// The instance ID.
+ public static long GetInstanceId(this IStreamStack stream, ref long instanceId, bool construct)
+ {
+ if (instanceId == 0) //will not be equal to 0 when inherited IStackStream types are being used
+ {
+ instanceId = System.Threading.Interlocked.Increment(ref _instanceCounter);
+ }
+
+ return instanceId;
+ }
+
+#if DEBUG_STREAMS
+ ///
+ /// Writes a debug message for stream construction.
+ ///
+ /// The stream stack.
+ /// The type being constructed.
+ public static void DebugConstruct(this IStreamStack stream, Type constructing)
+ {
+ var id = stream.InstanceId;
+ stream.InstanceId = GetInstanceId(stream, ref id, true);
+ var frame = (new StackTrace()).GetFrame(3);
+ var parentInfo =
+ frame != null
+#pragma warning disable IL2026
+ ? $"{frame.GetMethod()?.DeclaringType?.Name}.{frame.GetMethod()?.Name}()"
+#pragma warning restore IL2026
+ : "Unknown";
+ if (constructing.FullName == stream.GetType().FullName) //don't debug base IStackStream types
+ {
+ Debug.WriteLine(
+ $"{GetStreamStackString(stream, true)} : Constructed by [{parentInfo}]"
+ );
+ }
+ }
+
+ ///
+ /// Writes a debug message for stream disposal.
+ ///
+ /// The stream stack.
+ /// The type being disposed.
+ public static void DebugDispose(this IStreamStack stream, Type constructing)
+ {
+ var frame = (new StackTrace()).GetFrame(3);
+ var parentInfo =
+ frame != null
+#pragma warning disable IL2026
+ ? $"{frame.GetMethod()?.DeclaringType?.Name}.{frame.GetMethod()?.Name}()"
+#pragma warning restore IL2026
+ : "Unknown";
+ if (constructing.FullName == stream.GetType().FullName) //don't debug base IStackStream types
+ {
+ Debug.WriteLine($"{GetStreamStackString(stream, false)} : Disposed by [{parentInfo}]");
+ }
+ }
+#endif
+
+ ///
+ /// Writes a debug trace message for the stream.
+ ///
+ /// The stream stack.
+ /// The debug message to write.
+ public static void DebugTrace(this IStreamStack stream, string message) =>
+ Debug.WriteLine(
+ $"{GetStreamStackString(stream, false)} : [{stream.GetType().Name}]{message}"
+ );
+
+ ///
+ /// Returns the full stream chain as a string, including instance IDs and positions.
+ ///
+ /// The stream stack to represent.
+ /// Whether this is being called during construction.
+ /// A string representation of the entire stream stack.
+ public static string GetStreamStackString(this IStreamStack stream, bool construct)
+ {
+ var sb = new StringBuilder();
+ var current = stream as Stream;
+ while (current != null)
+ {
+ var sStack = current as IStreamStack;
+ var id = sStack != null ? "#" + sStack.InstanceId.ToString() : "";
+ var buffSize = sStack != null ? "Bx" + sStack.BufferSize.ToString("x") : "";
+ var defBuffSize = sStack != null ? "Dx" + sStack.DefaultBufferSize.ToString("x") : "";
+
+ if (sb.Length > 0)
+ {
+ sb.Insert(0, "/");
+ }
+
+ try
+ {
+ sb.Insert(
+ 0,
+ $"{current.GetType().Name}{id}[{cleansePos(current.Position)}:{buffSize}:{defBuffSize}]"
+ );
+ }
+ catch
+ {
+ if (current is SharpCompressStream scs)
+ {
+ sb.Insert(
+ 0,
+ $"{current.GetType().Name}{id}[{cleansePos(scs.InternalPosition)}:{buffSize}:{defBuffSize}]"
+ );
+ }
+ else
+ {
+ sb.Insert(0, $"{current.GetType().Name}{id}[:{buffSize}]");
+ }
+ }
+ if (sStack != null)
+ {
+ current = sStack.BaseStream(); //current may not be a IStreamStack, allow one more loop
+ }
+ else
+ {
+ break;
+ }
+ }
+ return sb.ToString();
+ }
+#endif
+}
diff --git a/src/SharpCompress/LazyReadOnlyCollection.cs b/src/SharpCompress/LazyReadOnlyCollection.cs
index 9c1b35ebc..c2b0aed06 100644
--- a/src/SharpCompress/LazyReadOnlyCollection.cs
+++ b/src/SharpCompress/LazyReadOnlyCollection.cs
@@ -8,11 +8,14 @@ namespace SharpCompress.Helpers;
internal sealed class LazyReadOnlyCollection : ICollection
{
- private readonly List backing = new();
+ private readonly List backing = [];
private readonly IEnumerator source;
private bool fullyLoaded;
- public LazyReadOnlyCollection(IEnumerable source) => this.source = source.GetEnumerator();
+ public LazyReadOnlyCollection(IEnumerable source)
+ {
+ this.source = source.GetEnumerator();
+ }
private class LazyLoader : IEnumerator
{
@@ -20,8 +23,10 @@ private class LazyLoader : IEnumerator
private bool disposed;
private int index = -1;
- internal LazyLoader(LazyReadOnlyCollection lazyReadOnlyCollection) =>
+ internal LazyLoader(LazyReadOnlyCollection lazyReadOnlyCollection)
+ {
this.lazyReadOnlyCollection = lazyReadOnlyCollection;
+ }
#region IEnumerator Members
diff --git a/src/SharpCompress/NotNullExtensions.cs b/src/SharpCompress/NotNullExtensions.cs
index 2245612e9..023597f2d 100644
--- a/src/SharpCompress/NotNullExtensions.cs
+++ b/src/SharpCompress/NotNullExtensions.cs
@@ -1,7 +1,6 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
-using System.Linq;
using System.Runtime.CompilerServices;
namespace SharpCompress.Helpers;
@@ -9,15 +8,14 @@ namespace SharpCompress.Helpers;
internal static class NotNullExtensions
{
[MethodImpl(MethodImplOptions.AggressiveInlining)]
- public static IEnumerable Empty(this IEnumerable? source) =>
- source ?? Enumerable.Empty();
+ public static IEnumerable Empty(this IEnumerable? source) => source ?? [];
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static IEnumerable Empty(this T? source)
{
if (source is null)
{
- return Enumerable.Empty();
+ return [];
}
return source.AsEnumerable();
}
diff --git a/src/SharpCompress/Readers/Arc/ArcReader.cs b/src/SharpCompress/Readers/Arc/ArcReader.cs
index 7d58b8d42..4ca1fe7fb 100644
--- a/src/SharpCompress/Readers/Arc/ArcReader.cs
+++ b/src/SharpCompress/Readers/Arc/ArcReader.cs
@@ -1,41 +1,39 @@
-using System;
using System.Collections.Generic;
using System.IO;
-using System.Linq;
-using System.Text;
-using System.Threading.Tasks;
using SharpCompress.Common;
using SharpCompress.Common.Arc;
-namespace SharpCompress.Readers.Arc
+namespace SharpCompress.Readers.Arc;
+
+public class ArcReader : AbstractReader
{
- public class ArcReader : AbstractReader
+ private ArcReader(Stream stream, ReaderOptions options)
+ : base(options, ArchiveType.Arc)
{
- private ArcReader(Stream stream, ReaderOptions options)
- : base(options, ArchiveType.Arc) => Volume = new ArcVolume(stream, options, 0);
+ Volume = new ArcVolume(stream, options, 0);
+ }
- public override ArcVolume Volume { get; }
+ public override ArcVolume Volume { get; }
- ///
- /// Opens an ArcReader for Non-seeking usage with a single volume
- ///
- ///
- ///
- ///
- public static ArcReader Open(Stream stream, ReaderOptions? options = null)
- {
- stream.CheckNotNull(nameof(stream));
- return new ArcReader(stream, options ?? new ReaderOptions());
- }
+ ///
+ /// Opens an ArcReader for Non-seeking usage with a single volume
+ ///
+ ///
+ ///
+ ///
+ public static ArcReader Open(Stream stream, ReaderOptions? options = null)
+ {
+ stream.CheckNotNull(nameof(stream));
+ return new ArcReader(stream, options ?? new ReaderOptions());
+ }
- protected override IEnumerable GetEntries(Stream stream)
+ protected override IEnumerable GetEntries(Stream stream)
+ {
+ ArcEntryHeader headerReader = new ArcEntryHeader(new ArchiveEncoding());
+ ArcEntryHeader? header;
+ while ((header = headerReader.ReadHeader(stream)) != null)
{
- ArcEntryHeader headerReader = new ArcEntryHeader(new ArchiveEncoding());
- ArcEntryHeader? header;
- while ((header = headerReader.ReadHeader(stream)) != null)
- {
- yield return new ArcEntry(new ArcFilePart(header, stream));
- }
+ yield return new ArcEntry(new ArcFilePart(header, stream));
}
}
}
diff --git a/src/SharpCompress/Readers/GZip/GZipReader.cs b/src/SharpCompress/Readers/GZip/GZipReader.cs
index 73bc4a9d3..06f23a936 100644
--- a/src/SharpCompress/Readers/GZip/GZipReader.cs
+++ b/src/SharpCompress/Readers/GZip/GZipReader.cs
@@ -8,7 +8,10 @@ namespace SharpCompress.Readers.GZip;
public class GZipReader : AbstractReader
{
private GZipReader(Stream stream, ReaderOptions options)
- : base(options, ArchiveType.GZip) => Volume = new GZipVolume(stream, options, 0);
+ : base(options, ArchiveType.GZip)
+ {
+ Volume = new GZipVolume(stream, options, 0);
+ }
public override GZipVolume Volume { get; }
diff --git a/src/SharpCompress/Readers/IReaderExtractionListener.cs b/src/SharpCompress/Readers/IReaderExtractionListener.cs
index 49e5dae42..350824a13 100644
--- a/src/SharpCompress/Readers/IReaderExtractionListener.cs
+++ b/src/SharpCompress/Readers/IReaderExtractionListener.cs
@@ -4,5 +4,7 @@ namespace SharpCompress.Readers;
public interface IReaderExtractionListener : IExtractionListener
{
+#pragma warning disable CA1030
void FireEntryExtractionProgress(Entry entry, long sizeTransferred, int iterations);
+#pragma warning restore CA1030
}
diff --git a/src/SharpCompress/Readers/Rar/MultiVolumeRarReader.cs b/src/SharpCompress/Readers/Rar/MultiVolumeRarReader.cs
index 556ca9f55..efed72682 100644
--- a/src/SharpCompress/Readers/Rar/MultiVolumeRarReader.cs
+++ b/src/SharpCompress/Readers/Rar/MultiVolumeRarReader.cs
@@ -15,7 +15,10 @@ internal class MultiVolumeRarReader : RarReader
private Stream tempStream;
internal MultiVolumeRarReader(IEnumerable streams, ReaderOptions options)
- : base(options) => this.streams = streams.GetEnumerator();
+ : base(options)
+ {
+ this.streams = streams.GetEnumerator();
+ }
protected override void ValidateArchive(RarVolume archive) { }
diff --git a/src/SharpCompress/Readers/Rar/SingleVolumeRarReader.cs b/src/SharpCompress/Readers/Rar/SingleVolumeRarReader.cs
index 165c92f70..cb244ff75 100644
--- a/src/SharpCompress/Readers/Rar/SingleVolumeRarReader.cs
+++ b/src/SharpCompress/Readers/Rar/SingleVolumeRarReader.cs
@@ -9,7 +9,10 @@ internal class SingleVolumeRarReader : RarReader
private readonly Stream stream;
internal SingleVolumeRarReader(Stream stream, ReaderOptions options)
- : base(options) => this.stream = stream;
+ : base(options)
+ {
+ this.stream = stream;
+ }
protected override void ValidateArchive(RarVolume archive)
{
diff --git a/src/SharpCompress/Readers/ReaderFactory.cs b/src/SharpCompress/Readers/ReaderFactory.cs
index fb133d98a..03232f07d 100644
--- a/src/SharpCompress/Readers/ReaderFactory.cs
+++ b/src/SharpCompress/Readers/ReaderFactory.cs
@@ -22,18 +22,18 @@ public static IReader Open(Stream stream, ReaderOptions? options = null)
var bStream = new SharpCompressStream(stream, bufferSize: options.BufferSize);
- long pos = ((IStreamStack)bStream).GetPosition();
-
- var factories = Factories.Factory.Factories.OfType();
+ long pos = bStream.GetPosition();
Factory? testedFactory = null;
if (!string.IsNullOrWhiteSpace(options.ExtensionHint))
{
- testedFactory = factories.FirstOrDefault(a =>
- a.GetSupportedExtensions()
- .Contains(options.ExtensionHint, StringComparer.CurrentCultureIgnoreCase)
- );
+ testedFactory = Factory
+ .Factories.OfType()
+ .FirstOrDefault(a =>
+ a.GetSupportedExtensions()
+ .Contains(options.ExtensionHint, StringComparer.CurrentCultureIgnoreCase)
+ );
if (
testedFactory?.TryOpenReader(bStream, options, out var reader) == true
&& reader != null
@@ -41,16 +41,16 @@ public static IReader Open(Stream stream, ReaderOptions? options = null)
{
return reader;
}
- ((IStreamStack)bStream).StackSeek(pos);
+ bStream.StackSeek(pos);
}
- foreach (var factory in factories)
+ foreach (var factory in Factory.Factories.OfType())
{
if (testedFactory == factory)
{
continue; // Already tested above
}
- ((IStreamStack)bStream).StackSeek(pos);
+ bStream.StackSeek(pos);
if (factory.TryOpenReader(bStream, options, out var reader) && reader != null)
{
return reader;
diff --git a/src/SharpCompress/Readers/Tar/TarReader.cs b/src/SharpCompress/Readers/Tar/TarReader.cs
index aaa0005ca..dab091bbf 100644
--- a/src/SharpCompress/Readers/Tar/TarReader.cs
+++ b/src/SharpCompress/Readers/Tar/TarReader.cs
@@ -56,62 +56,62 @@ protected override Stream RequestInitialStream()
public static TarReader Open(Stream stream, ReaderOptions? options = null)
{
stream.CheckNotNull(nameof(stream));
- options = options ?? new ReaderOptions();
+ options ??= new ReaderOptions();
var rewindableStream = new SharpCompressStream(stream);
- long pos = ((IStreamStack)rewindableStream).GetPosition();
+ long pos = rewindableStream.GetPosition();
if (GZipArchive.IsGZipFile(rewindableStream))
{
- ((IStreamStack)rewindableStream).StackSeek(pos);
+ rewindableStream.StackSeek(pos);
var testStream = new GZipStream(rewindableStream, CompressionMode.Decompress);
if (TarArchive.IsTarFile(testStream))
{
- ((IStreamStack)rewindableStream).StackSeek(pos);
+ rewindableStream.StackSeek(pos);
return new TarReader(rewindableStream, options, CompressionType.GZip);
}
throw new InvalidFormatException("Not a tar file.");
}
- ((IStreamStack)rewindableStream).StackSeek(pos);
+ rewindableStream.StackSeek(pos);
if (BZip2Stream.IsBZip2(rewindableStream))
{
- ((IStreamStack)rewindableStream).StackSeek(pos);
+ rewindableStream.StackSeek(pos);
var testStream = new BZip2Stream(rewindableStream, CompressionMode.Decompress, false);
if (TarArchive.IsTarFile(testStream))
{
- ((IStreamStack)rewindableStream).StackSeek(pos);
+ rewindableStream.StackSeek(pos);
return new TarReader(rewindableStream, options, CompressionType.BZip2);
}
throw new InvalidFormatException("Not a tar file.");
}
- ((IStreamStack)rewindableStream).StackSeek(pos);
+ rewindableStream.StackSeek(pos);
if (ZStandardStream.IsZStandard(rewindableStream))
{
- ((IStreamStack)rewindableStream).StackSeek(pos);
+ rewindableStream.StackSeek(pos);
var testStream = new ZStandardStream(rewindableStream);
if (TarArchive.IsTarFile(testStream))
{
- ((IStreamStack)rewindableStream).StackSeek(pos);
+ rewindableStream.StackSeek(pos);
return new TarReader(rewindableStream, options, CompressionType.ZStandard);
}
throw new InvalidFormatException("Not a tar file.");
}
- ((IStreamStack)rewindableStream).StackSeek(pos);
+ rewindableStream.StackSeek(pos);
if (LZipStream.IsLZipFile(rewindableStream))
{
- ((IStreamStack)rewindableStream).StackSeek(pos);
+ rewindableStream.StackSeek(pos);
var testStream = new LZipStream(rewindableStream, CompressionMode.Decompress);
if (TarArchive.IsTarFile(testStream))
{
- ((IStreamStack)rewindableStream).StackSeek(pos);
+ rewindableStream.StackSeek(pos);
return new TarReader(rewindableStream, options, CompressionType.LZip);
}
throw new InvalidFormatException("Not a tar file.");
}
- ((IStreamStack)rewindableStream).StackSeek(pos);
+ rewindableStream.StackSeek(pos);
return new TarReader(rewindableStream, options, CompressionType.None);
}
diff --git a/src/SharpCompress/Writers/Tar/TarWriterOptions.cs b/src/SharpCompress/Writers/Tar/TarWriterOptions.cs
index 8f2e866df..4cecc6dfb 100755
--- a/src/SharpCompress/Writers/Tar/TarWriterOptions.cs
+++ b/src/SharpCompress/Writers/Tar/TarWriterOptions.cs
@@ -10,8 +10,14 @@ public class TarWriterOptions : WriterOptions
public bool FinalizeArchiveOnClose { get; }
public TarWriterOptions(CompressionType compressionType, bool finalizeArchiveOnClose)
- : base(compressionType) => FinalizeArchiveOnClose = finalizeArchiveOnClose;
+ : base(compressionType)
+ {
+ FinalizeArchiveOnClose = finalizeArchiveOnClose;
+ }
internal TarWriterOptions(WriterOptions options)
- : this(options.CompressionType, true) => ArchiveEncoding = options.ArchiveEncoding;
+ : this(options.CompressionType, true)
+ {
+ ArchiveEncoding = options.ArchiveEncoding;
+ }
}
diff --git a/src/SharpCompress/Writers/Zip/ZipCentralDirectoryEntry.cs b/src/SharpCompress/Writers/Zip/ZipCentralDirectoryEntry.cs
index 111c7239e..4b6ac3858 100644
--- a/src/SharpCompress/Writers/Zip/ZipCentralDirectoryEntry.cs
+++ b/src/SharpCompress/Writers/Zip/ZipCentralDirectoryEntry.cs
@@ -78,7 +78,7 @@ internal uint Write(Stream outputStream)
usedCompression = ZipCompressionMethod.None;
}
- Span<byte> intBuf = stackalloc byte[] { 80, 75, 1, 2, version, 0, version, 0 };
+ Span<byte> intBuf = [80, 75, 1, 2, version, 0, version, 0];
//constant sig, then version made by, then version to extract
outputStream.Write(intBuf);
diff --git a/src/SharpCompress/Writers/Zip/ZipWriter.cs b/src/SharpCompress/Writers/Zip/ZipWriter.cs
index 55abe4405..7d7ad4a0d 100644
--- a/src/SharpCompress/Writers/Zip/ZipWriter.cs
+++ b/src/SharpCompress/Writers/Zip/ZipWriter.cs
@@ -19,7 +19,7 @@ public class ZipWriter : AbstractWriter
{
private readonly CompressionType compressionType;
private readonly int compressionLevel;
- private readonly List<ZipCentralDirectoryEntry> entries = new();
+ private readonly List<ZipCentralDirectoryEntry> entries = [];
private readonly string zipComment;
private long streamPosition;
private PpmdProperties? ppmdProps;
@@ -162,16 +162,16 @@ bool useZip64
{
if (OutputStream.CanSeek && useZip64)
{
- OutputStream.Write(stackalloc byte[] { 45, 0 }); //smallest allowed version for zip64
+ OutputStream.Write([45, 0]); //smallest allowed version for zip64
}
else
{
- OutputStream.Write(stackalloc byte[] { 20, 0 }); //older version which is more compatible
+ OutputStream.Write([20, 0]); //older version which is more compatible
}
}
else
{
- OutputStream.Write(stackalloc byte[] { 63, 0 }); //version says we used PPMd or LZMA
+ OutputStream.Write([63, 0]); //version says we used PPMd or LZMA
}
var flags = Equals(WriterOptions.ArchiveEncoding.GetEncoding(), Encoding.UTF8)
? HeaderFlags.Efs
@@ -197,7 +197,7 @@ bool useZip64
OutputStream.Write(intBuf);
// zipping date and time
- OutputStream.Write(stackalloc byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 });
+ OutputStream.Write([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
// unused CRC, un/compressed size, updated later
BinaryPrimitives.WriteUInt16LittleEndian(intBuf, (ushort)encodedFilename.Length);
@@ -250,7 +250,7 @@ private void WriteEndRecord(ulong size)
var recordlen = 2 + 2 + 4 + 4 + 8 + 8 + 8 + 8;
// Write zip64 end of central directory record
- OutputStream.Write(stackalloc byte[] { 80, 75, 6, 6 });
+ OutputStream.Write([80, 75, 6, 6]);
BinaryPrimitives.WriteUInt64LittleEndian(intBuf, (ulong)recordlen);
OutputStream.Write(intBuf); // Size of zip64 end of central directory record
@@ -273,7 +273,7 @@ private void WriteEndRecord(ulong size)
OutputStream.Write(intBuf); // Disk offset
// Write zip64 end of central directory locator
- OutputStream.Write(stackalloc byte[] { 80, 75, 6, 7 });
+ OutputStream.Write([80, 75, 6, 7]);
BinaryPrimitives.WriteUInt32LittleEndian(intBuf, 0);
OutputStream.Write(intBuf.Slice(0, 4)); // Entry disk
@@ -286,7 +286,7 @@ private void WriteEndRecord(ulong size)
}
// Write normal end of central directory record
- OutputStream.Write(stackalloc byte[] { 80, 75, 5, 6, 0, 0, 0, 0 });
+ OutputStream.Write([80, 75, 5, 6, 0, 0, 0, 0]);
BinaryPrimitives.WriteUInt16LittleEndian(
intBuf,
(ushort)(entries.Count < 0xFFFF ? entries.Count : 0xFFFF)
diff --git a/src/SharpCompress/Writers/Zip/ZipWriterOptions.cs b/src/SharpCompress/Writers/Zip/ZipWriterOptions.cs
index 9aa80dd17..a0a653425 100644
--- a/src/SharpCompress/Writers/Zip/ZipWriterOptions.cs
+++ b/src/SharpCompress/Writers/Zip/ZipWriterOptions.cs
@@ -32,10 +32,7 @@ internal ZipWriterOptions(WriterOptions options)
/// This is a convenience method that sets the CompressionLevel property for Deflate compression.
///
/// Deflate compression level (0=no compression, 6=default, 9=best compression)
- public void SetDeflateCompressionLevel(CompressionLevel level)
- {
- CompressionLevel = (int)level;
- }
+ public void SetDeflateCompressionLevel(CompressionLevel level) => CompressionLevel = (int)level;
///
/// Sets the compression level for ZStandard compression (1-22).
@@ -46,10 +43,12 @@ public void SetDeflateCompressionLevel(CompressionLevel level)
public void SetZStandardCompressionLevel(int level)
{
if (level < 1 || level > 22)
+ {
throw new ArgumentOutOfRangeException(
nameof(level),
"ZStandard compression level must be between 1 and 22"
);
+ }
CompressionLevel = level;
}
diff --git a/tests/SharpCompress.Test/ArchiveTests.cs b/tests/SharpCompress.Test/ArchiveTests.cs
index 5a3d36ca5..714eac2a9 100644
--- a/tests/SharpCompress.Test/ArchiveTests.cs
+++ b/tests/SharpCompress.Test/ArchiveTests.cs
@@ -30,7 +30,7 @@ protected void ArchiveGetParts(IEnumerable<string> testArchives)
protected void ArchiveStreamReadExtractAll(string testArchive, CompressionType compression)
{
testArchive = Path.Combine(TEST_ARCHIVES_PATH, testArchive);
- ArchiveStreamReadExtractAll(new[] { testArchive }, compression);
+ ArchiveStreamReadExtractAll([testArchive], compression);
}
protected void ArchiveStreamReadExtractAll(
diff --git a/tests/SharpCompress.Test/BZip2/BZip2ReaderTests.cs b/tests/SharpCompress.Test/BZip2/BZip2ReaderTests.cs
index e0fa0583f..77d0ae29c 100644
--- a/tests/SharpCompress.Test/BZip2/BZip2ReaderTests.cs
+++ b/tests/SharpCompress.Test/BZip2/BZip2ReaderTests.cs
@@ -14,8 +14,8 @@ public class BZip2ReaderTests : ReaderTests
public void BZip2_Reader_Factory()
{
Stream stream = new MemoryStream(
- new byte[] { 0x42, 0x5a, 0x68, 0x34, 0x31, 0x41, 0x59, 0x26, 0x53, 0x59, 0x35 }
+ [0x42, 0x5a, 0x68, 0x34, 0x31, 0x41, 0x59, 0x26, 0x53, 0x59, 0x35]
);
- Assert.Throws(typeof(InvalidOperationException), () => ReaderFactory.Open(stream));
+ Assert.Throws<InvalidOperationException>(() => ReaderFactory.Open(stream));
}
}
diff --git a/tests/SharpCompress.Test/GZip/GZipArchiveTests.cs b/tests/SharpCompress.Test/GZip/GZipArchiveTests.cs
index 6a2b9795e..db93c0c10 100644
--- a/tests/SharpCompress.Test/GZip/GZipArchiveTests.cs
+++ b/tests/SharpCompress.Test/GZip/GZipArchiveTests.cs
@@ -122,6 +122,6 @@ public void TestGzArchiveTypeGzip()
{
using var stream = File.OpenRead(Path.Combine(TEST_ARCHIVES_PATH, "Tar.tar.gz"));
using var archive = GZipArchive.Open(stream);
- Assert.Equal(archive.Type, ArchiveType.GZip);
+ Assert.Equal(ArchiveType.GZip, archive.Type);
}
}
diff --git a/tests/SharpCompress.Test/Mocks/ForwardOnlyStream.cs b/tests/SharpCompress.Test/Mocks/ForwardOnlyStream.cs
index 4c279a3c8..20a4593fa 100644
--- a/tests/SharpCompress.Test/Mocks/ForwardOnlyStream.cs
+++ b/tests/SharpCompress.Test/Mocks/ForwardOnlyStream.cs
@@ -27,8 +27,6 @@ int IStreamStack.BufferPosition
void IStreamStack.SetPosition(long position) { }
- public bool IsDisposed { get; private set; }
-
public ForwardOnlyStream(Stream stream, int bufferSize = ReaderOptions.DefaultBufferSize)
: base(stream, bufferSize: bufferSize)
{
@@ -48,7 +46,6 @@ protected override void Dispose(bool disposing)
this.DebugDispose(typeof(ForwardOnlyStream));
#endif
stream.Dispose();
- IsDisposed = true;
base.Dispose(disposing);
}
}
diff --git a/tests/SharpCompress.Test/Rar/RarArchiveTests.cs b/tests/SharpCompress.Test/Rar/RarArchiveTests.cs
index 1aca13ddc..b2c7e46bb 100644
--- a/tests/SharpCompress.Test/Rar/RarArchiveTests.cs
+++ b/tests/SharpCompress.Test/Rar/RarArchiveTests.cs
@@ -17,9 +17,8 @@ public void Rar_EncryptedFileAndHeader_Archive() =>
[Fact]
public void Rar_EncryptedFileAndHeader_NoPasswordExceptionTest() =>
- Assert.Throws(
- typeof(CryptographicException),
- () => ReadRarPassword("Rar.encrypted_filesAndHeader.rar", null)
+ Assert.Throws<CryptographicException>(() =>
+ ReadRarPassword("Rar.encrypted_filesAndHeader.rar", null)
);
[Fact]
@@ -28,16 +27,14 @@ public void Rar5_EncryptedFileAndHeader_Archive() =>
[Fact]
public void Rar5_EncryptedFileAndHeader_Archive_Err() =>
- Assert.Throws(
- typeof(CryptographicException),
- () => ReadRarPassword("Rar5.encrypted_filesAndHeader.rar", "failed")
+ Assert.Throws<CryptographicException>(() =>
+ ReadRarPassword("Rar5.encrypted_filesAndHeader.rar", "failed")
);
[Fact]
public void Rar5_EncryptedFileAndHeader_NoPasswordExceptionTest() =>
- Assert.Throws(
- typeof(CryptographicException),
- () => ReadRarPassword("Rar5.encrypted_filesAndHeader.rar", null)
+ Assert.Throws<CryptographicException>(() =>
+ ReadRarPassword("Rar5.encrypted_filesAndHeader.rar", null)
);
[Fact]
@@ -46,9 +43,8 @@ public void Rar_EncryptedFileOnly_Archive() =>
[Fact]
public void Rar_EncryptedFileOnly_Archive_Err() =>
- Assert.Throws(
- typeof(CryptographicException),
- () => ReadRarPassword("Rar5.encrypted_filesOnly.rar", "failed")
+ Assert.Throws<CryptographicException>(() =>
+ ReadRarPassword("Rar5.encrypted_filesOnly.rar", "failed")
);
[Fact]
@@ -417,23 +413,21 @@ public void Rar5_ArchiveVersionTest()
public void Rar_GetPartsSplit() =>
//uses first part to search for all parts and compares against this array
ArchiveGetParts(
- new[]
- {
+ [
"Rar4.split.001",
"Rar4.split.002",
"Rar4.split.003",
"Rar4.split.004",
"Rar4.split.005",
"Rar4.split.006",
- }
+ ]
);
[Fact]
public void Rar_GetPartsOld() =>
//uses first part to search for all parts and compares against this array
ArchiveGetParts(
- new[]
- {
+ [
"Rar2.multi.rar",
"Rar2.multi.r00",
"Rar2.multi.r01",
@@ -441,15 +435,14 @@ public void Rar_GetPartsOld() =>
"Rar2.multi.r03",
"Rar2.multi.r04",
"Rar2.multi.r05",
- }
+ ]
);
[Fact]
public void Rar_GetPartsNew() =>
//uses first part to search for all parts and compares against this array
ArchiveGetParts(
- new[]
- {
+ [
"Rar4.multi.part01.rar",
"Rar4.multi.part02.rar",
"Rar4.multi.part03.rar",
@@ -457,7 +450,7 @@ public void Rar_GetPartsNew() =>
"Rar4.multi.part05.rar",
"Rar4.multi.part06.rar",
"Rar4.multi.part07.rar",
- }
+ ]
);
[Fact]
diff --git a/tests/SharpCompress.Test/ReaderTests.cs b/tests/SharpCompress.Test/ReaderTests.cs
index 0bc4dba9e..841e212c2 100644
--- a/tests/SharpCompress.Test/ReaderTests.cs
+++ b/tests/SharpCompress.Test/ReaderTests.cs
@@ -36,18 +36,22 @@ ReaderOptions options
)
{
using var file = File.OpenRead(testArchive);
- using var protectedStream = SharpCompressStream.Create(
- new ForwardOnlyStream(file, options.BufferSize),
- leaveOpen: true,
- throwOnDispose: true,
- bufferSize: options.BufferSize
- );
- using var testStream = new TestStream(protectedStream);
- using (var reader = ReaderFactory.Open(testStream, options))
+ var testStream = new TestStream(file);
+ using (
+ var protectedStream = SharpCompressStream.Create(
+ new ForwardOnlyStream(testStream, options.BufferSize),
+ leaveOpen: options.LeaveStreamOpen,
+ throwOnDispose: true,
+ bufferSize: options.BufferSize
+ )
+ )
{
- UseReader(reader, expectedCompression);
- protectedStream.ThrowOnDispose = false;
- Assert.False(testStream.IsDisposed, $"{nameof(testStream)} prematurely closed");
+ using (var reader = ReaderFactory.Open(testStream, options))
+ {
+ UseReader(reader, expectedCompression);
+ protectedStream.ThrowOnDispose = false;
+ Assert.False(testStream.IsDisposed, $"{nameof(testStream)} prematurely closed");
+ }
}
// Boolean XOR -- If the stream should be left open (true), then the stream should not be disposed (false)
diff --git a/tests/SharpCompress.Test/SevenZip/SevenZipArchiveTests.cs b/tests/SharpCompress.Test/SevenZip/SevenZipArchiveTests.cs
index b52d4bc23..a3d796912 100644
--- a/tests/SharpCompress.Test/SevenZip/SevenZipArchiveTests.cs
+++ b/tests/SharpCompress.Test/SevenZip/SevenZipArchiveTests.cs
@@ -34,9 +34,8 @@ public void SevenZipArchive_LZMAAES_PathRead() =>
[Fact]
public void SevenZipArchive_LZMAAES_NoPasswordExceptionTest() =>
- Assert.Throws(
- typeof(CryptographicException),
- () => ArchiveFileRead("7Zip.LZMA.Aes.7z", new ReaderOptions { Password = null })
+ Assert.Throws<CryptographicException>(() =>
+ ArchiveFileRead("7Zip.LZMA.Aes.7z", new ReaderOptions { Password = null })
); //was failing with ArgumentNullException not CryptographicException like rar
[Fact]
diff --git a/tests/SharpCompress.Test/Tar/TarArchiveTests.cs b/tests/SharpCompress.Test/Tar/TarArchiveTests.cs
index efbde4723..ac21143c6 100644
--- a/tests/SharpCompress.Test/Tar/TarArchiveTests.cs
+++ b/tests/SharpCompress.Test/Tar/TarArchiveTests.cs
@@ -49,7 +49,7 @@ public void Tar_FileName_Exactly_100_Characters()
var unmodified = Path.Combine(SCRATCH2_FILES_PATH, archive);
using (var archive2 = TarArchive.Open(unmodified))
{
- Assert.Equal(1, archive2.Entries.Count);
+ Assert.Single(archive2.Entries);
Assert.Contains(filename, archive2.Entries.Select(entry => entry.Key));
foreach (var entry in archive2.Entries)
@@ -109,7 +109,7 @@ public void Tar_VeryLongFilepathReadback()
var unmodified = Path.Combine(SCRATCH2_FILES_PATH, archive);
using (var archive2 = TarArchive.Open(unmodified))
{
- Assert.Equal(1, archive2.Entries.Count);
+ Assert.Single(archive2.Entries);
Assert.Contains(longFilename, archive2.Entries.Select(entry => entry.Key));
foreach (var entry in archive2.Entries)
diff --git a/tests/SharpCompress.Test/Tar/TarReaderTests.cs b/tests/SharpCompress.Test/Tar/TarReaderTests.cs
index a676d2dae..f58b9ebbc 100644
--- a/tests/SharpCompress.Test/Tar/TarReaderTests.cs
+++ b/tests/SharpCompress.Test/Tar/TarReaderTests.cs
@@ -256,7 +256,7 @@ public void Tar_GZip_With_Symlink_Entries()
}
else
{
- Assert.True(false, "Symlink has no target");
+ Assert.Fail("Symlink has no target");
}
}
}
diff --git a/tests/SharpCompress.Test/Zip/TestPseudoTextStream.cs b/tests/SharpCompress.Test/Zip/TestPseudoTextStream.cs
index 20f1c9ce4..ddad5ba32 100644
--- a/tests/SharpCompress.Test/Zip/TestPseudoTextStream.cs
+++ b/tests/SharpCompress.Test/Zip/TestPseudoTextStream.cs
@@ -10,7 +10,7 @@ namespace SharpCompress.Test.Zip;
///
internal class TestPseudoTextStream : Stream
{
- private static readonly char[] _vowels = { 'a', 'e', 'i', 'o', 'u' };
+ private static readonly char[] _vowels = ['a', 'e', 'i', 'o', 'u'];
private static readonly char[] _consonants = "bcdfghjklmnpqrstvwxyz".ToCharArray();
private long _position = 0;
diff --git a/tests/SharpCompress.Test/Zip/Zip64Tests.cs b/tests/SharpCompress.Test/Zip/Zip64Tests.cs
index a89b11287..3fdddb9fa 100644
--- a/tests/SharpCompress.Test/Zip/Zip64Tests.cs
+++ b/tests/SharpCompress.Test/Zip/Zip64Tests.cs
@@ -150,7 +150,9 @@ bool forwardOnly
var opts = new ZipWriterOptions(CompressionType.Deflate) { UseZip64 = setZip64 };
// Use no compression to ensure we hit the limits (actually inflates a bit, but seems better than using method==Store)
+#pragma warning disable CS0618 // Type or member is obsolete
var eo = new ZipWriterEntryOptions { DeflateCompressionLevel = CompressionLevel.None };
+#pragma warning restore CS0618 // Type or member is obsolete
using var zip = File.OpenWrite(filename);
using var st = forwardOnly ? (Stream)new ForwardOnlyStream(zip) : zip;
diff --git a/tests/SharpCompress.Test/Zip/ZipArchiveTests.cs b/tests/SharpCompress.Test/Zip/ZipArchiveTests.cs
index e09df7e96..c1616f1ae 100644
--- a/tests/SharpCompress.Test/Zip/ZipArchiveTests.cs
+++ b/tests/SharpCompress.Test/Zip/ZipArchiveTests.cs
@@ -107,26 +107,25 @@ public void ZipX_Deflate_Multi_ArchiveFirstFileRead() =>
[Fact]
public void Zip_GetParts() =>
//uses first part to search for all parts and compares against this array
- ArchiveGetParts(new[] { "Infozip.nocomp.multi.zip", "Infozip.nocomp.multi.z01" });
+ ArchiveGetParts(["Infozip.nocomp.multi.zip", "Infozip.nocomp.multi.z01"]);
[Fact]
public void ZipX_GetParts() =>
//uses first part to search for all parts and compares against this array
- ArchiveGetParts(new[] { "WinZip26.nocomp.multi.zipx", "WinZip26.nocomp.multi.zx01" });
+ ArchiveGetParts(["WinZip26.nocomp.multi.zipx", "WinZip26.nocomp.multi.zx01"]);
[Fact]
public void Zip_GetPartsSplit() =>
//uses first part to search for all parts and compares against this array
ArchiveGetParts(
- new[]
- {
+ [
"Zip.deflate.split.001",
"Zip.deflate.split.002",
"Zip.deflate.split.003",
"Zip.deflate.split.004",
"Zip.deflate.split.005",
"Zip.deflate.split.006",
- }
+ ]
);
//will detect and load other files
@@ -497,7 +496,7 @@ public void Zip_Read_Volume_Comment()
new ReaderOptions { Password = "test" }
);
var isComplete = reader.IsComplete;
- Assert.Equal(1, reader.Volumes.Count);
+ Assert.Single(reader.Volumes);
var expectedComment =
"Encoding:utf-8 || Compression:Deflate levelDefault || Encrypt:None || ZIP64:Always\r\nCreated at 2017-Jan-23 14:10:43 || DotNetZip Tool v1.9.1.8\r\nTest zip64 archive";
@@ -622,7 +621,7 @@ public void TestSharpCompressWithEmptyStream()
using (var zipWriter = WriterFactory.Open(stream, ArchiveType.Zip, CompressionType.Deflate))
{
- zipWriter.Write("foo.txt", new MemoryStream(Array.Empty<byte>()));
+ zipWriter.Write("foo.txt", new MemoryStream([]));
zipWriter.Write("foo2.txt", new MemoryStream(new byte[10]));
}
diff --git a/tests/SharpCompress.Test/Zip/ZipReaderTests.cs b/tests/SharpCompress.Test/Zip/ZipReaderTests.cs
index 835521e0f..3fd07fea2 100644
--- a/tests/SharpCompress.Test/Zip/ZipReaderTests.cs
+++ b/tests/SharpCompress.Test/Zip/ZipReaderTests.cs
@@ -247,7 +247,7 @@ public void TestSharpCompressWithEmptyStream()
{
var expected = new[]
{
- new Tuple<string, byte[]>("foo.txt", Array.Empty<byte>()),
+ new Tuple<string, byte[]>("foo.txt", []),
new Tuple<string, byte[]>("foo2.txt", new byte[10]),
};