diff --git a/.credo.exs b/.credo.exs
new file mode 100644
index 0000000..c5ac0a4
--- /dev/null
+++ b/.credo.exs
@@ -0,0 +1,192 @@
+# This file is synced with stordco/common-config-elixir. Any changes will be overwritten.
+
+# This file contains the configuration for Credo and you are probably reading
+# this after creating it with `mix credo.gen.config`.
+#
+# If you find anything wrong or unclear in this file, please report an
+# issue on GitHub: https://github.com/rrrene/credo/issues
+#
+%{
+  #
+  # You can have as many configs as you like in the `configs:` field.
+  configs: [
+    %{
+      #
+      # Run any config using `mix credo -C <config-name>`. If no config name is given
+      # "default" is used.
+      #
+      name: "default",
+      #
+      # These are the files included in the analysis:
+      files: %{
+        #
+        # You can give explicit globs or simply directories.
+        # In the latter case `**/*.{ex,exs}` will be used.
+        #
+        included: ["lib/", "priv/", "test/"],
+        excluded: [~r"/_build/", ~r"/deps/", ~r"/node_modules/"]
+      },
+      #
+      # Load and configure plugins here:
+      #
+      plugins: [],
+      #
+      # If you create your own checks, you must specify the source files for
+      # them here, so they can be loaded by Credo before running the analysis.
+      #
+      requires: [],
+      #
+      # If you want to enforce a style guide and need a more traditional linting
+      # experience, you can change `strict` to `true` below:
+      #
+      strict: true,
+      #
+      # To modify the timeout for parsing files, change this value:
+      #
+      parse_timeout: 5000,
+      #
+      # If you want to use uncolored output by default, you can change `color`
+      # to `false` below:
+      #
+      color: true,
+      #
+      # You can customize the parameters of any check by adding a second element
+      # to the tuple.
+      #
+      # To disable a check put `false` as second element:
+      #
+      #     {Credo.Check.Design.DuplicatedCode, false}
+      #
+      checks: [
+        #
+        ## Database Migration Checks
+        #
+        {ExcellentMigrations.CredoCheck.MigrationsSafety, []},
+
+        #
+        ## Consistency Checks
+        #
+        {Credo.Check.Consistency.ExceptionNames, []},
+        {Credo.Check.Consistency.LineEndings, []},
+        {Credo.Check.Consistency.ParameterPatternMatching, []},
+        {Credo.Check.Consistency.SpaceAroundOperators, []},
+        {Credo.Check.Consistency.SpaceInParentheses, []},
+        {Credo.Check.Consistency.TabsOrSpaces, []},
+
+        #
+        ## Design Checks
+        #
+        # You can customize the priority of any check
+        # Priority values are: `low, normal, high, higher`
+        #
+        {Credo.Check.Design.AliasUsage, [priority: :low, if_nested_deeper_than: 2, if_called_more_often_than: 2]},
+        # You can also customize the exit_status of each check.
+        # If you don't want TODO comments to cause `mix credo` to fail, just
+        # set this value to 0 (zero).
+        #
+        {Credo.Check.Design.TagTODO, [exit_status: 2]},
+        {Credo.Check.Design.TagFIXME, []},
+
+        #
+        ## Readability Checks
+        #
+        {Credo.Check.Readability.AliasOrder, []},
+        {Credo.Check.Readability.FunctionNames, []},
+        {Credo.Check.Readability.LargeNumbers, [trailing_digits: 2]},
+        {Credo.Check.Readability.MaxLineLength, false},
+        {Credo.Check.Readability.ModuleAttributeNames, []},
+        {Credo.Check.Readability.ModuleDoc, false},
+        {Credo.Check.Readability.ModuleNames, []},
+        {Credo.Check.Readability.ParenthesesInCondition, []},
+        {Credo.Check.Readability.ParenthesesOnZeroArityDefs, []},
+        {Credo.Check.Readability.PredicateFunctionNames, []},
+        {Credo.Check.Readability.PreferImplicitTry, []},
+        {Credo.Check.Readability.RedundantBlankLines, []},
+        {Credo.Check.Readability.Semicolons, []},
+        {Credo.Check.Readability.SpaceAfterCommas, []},
+        {Credo.Check.Readability.StringSigils, []},
+        {Credo.Check.Readability.TrailingBlankLine, []},
+        {Credo.Check.Readability.TrailingWhiteSpace, []},
+        {Credo.Check.Readability.UnnecessaryAliasExpansion, []},
+        {Credo.Check.Readability.VariableNames, []},
+        {Credo.Check.Readability.NestedFunctionCalls, []},
+
+        #
+        ## Refactoring Opportunities
+        #
+        {Credo.Check.Refactor.CondStatements, []},
+        {Credo.Check.Refactor.CyclomaticComplexity, []},
+        {Credo.Check.Refactor.FunctionArity, []},
+        {Credo.Check.Refactor.LongQuoteBlocks, []},
+        # {Credo.Check.Refactor.MapInto, []},
+        {Credo.Check.Refactor.MatchInCondition, []},
+        {Credo.Check.Refactor.NegatedConditionsInUnless, []},
+        {Credo.Check.Refactor.NegatedConditionsWithElse, []},
+        {Credo.Check.Refactor.Nesting, []},
+        {Credo.Check.Refactor.UnlessWithElse, []},
+        {Credo.Check.Refactor.WithClauses, []},
+
+        #
+        ## Warnings
+        #
+        {Credo.Check.Warning.ApplicationConfigInModuleAttribute, []},
+        {Credo.Check.Warning.BoolOperationOnSameValues, []},
+        {Credo.Check.Warning.ExpensiveEmptyEnumCheck, []},
+        {Credo.Check.Warning.IExPry, []},
+        {Credo.Check.Warning.IoInspect, []},
+        # {Credo.Check.Warning.LazyLogging, []},
+        {Credo.Check.Warning.MixEnv, []},
+        {Credo.Check.Warning.OperationOnSameValues, []},
+        {Credo.Check.Warning.OperationWithConstantResult, []},
+        {Credo.Check.Warning.RaiseInsideRescue, []},
+        {Credo.Check.Warning.UnusedEnumOperation, []},
+        {Credo.Check.Warning.UnusedFileOperation, []},
+        {Credo.Check.Warning.UnusedKeywordOperation, []},
+        {Credo.Check.Warning.UnusedListOperation, []},
+        {Credo.Check.Warning.UnusedPathOperation, []},
+        {Credo.Check.Warning.UnusedRegexOperation, []},
+        {Credo.Check.Warning.UnusedStringOperation, []},
+        {Credo.Check.Warning.UnusedTupleOperation, []},
+        {Credo.Check.Warning.UnsafeExec, []},
+
+        #
+        # Checks scheduled for next check update (opt-in for now, just replace `false` with `[]`)
+
+        #
+        # Controversial and experimental checks (opt-in, just replace `false` with `[]`)
+        #
+        {Credo.Check.Consistency.MultiAliasImportRequireUse, []},
+        {Credo.Check.Consistency.UnusedVariableNames, false},
+        {Credo.Check.Design.DuplicatedCode, false},
+        {Credo.Check.Readability.AliasAs, false},
+        {Credo.Check.Readability.BlockPipe, []},
+        {Credo.Check.Readability.ImplTrue, []},
+        {Credo.Check.Readability.MultiAlias, false},
+        {Credo.Check.Readability.SeparateAliasRequire, []},
+        {Credo.Check.Readability.SinglePipe, []},
+        {Credo.Check.Readability.Specs, false},
+        {Credo.Check.Readability.StrictModuleLayout,
+         [
+           order:
+             ~w(moduledoc behaviour use import require alias module_attribute defstruct callback macrocallback optional_callback)a,
+           ignore: [:type]
+         ]},
+        {Credo.Check.Readability.WithCustomTaggedTuple, []},
+        {Credo.Check.Refactor.ABCSize, false},
+        {Credo.Check.Refactor.AppendSingleItem, []},
+        {Credo.Check.Refactor.DoubleBooleanNegation, []},
+        {Credo.Check.Refactor.ModuleDependencies, false},
+        {Credo.Check.Refactor.NegatedIsNil, []},
+        {Credo.Check.Refactor.PipeChainStart, []},
+        {Credo.Check.Refactor.VariableRebinding, false},
+        {Credo.Check.Warning.LeakyEnvironment, []},
+        {Credo.Check.Warning.MapGetUnsafePass, []},
+        {Credo.Check.Warning.UnsafeToAtom, []}
+
+        #
+        # Custom checks can be created using `mix credo.gen.check`.
+        #
+      ]
+    }
+  ]
+}
diff --git a/.formatter.exs b/.formatter.exs
index d2cda26..1de03af 100644
--- a/.formatter.exs
+++ b/.formatter.exs
@@ -1,4 +1,8 @@
-# Used by "mix format"
+# This file is synced with stordco/common-config-elixir. Any changes will be overwritten.
+
 [
-  inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"]
+  import_deps: [],
+  inputs: ["*.{ex,exs}", "{config,lib,priv,test}/**/*.{ex,exs}"],
+  line_length: 120,
+  plugins: []
 ]
diff --git a/.github/workflows/pr.yml b/.github/workflows/pr.yaml
similarity index 59%
rename from .github/workflows/pr.yml
rename to .github/workflows/pr.yaml
index 9f89ee2..0a92319 100644
--- a/.github/workflows/pr.yml
+++ b/.github/workflows/pr.yaml
@@ -1,7 +1,9 @@
----
+# This file is synced with stordco/common-config-elixir. Any changes will be overwritten.
+
 name: PR
 
 on:
+  merge_group:
   pull_request:
     types:
       - edited
@@ -11,6 +13,7 @@ on:
 
 jobs:
   title:
+    if: github.event_name == 'pull_request'
    name: Check Title
    runs-on: ubuntu-latest
 
@@ -25,6 +28,8 @@ jobs:
          hint: |
            You can pass the following formats:
 
-            feat: some title of the PR
-            fix: some title of the PR
+            fix: [OR-123] some title of the PR
+            feat: [OR-1234] some title of the PR
            chore: update some action
+
+            Note: Adding ! (i.e. `feat!:`) represents a breaking change and will result in a SemVer major.
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
new file mode 100644
index 0000000..0644819
--- /dev/null
+++ b/.github/workflows/release.yaml
@@ -0,0 +1,23 @@
+# This file is synced with stordco/common-config-elixir. Any changes will be overwritten.
+
+name: Release
+
+on:
+  push:
+    branches:
+      - main
+
+jobs:
+  Please:
+    runs-on: ubuntu-latest
+
+    steps:
+      - id: release
+        name: Release
+        uses: google-github-actions/release-please-action@v3
+        with:
+          command: manifest
+          config-file: .release-please-config.json
+          manifest-file: .release-please-manifest.json
+          release-type: elixir
+          token: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
deleted file mode 100644
index ca26a79..0000000
--- a/.github/workflows/release.yml
+++ /dev/null
@@ -1,20 +0,0 @@
----
-name: Release
-
-on:
-  push:
-    branches:
-      - main
-
-jobs:
-  please:
-    name: Please
-    runs-on: ubuntu-latest
-
-    steps:
-      - uses: google-github-actions/release-please-action@v3
-        with:
-          release-type: elixir
-          package-name: data_streams
-          token: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }}
-          extra-files: README.md
diff --git a/.release-please-config.json b/.release-please-config.json
new file mode 100644
index 0000000..c90be46
--- /dev/null
+++ b/.release-please-config.json
@@ -0,0 +1,39 @@
+{
+  "$comment": "This file is synced with stordco/common-config-elixir. Any changes will be overwritten.",
+  "$schema": "https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json",
+  "changelog-sections": [
+    {
+      "type": "feat",
+      "section": "Features",
+      "hidden": false
+    },
+    {
+      "type": "fix",
+      "section": "Bug Fixes",
+      "hidden": false
+    },
+    {
+      "type": "chore",
+      "section": "Miscellaneous",
+      "hidden": false
+    }
+  ],
+  "draft": false,
+  "draft-pull-request": false,
+  "packages": {
+    ".": {
+      "extra-files": [
+        "README.md"
+      ],
+      "release-type": "elixir"
+    }
+  },
+  "plugins": [
+    {
+      "type": "sentence-case"
+    }
+  ],
+  "prerelease": false,
+  "pull-request-header": "An automated release has been created for you.",
+  "separate-pull-requests": true
+}
diff --git a/.release-please-manifest.json b/.release-please-manifest.json
new file mode 100644
index 0000000..c3f1463
--- /dev/null
+++ b/.release-please-manifest.json
@@ -0,0 +1,3 @@
+{
+  ".": "1.2.0"
+}
diff --git a/lib/datadog/data_streams/aggregator.ex b/lib/datadog/data_streams/aggregator.ex
index ecc2dc4..f67b094 100644
--- a/lib/datadog/data_streams/aggregator.ex
+++ b/lib/datadog/data_streams/aggregator.ex
@@ -30,10 +30,10 @@ defmodule Datadog.DataStreams.Aggregator do
 
   use GenServer
 
-  alias Datadog.DataStreams.{Aggregator, Config, Payload, Transport}
-
-  require Logger
-
+  require Logger
+
+  alias Datadog.DataStreams.{Aggregator, Config, Payload, Transport}
+
   @send_interval 10_000
 
   @doc """
@@ -77,8 +77,8 @@ defmodule Datadog.DataStreams.Aggregator do
       iex> :ok = Aggregator.flush()
 
   """
-  @spec flush() :: :ok
-  def flush() do
+  @spec flush :: :ok
+  def flush do
     Process.send(__MODULE__, :send, [])
   end
@@ -131,8 +131,7 @@ defmodule Datadog.DataStreams.Aggregator do
   def handle_cast({:add, %Aggregator.Offset{} = offset}, state) do
     new_ts_type_current_buckets =
       Aggregator.Bucket.upsert(state.ts_type_current_buckets, offset.timestamp, fn bucket ->
-        type_key =
-          if offset.type == :commit, do: :latest_commit_offsets, else: :latest_produce_offsets
+        type_key = if offset.type == :commit, do: :latest_commit_offsets, else: :latest_produce_offsets
 
         new_offsets = bucket |> Map.get(:type_key, []) |> Aggregator.Offset.upsert(offset)
         Map.put(bucket, type_key, new_offsets)
diff --git a/lib/datadog/data_streams/aggregator/offset.ex b/lib/datadog/data_streams/aggregator/offset.ex
index 4baaeb1..3268cf7 100644
--- a/lib/datadog/data_streams/aggregator/offset.ex
+++ b/lib/datadog/data_streams/aggregator/offset.ex
@@ -40,7 +40,7 @@ defmodule Datadog.DataStreams.Aggregator.Offset do
       end)
 
     if is_nil(matching_index) do
-      offsets ++ [upsert_offset]
+      [upsert_offset | offsets]
    else
      List.replace_at(offsets, matching_index, upsert_offset)
    end
diff --git a/lib/datadog/data_streams/config.ex b/lib/datadog/data_streams/config.ex
index 8d8372e..9990e35 100644
--- a/lib/datadog/data_streams/config.ex
+++ b/lib/datadog/data_streams/config.ex
@@ -29,8 +29,8 @@ defmodule Datadog.DataStreams.Config do
       "unnamed-elixir-service"
 
   """
-  @spec service() :: String.t()
-  def service() do
+  @spec service :: String.t()
+  def service do
    otel_service_name =
      :opentelemetry
      |> Application.get_env(:resource, [])
@@ -70,8 +70,8 @@ defmodule Datadog.DataStreams.Config do
       ""
 
   """
-  @spec env() :: String.t()
-  def env() do
+  @spec env :: String.t()
+  def env do
    otel_service_env =
      :opentelemetry
      |> Application.get_env(:resource, [])
@@ -105,8 +105,8 @@ defmodule Datadog.DataStreams.Config do
       ""
 
   """
-  @spec primary_tag() :: String.t()
-  def primary_tag() do
+  @spec primary_tag :: String.t()
+  def primary_tag do
    :data_streams
    |> Application.get_env(:metadata, [])
    |> Keyword.get(:primary_tag, "")
diff --git a/lib/datadog/data_streams/container.ex b/lib/datadog/data_streams/container.ex
index a17c472..e3392c8 100644
--- a/lib/datadog/data_streams/container.ex
+++ b/lib/datadog/data_streams/container.ex
@@ -15,9 +15,7 @@ defmodule Datadog.DataStreams.Container do
   @task_source Regex.source(~r/[0-9a-f]{32}-\d+/)
 
   @exp_line ~r/^\d+:[^:]*:(.+)$/
-  @exp_container_id Regex.compile!(
-                      "(#{@uuid_source}|#{@container_source}|#{@task_source})(?:.scope)?$"
-                    )
+  @exp_container_id Regex.compile!("(#{@uuid_source}|#{@container_source}|#{@task_source})(?:.scope)?$")
 
   @doc """
   Starts the agent and stores the current container id in memory.
@@ -29,16 +27,16 @@ defmodule Datadog.DataStreams.Container do
   @doc """
   Returns the current container id.
   """
-  @spec get() :: String.t() | nil
-  def get() do
+  @spec get :: String.t() | nil
+  def get do
    Agent.get(__MODULE__, & &1)
  end
 
   @doc """
   Attempts to return the container id from the cgroup path (`#{@cgroup_path}`). Empty on failure.
   """
-  @spec read_container_id() :: String.t() | nil
-  def read_container_id(),
+  @spec read_container_id :: String.t() | nil
+  def read_container_id,
    do: read_container_id(@cgroup_path)
 
   @doc """
diff --git a/lib/datadog/data_streams/context.ex b/lib/datadog/data_streams/context.ex
index 49727ee..11cfaac 100644
--- a/lib/datadog/data_streams/context.ex
+++ b/lib/datadog/data_streams/context.ex
@@ -15,17 +15,17 @@ defmodule Datadog.DataStreams.Context do
   [OTB]: https://github.com/open-telemetry/opentelemetry-erlang/blob/main/apps/opentelemetry_api/src/otel_baggage.erl
   """
 
-  @context_key "dd-datastreams"
-
-  alias Datadog.DataStreams.{Pathway, Tags}
-
+  alias Datadog.DataStreams.{Pathway, Tags}
+
+  @context_key "dd-datastreams"
+
   @doc """
   Returns the current existing Pathway from OpenTelemetry. If there is no
   Pathway in the current context, `nil` will be returned
   """
-  @spec get() :: Pathway.t() | nil
-  def get() do
+  @spec get :: Pathway.t() | nil
+  def get do
    OpenTelemetry.Ctx.get_value(@context_key, nil)
  end
@@ -42,8 +42,8 @@ defmodule Datadog.DataStreams.Context do
   Removes the current existing Pathway from OpenTelemetry. Returns the value
   that existing in OpenTelemetry.
""" - @spec clear() :: Pathway.t() | nil - def clear() do + @spec clear :: Pathway.t() | nil + def clear do case get() do nil -> nil diff --git a/lib/datadog/data_streams/payload/payload.ex b/lib/datadog/data_streams/payload/payload.ex index c0f41f5..318c10a 100644 --- a/lib/datadog/data_streams/payload/payload.ex +++ b/lib/datadog/data_streams/payload/payload.ex @@ -63,7 +63,7 @@ defmodule Datadog.DataStreams.Payload do def add_bucket(payload, %Aggregator.Bucket{groups: %{}}), do: payload def add_bucket(%__MODULE__{} = payload, %Aggregator.Bucket{} = bucket, timestamp_type) do - %{payload | stats: payload.stats ++ [Payload.Bucket.new(bucket, timestamp_type)]} + %{payload | stats: [Payload.Bucket.new(bucket, timestamp_type) | payload.stats]} end @doc """ diff --git a/lib/datadog/data_streams/propagator.ex b/lib/datadog/data_streams/propagator.ex index a41cb4e..3269edf 100644 --- a/lib/datadog/data_streams/propagator.ex +++ b/lib/datadog/data_streams/propagator.ex @@ -61,7 +61,7 @@ defmodule Datadog.DataStreams.Propagator do key in [@propagation_key_base64, @propagation_key] end) - removed_headers ++ [{@propagation_key, encode(pathway)}] + [{@propagation_key, encode(pathway)} | removed_headers] end def encode_header(value), do: value @@ -180,6 +180,8 @@ defmodule Datadog.DataStreams.Propagator do """ @spec decode(binary()) :: Pathway.t() | nil def decode(<>) do + # This is the cleanest way I could think of to write it. + # credo:disable-for-lines:2 Credo.Check.Refactor.NegatedIsNil with pathway_start when not is_nil(pathway_start) <- decode_time(pathway), edge_start when not is_nil(edge_start) <- decode_time(edge) do %Pathway{ diff --git a/lib/datadog/data_streams/transport.ex b/lib/datadog/data_streams/transport.ex index f11b7bf..4012f73 100644 --- a/lib/datadog/data_streams/transport.ex +++ b/lib/datadog/data_streams/transport.ex @@ -44,7 +44,7 @@ defmodule Datadog.DataStreams.Transport do end end - defp request_headers() do + defp request_headers do case Container.get() do nil -> @headers container_id -> [{"Datadog-Container-ID", container_id}] ++ @headers @@ -82,7 +82,7 @@ defmodule Datadog.DataStreams.Transport do @spec header?(Mint.Types.headers(), String.t(), String.t()) :: bool() defp header?(headers, key, value) do Enum.any?(headers, fn {k, v} -> - String.downcase(k) == key and String.contains?(String.downcase(v), value) + String.downcase(k) == key and v |> String.downcase() |> String.contains?(value) end) end end diff --git a/lib/datadog/sketch/index_mapping/logarithmic.ex b/lib/datadog/sketch/index_mapping/logarithmic.ex index 6657efd..32f78f0 100644 --- a/lib/datadog/sketch/index_mapping/logarithmic.ex +++ b/lib/datadog/sketch/index_mapping/logarithmic.ex @@ -11,16 +11,16 @@ defmodule Datadog.Sketch.IndexMapping.Logarithmic do our logic handling. """ + @behaviour Datadog.Sketch.IndexMapping + + alias Datadog.Sketch.IndexMapping + defstruct gamma: 0.0, index_offset: 0.0, multiplier: 0.0 @type t :: Datadog.Sketch.IndexMapping.t() - @behaviour Datadog.Sketch.IndexMapping - - alias Datadog.Sketch.IndexMapping - @doc """ Creates a new Logarithmic index mapping with the given accuracy. 
@@ -87,7 +87,7 @@ defmodule Datadog.Sketch.IndexMapping.Logarithmic do
       false
 
   """
-  @impl true
+  @impl Datadog.Sketch.IndexMapping
   @spec equals(t(), t()) :: boolean()
   def equals(%{gamma: sgamma, index_offset: sindex_offset}, %{
         gamma: ogamma,
@@ -111,7 +111,7 @@ defmodule Datadog.Sketch.IndexMapping.Logarithmic do
       21979
 
   """
-  @impl true
+  @impl Datadog.Sketch.IndexMapping
   @spec index(t(), float()) :: integer()
   def index(%{index_offset: index_offset, multiplier: multiplier}, value) do
     index = :math.log(value) * multiplier + index_offset
@@ -135,7 +135,7 @@ defmodule Datadog.Sketch.IndexMapping.Logarithmic do
       1.23355147396003e19
 
   """
-  @impl true
+  @impl Datadog.Sketch.IndexMapping
   @spec value(t(), integer()) :: float()
   def value(self, index) do
     lower_bound(self, index) * (1 + relative_accuracy(self))
@@ -153,7 +153,7 @@ defmodule Datadog.Sketch.IndexMapping.Logarithmic do
       1.2214109013609646
 
   """
-  @impl true
+  @impl Datadog.Sketch.IndexMapping
   @spec lower_bound(t(), integer()) :: float()
   def lower_bound(%{index_offset: index_offset, multiplier: multiplier}, index) do
     :math.exp((index - index_offset) / multiplier)
@@ -167,7 +167,7 @@ defmodule Datadog.Sketch.IndexMapping.Logarithmic do
       0.009999999999999898
 
   """
-  @impl true
+  @impl Datadog.Sketch.IndexMapping
   @spec relative_accuracy(t()) :: float()
   def relative_accuracy(%{gamma: gamma}) do
     1 - 2 / (1 + gamma)
@@ -182,7 +182,7 @@ defmodule Datadog.Sketch.IndexMapping.Logarithmic do
       %Datadog.Sketch.Protobuf.IndexMapping{gamma: 1.02020202020202, interpolation: :NONE}
 
   """
-  @impl true
+  @impl Datadog.Sketch.IndexMapping
   @spec to_proto(t()) :: struct()
   def to_proto(self) do
     %Datadog.Sketch.Protobuf.IndexMapping{
diff --git a/lib/datadog/sketch/sketch.ex b/lib/datadog/sketch/sketch.ex
index 26a5631..8f85d0a 100644
--- a/lib/datadog/sketch/sketch.ex
+++ b/lib/datadog/sketch/sketch.ex
@@ -39,8 +39,8 @@ defmodule Datadog.Sketch do
       iex> %Sketch{} = Sketch.new_default()
 
   """
-  @spec new_default() :: t()
-  def new_default() do
+  @spec new_default :: t()
+  def new_default do
    %__MODULE__{
      index_mapping: IndexMapping.Logarithmic.new(0.01),
      positive_value_store: Store.Dense.new(),
diff --git a/lib/datadog/sketch/store/dense.ex b/lib/datadog/sketch/store/dense.ex
index c791b68..6564498 100644
--- a/lib/datadog/sketch/store/dense.ex
+++ b/lib/datadog/sketch/store/dense.ex
@@ -4,8 +4,13 @@ defmodule Datadog.Sketch.Store.Dense do
   number of bins are bound only by the size of the `:array` that can be
   allocated.
   """
+  @behaviour Datadog.Sketch.Store
+
+  alias Datadog.Sketch.Store
+
+  @array_length_overhead 64
+  @array_length_growth_increment 0.1
+
   defstruct bins: :array.new(0, [{:fixed, true}, {:default, 0}]),
             count: 0.0,
             offset: 0,
@@ -22,11 +27,6 @@ defmodule Datadog.Sketch.Store.Dense do
           max_index: integer()
         }
 
-  @behaviour Datadog.Sketch.Store
-
-  @array_length_overhead 64
-  @array_length_growth_increment 0.1
-
   @doc """
   Creates a new dense store.
@@ -36,8 +36,8 @@ defmodule Datadog.Sketch.Store.Dense do
       %Dense{}
 
   """
-  @spec new() :: t()
-  def new() do
+  @spec new :: t()
+  def new do
    %__MODULE__{}
  end
 
@@ -49,7 +49,7 @@ defmodule Datadog.Sketch.Store.Dense do
       iex> %Dense{} = Dense.add(Dense.new(), 100)
 
   """
-  @impl true
+  @impl Datadog.Sketch.Store
   @spec add(t(), integer()) :: t()
   def add(store, index) do
     add_with_count(store, index, 1.0)
@@ -63,7 +63,7 @@ defmodule Datadog.Sketch.Store.Dense do
       iex> %Dense{} = Dense.add_bin(Dense.new(), %{index: 100, count: 13.13})
 
   """
-  @impl true
+  @impl Datadog.Sketch.Store
   @spec add_bin(t(), Store.bin()) :: t()
   def add_bin(store, %{count: 0.0}), do: store
 
@@ -82,7 +82,7 @@ defmodule Datadog.Sketch.Store.Dense do
       ...> ])
 
   """
-  @impl true
+  @impl Datadog.Sketch.Store
   @spec add_bins(t(), [Store.bin()]) :: t()
   def add_bins(store, bins) when is_list(bins), do: Enum.reduce(bins, store, &add_bin(&2, &1))
 
@@ -97,7 +97,7 @@ defmodule Datadog.Sketch.Store.Dense do
       iex> %Dense{} = Dense.add_with_count(Dense.new(), 987, 8.3e12)
 
   """
-  @impl true
+  @impl Datadog.Sketch.Store
   @spec add_with_count(t(), integer(), float()) :: t()
   def add_with_count(store, _index, 0), do: store
 
@@ -187,10 +187,15 @@ defmodule Datadog.Sketch.Store.Dense do
 
   @spec shift_counts(t(), integer()) :: t()
   defp shift_counts(store, shift) do
-    new_array = :array.new(:array.size(store.bins), [{:fixed, true}, {:default, 0}])
+    new_array =
+      store.bins
+      |> :array.size()
+      |> :array.new([{:fixed, true}, {:default, 0}])
 
    new_array =
-      Enum.reduce(:array.sparse_to_orddict(store.bins), new_array, fn {k, v}, new_array ->
+      store.bins
+      |> :array.sparse_to_orddict()
+      |> Enum.reduce(new_array, fn {k, v}, new_array ->
        :array.set(k + shift, v, new_array)
      end)
 
@@ -211,7 +216,7 @@ defmodule Datadog.Sketch.Store.Dense do
       false
 
   """
-  @impl true
+  @impl Datadog.Sketch.Store
   @spec empty?(t()) :: bool()
   def empty?(%{count: 0.0}), do: true
   def empty?(_store), do: false
@@ -243,7 +248,7 @@ defmodule Datadog.Sketch.Store.Dense do
       872.36
 
   """
-  @impl true
+  @impl Datadog.Sketch.Store
   @spec total_count(t()) :: float()
   def total_count(%{count: count}), do: count
 
@@ -266,7 +271,7 @@ defmodule Datadog.Sketch.Store.Dense do
       4
 
   """
-  @impl true
+  @impl Datadog.Sketch.Store
   @spec min_index(t()) :: integer()
   def min_index(%{count: 0.0}), do: 0
   def min_index(%{min_index: min_index}), do: min_index
@@ -294,7 +299,7 @@ defmodule Datadog.Sketch.Store.Dense do
       65
 
   """
-  @impl true
+  @impl Datadog.Sketch.Store
   @spec max_index(t()) :: integer()
   def max_index(%{count: 0.0}), do: 0
   def max_index(%{max_index: max_index}), do: max_index
@@ -324,15 +329,16 @@ defmodule Datadog.Sketch.Store.Dense do
       12
 
   """
-  @impl true
+  @impl Datadog.Sketch.Store
   @spec key_at_rank(t(), float()) :: integer()
   def key_at_rank(store, rank) when rank < 0.0, do: key_at_rank(store, 0.0)
 
   def key_at_rank(store, rank) do
     {step, result} =
-      Enum.reduce_while(:array.sparse_to_orddict(store.bins), {:not_end, 0.0}, fn {i, b},
-                                                                                  {step, n} ->
+      store.bins
+      |> :array.sparse_to_orddict()
+      |> Enum.reduce_while({:not_end, 0.0}, fn {i, b}, {step, n} ->
        n = n + b
 
        if n > rank do
@@ -377,7 +383,7 @@ defmodule Datadog.Sketch.Store.Dense do
       }
 
   """
-  @impl true
+  @impl Datadog.Sketch.Store
   @spec to_proto(t()) :: Datadog.Sketch.Protobuf.Store.t()
   def to_proto(%{count: 0.0}), do: %Datadog.Sketch.Protobuf.Store{contiguousBinCounts: nil}
 
@@ -386,7 +392,9 @@ defmodule Datadog.Sketch.Store.Dense do
     new_array = :array.new(new_length, [{:fixed, true}, {:default, 0}])
 
     new_array =
-      Enum.reduce(:array.sparse_to_orddict(store.bins), new_array, fn {k, v}, new_array ->
+      store.bins
+      |> :array.sparse_to_orddict()
+      |> Enum.reduce(new_array, fn {k, v}, new_array ->
        :array.set(k + store.offset - store.min_index, v, new_array)
      end)
 
@@ -412,7 +420,7 @@ defmodule Datadog.Sketch.Store.Dense do
       120.0
 
   """
-  @impl true
+  @impl Datadog.Sketch.Store
   @spec reweight(t(), float()) :: t()
   def reweight(store, weight) do
     new_bins = :array.sparse_map(fn _i, v -> v * weight end, store.bins)
diff --git a/test/datadog/data_streams/container_test.exs b/test/datadog/data_streams/container_test.exs
index 2ff9973..cfd881d 100644
--- a/test/datadog/data_streams/container_test.exs
+++ b/test/datadog/data_streams/container_test.exs
@@ -22,7 +22,7 @@ defmodule Datadog.DataStreams.ContainerTest do
 
     {:ok, stream} = StringIO.open(file)
 
-    assert Container.parse_container_id(IO.binstream(stream, :line)) ===
+    assert stream |> IO.binstream(:line) |> Container.parse_container_id() ===
             "8c046cb0b72cd4c99f51b5591cd5b095967f58ee003710a45280c28ee1a9c7fa"
  end
 
@@ -33,7 +33,7 @@ defmodule Datadog.DataStreams.ContainerTest do
 
     {:ok, stream} = StringIO.open(file)
 
-    assert Container.parse_container_id(IO.binstream(stream, :line)) ===
+    assert stream |> IO.binstream(:line) |> Container.parse_container_id() ===
             "8c046cb0b72cd4c99f51b5591cd5b095967f58ee003710a45280c28ee1a9c7fa"
  end
 
@@ -44,7 +44,7 @@ defmodule Datadog.DataStreams.ContainerTest do
 
     {:ok, stream} = StringIO.open(file)
 
-    assert Container.parse_container_id(IO.binstream(stream, :line)) === nil
+    assert stream |> IO.binstream(:line) |> Container.parse_container_id() === nil
  end
 
  test "parse_container_id/1 can parse a stream (example 4)" do
@@ -54,7 +54,7 @@ defmodule Datadog.DataStreams.ContainerTest do
 
     {:ok, stream} = StringIO.open(file)
 
-    assert Container.parse_container_id(IO.binstream(stream, :line)) ===
+    assert stream |> IO.binstream(:line) |> Container.parse_container_id() ===
             "432624d2150b349fe35ba397284dea788c2bf66b885d14dfc1569b01890ca7da"
  end
 
@@ -65,7 +65,7 @@ defmodule Datadog.DataStreams.ContainerTest do
 
     {:ok, stream} = StringIO.open(file)
 
-    assert Container.parse_container_id(IO.binstream(stream, :line)) ===
+    assert stream |> IO.binstream(:line) |> Container.parse_container_id() ===
             "34dc0b5e626f2c5c4c5170e34b10e7654ce36f0fcd532739f4445baabea03376"
  end
 
@@ -76,7 +76,7 @@ defmodule Datadog.DataStreams.ContainerTest do
 
     {:ok, stream} = StringIO.open(file)
 
-    assert Container.parse_container_id(IO.binstream(stream, :line)) ===
+    assert stream |> IO.binstream(:line) |> Container.parse_container_id() ===
             "34dc0b5e-626f-2c5c-4c51-70e34b10e765"
  end
 
@@ -87,7 +87,7 @@ defmodule Datadog.DataStreams.ContainerTest do
 
     {:ok, stream} = StringIO.open(file)
 
-    assert Container.parse_container_id(IO.binstream(stream, :line)) ===
+    assert stream |> IO.binstream(:line) |> Container.parse_container_id() ===
             "34dc0b5e626f2c5c4c5170e34b10e765-1234567890"
  end
 
@@ -98,7 +98,7 @@ defmodule Datadog.DataStreams.ContainerTest do
 
     {:ok, stream} = StringIO.open(file)
 
-    assert Container.parse_container_id(IO.binstream(stream, :line)) ===
+    assert stream |> IO.binstream(:line) |> Container.parse_container_id() ===
             "34dc0b5e626f2c5c4c5170e34b10e7654ce36f0fcd532739f4445baabea03376"
  end
 
@@ -111,7 +111,7 @@ defmodule Datadog.DataStreams.ContainerTest do
 
     {:ok, stream} = StringIO.open(file)
 
-    assert Container.parse_container_id(IO.binstream(stream, :line)) ===
+    assert stream |> IO.binstream(:line) |> Container.parse_container_id() ===
             "34dc0b5e626f2c5c4c5170e34b10e7654ce36f0fcd532739f4445baabea03376"
  end
 
@@ -134,7 +134,7 @@ defmodule
Datadog.DataStreams.ContainerTest do {:ok, stream} = StringIO.open(file) - assert Container.parse_container_id(IO.binstream(stream, :line)) === + assert stream |> IO.binstream(:line) |> Container.parse_container_id() === "6f265890-5165-7fab-6b52-18d1" end @@ -145,7 +145,7 @@ defmodule Datadog.DataStreams.ContainerTest do {:ok, stream} = StringIO.open(file) - assert Container.parse_container_id(IO.binstream(stream, :line)) === + assert stream |> IO.binstream(:line) |> Container.parse_container_id() === "6f265890-5165-7fab-6b52-18d1" end @@ -153,8 +153,7 @@ defmodule Datadog.DataStreams.ContainerTest do test "read_container_id/1 can parse a file", %{tmp_dir: tmp_dir} do cid = "8c046cb0b72cd4c99f51b5591cd5b095967f58ee003710a45280c28ee1a9c7fa" - cgroup_contents = - "10:hugetlb:/kubepods/burstable/podfd52ef25-a87d-11e9-9423-0800271a638e/" <> cid + cgroup_contents = "10:hugetlb:/kubepods/burstable/podfd52ef25-a87d-11e9-9423-0800271a638e/" <> cid file_path = Path.join(tmp_dir, "fake-cgroup") diff --git a/test/datadog/sketch/sketch_test.exs b/test/datadog/sketch/sketch_test.exs index 77932f9..c0eec2b 100644 --- a/test/datadog/sketch/sketch_test.exs +++ b/test/datadog/sketch/sketch_test.exs @@ -44,48 +44,38 @@ defmodule Datadog.SketchTest do |> Sketch.to_proto() |> Protobuf.encode() - assert <<10, 9, 9, 253, 74, 129, 90, 191, 82, 240, 63, 18, 230, 8, 18, 224, 8, 246, 40, 92, - 143, 194, 245, 40, 64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 51, 51, 51, 51, 51, 123, - 138, 64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 246, 40, 92, - 143, 194, 245, 40, 64, 24, 138, 1, 26, 0, 33, 61, 10, 215, 163, 112, 157, 68, - 64>> = protobuf + assert <<10, 9, 9, 253, 74, 129, 90, 191, 82, 240, 63, 18, 230, 8, 18, 224, 8, 246, 40, 92, 143, 194, 245, 40, 64, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 51, 51, 51, 51, 51, 123, 138, 64, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 246, 40, 92, 143, 194, + 245, 40, 64, 24, 138, 1, 26, 0, 33, 61, 10, 215, 163, 112, 157, 68, 64>> = protobuf end end