From 188ebeb5c582a75925a241bce3d51be678cf5a81 Mon Sep 17 00:00:00 2001 From: Rick Mouritzen Date: Thu, 23 May 2024 13:53:33 -0700 Subject: [PATCH 1/7] Issue #91: Add improved event validator API --- config/config.exs | 2 +- lib/schema/{inspector.ex => validator.ex} | 4 +- lib/schema/validator2.ex | 1595 +++++++++++++++++ .../controllers/schema_controller.ex | 80 +- lib/schema_web/router.ex | 1 + mix.exs | 2 +- 6 files changed, 1676 insertions(+), 8 deletions(-) rename lib/schema/{inspector.ex => validator.ex} (99%) create mode 100644 lib/schema/validator2.ex diff --git a/config/config.exs b/config/config.exs index 64c6d44..f98cd8f 100644 --- a/config/config.exs +++ b/config/config.exs @@ -15,7 +15,7 @@ config :logger, :console, handle_otp_reports: true, handle_sasl_reports: true, format: "$date $time [$level] $metadata $message\n", - metadata: [:request_id] + metadata: [:request_id, :mfa, :line] # Use Jason for JSON parsing in Phoenix config :phoenix, :json_library, Jason diff --git a/lib/schema/inspector.ex b/lib/schema/validator.ex similarity index 99% rename from lib/schema/inspector.ex rename to lib/schema/validator.ex index deb914e..b3bb368 100644 --- a/lib/schema/inspector.ex +++ b/lib/schema/validator.ex @@ -7,9 +7,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -defmodule Schema.Inspector do +defmodule Schema.Validator do @moduledoc """ - OCSF Event data inspector. + OCSF Event validator. """ require Logger diff --git a/lib/schema/validator2.ex b/lib/schema/validator2.ex new file mode 100644 index 0000000..acbe120 --- /dev/null +++ b/lib/schema/validator2.ex @@ -0,0 +1,1595 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +defmodule Schema.Validator2 do + @moduledoc """ + OCSF Event validator, version 2. + """ + + # Implementation note: + # All of the validate_* and add_* functions take a response and return one, possibly updated. + # The overall flow is to examine the event or list of events, and return a validation response. 
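+  #
+  # As a rough illustration (the event fields and counts here are hypothetical):
+  #
+  #   Schema.Validator2.validate(%{"class_uid" => 1001, "metadata" => %{"version" => "1.2.0"}})
+  #   # => %{error_count: 2, warning_count: 0, errors: [...], warnings: []}
+  #
+  # Each entry in :errors and :warnings is a map carrying an error or warning code, a
+  # human-readable :message, and error-specific keys such as :attribute_path.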
+ + require Logger + + @spec validate(map() | list()) :: map() | list(map()) + def validate(data) when is_map(data), do: validate_event(data, Schema.dictionary()) + def validate(data) when is_list(data), do: validate_events(data, Schema.dictionary()) + + @spec validate_event(map(), map()) :: map() + defp validate_event(event, dictionary) do + response = new_response(event) + + {response, class} = validate_class_uid_and_return_class(response, event) + + response = + if class do + {response, profiles} = validate_and_return_profiles(response, event) + validate_event_against_class(response, event, class, profiles, dictionary) + else + # Can't continue if we can't find the class + response + end + + finalize_response(response) + end + + @spec validate_events(list(map()), map()) :: list(map()) + defp validate_events(events, dictionary) do + Enum.map(events, fn event -> validate_event(event, dictionary) end) + end + + @spec validate_class_uid_and_return_class(map(), map()) :: {map(), nil | map()} + defp validate_class_uid_and_return_class(response, event) do + if Map.has_key?(event, "class_uid") do + class_uid = event["class_uid"] + + cond do + is_integer_t(class_uid) -> + case Schema.find_class(class_uid) do + nil -> + { + add_error( + response, + "class_uid_unknown", + "Unknown \"class_uid\" value; no class is defined for #{class_uid}.", + %{attribute_path: "class_uid", attribute: "class_uid", value: class_uid} + ), + nil + } + + class -> + {response, class} + end + + true -> + { + # We need to add error here; no further validation will occur (nil returned for class). + add_error_wrong_type(response, "class_uid", "class_uid", class_uid, "integer_t"), + nil + } + end + else + # We need to add error here; no further validation will occur (nil returned for class). + {add_error_required_attribute_missing(response, "class_uid", "class_uid"), nil} + end + end + + # This _must_ return no profiles as an empty list, + # otherwise Schema.Utils.apply_profiles will return the wrong result (grrr). + # This is because the API profile parameter acts as follows: + # * Missing parameter means "include all profiles". + # * Parameter with list of profiles, including no profiles (empty list) means include only + # these specific profiles. 
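+  #
+  # In other words (the profile name below is only an illustration):
+  #   %{"metadata" => %{}}                        -> {response, []}       # base attributes only
+  #   %{"metadata" => %{"profiles" => ["host"]}}  -> {response, ["host"]} # just this profile
+  #   %{"metadata" => %{"profiles" => "host"}}    -> {response, []}       # wrong type, flagged later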
+ @spec validate_and_return_profiles(map(), map()) :: {map(), list(String.t())} + defp validate_and_return_profiles(response, event) do + metadata = event["metadata"] + + if is_map(metadata) do + profiles = metadata["profiles"] + + cond do + is_list(profiles) -> + # Ensure each profile is actually defined + schema_profiles = MapSet.new(Map.keys(Schema.profiles())) + + {response, _} = + Enum.reduce( + profiles, + {response, 0}, + fn profile, {response, index} -> + response = + if is_binary(profile) and not MapSet.member?(schema_profiles, profile) do + attribute_path = make_attribute_path_array_element("metadata.profile", index) + + add_error( + response, + "profile_unknown", + "Unknown profile at \"#{attribute_path}\";" <> + " no profile is defined for \"#{profile}\".", + %{attribute_path: attribute_path, attribute: "profiles", value: profile} + ) + else + # Either profile is wrong type (which will be caught later) + # or this is a known profile + response + end + + {response, index + 1} + end + ) + + {response, profiles} + + profiles == nil -> + # profiles are missing or null, so return [] + {response, []} + + true -> + # profiles are the wrong type, this will be caught later, so for now just return [] + {response, []} + end + else + # metadata is missing or not a map (this will become an error), so return [] + {response, []} + end + end + + @spec validate_event_against_class(map(), map(), map(), list(String.t()), map()) :: map() + defp validate_event_against_class(response, event, class, profiles, dictionary) do + response + |> validate_class_deprecated(class) + |> validate_attributes(event, nil, class, profiles, dictionary) + |> validate_version(event) + |> validate_type_uid(event) + |> validate_constraints(event, class) + end + + @spec validate_class_deprecated(map(), map()) :: map() + defp validate_class_deprecated(response, class) do + if Map.has_key?(class, :"@deprecated") do + add_warning_class_deprecated(response, class) + else + response + end + end + + @spec validate_version(map(), map()) :: map() + defp validate_version(response, event) do + metadata = event["metadata"] + + if is_map(metadata) do + version = metadata["version"] + + if is_binary(version) do + schema_version = Schema.version() + + if version != schema_version do + add_error( + response, + "version_incorrect", + "Incorrect version at \"metadata.version\"; value of \"#{version}\"" <> + " does not match schema version \"#{schema_version}\"." 
<> + " This can also result in incorrect and/or missing validation messages.", + %{ + attribute_path: "metadata.version", + attribute: "version", + value: version, + expected_value: schema_version + } + ) + else + response + end + else + response + end + else + response + end + end + + @spec validate_type_uid(map(), map()) :: map() + defp validate_type_uid(response, event) do + class_uid = event["class_uid"] + activity_id = event["activity_id"] + type_uid = event["type_uid"] + + if is_integer(class_uid) and is_integer(activity_id) and is_integer(type_uid) do + expected_type_uid = class_uid * 100 + activity_id + + if type_uid == expected_type_uid do + response + else + add_error( + response, + "type_uid_incorrect", + "Event's \"type_uid\" value of #{type_uid}" <> + " does not match expected value of #{expected_type_uid}" <> + " (class_uid #{class_uid} * 100 + activity_id #{activity_id} = #{expected_type_uid}).", + %{ + attribute_path: "type_uid", + attribute: "type_uid", + value: type_uid, + expected_value: expected_type_uid + } + ) + end + else + # One or more of the values is missing or the wrong type, which is caught elsewhere + response + end + end + + @spec validate_constraints(map(), map(), map(), nil | String.t()) :: map() + defp validate_constraints(response, event_item, schema_item, attribute_path \\ nil) do + if Map.has_key?(schema_item, :constraints) do + Enum.reduce( + schema_item[:constraints], + response, + fn {constraint_key, constraint_details}, response -> + case constraint_key do + :at_least_one -> + # constraint_details is a list of keys where at least one must exist + if Enum.any?(constraint_details, fn key -> Map.has_key?(event_item, key) end) do + response + else + {description, extra} = + constraint_info(schema_item, attribute_path, constraint_key, constraint_details) + + add_error( + response, + "constraint_failed", + "Constraint failed: #{description};" <> + " expected at least one constraint attribute, bot got none.", + extra + ) + end + + :just_one -> + # constraint_details is a list of keys where exactly one must exist + count = + Enum.reduce( + constraint_details, + 0, + fn key, count -> + if Map.has_key?(event_item, key), do: count + 1, else: count + end + ) + + if count == 1 do + response + else + {description, extra} = + constraint_info(schema_item, attribute_path, constraint_key, constraint_details) + + Map.put(extra, :value_count, count) + + add_error( + response, + "constraint_failed", + "Constraint failed: #{description};" <> + " expected exactly 1 constraint attribute, got #{count}.", + extra + ) + end + + _ -> + # This could be a new kind of constraint that this code needs to start handling, + # or this a private schema / private extension has an unknown constraint type, + # or its a typo in a private schema / private extension. 
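+            # (For reference, a compiled constraints map typically looks like
+            # %{at_least_one: ["uid", "name"]}; the attribute names are illustrative.)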
+ {description, extra} = + constraint_info(schema_item, attribute_path, constraint_key, constraint_details) + + Logger.warning("SCHEMA BUG: Unknown constraint #{description}") + + add_error( + response, + "constraint_unknown", + "SCHEMA BUG: Unknown constraint #{description}.", + extra + ) + end + end + ) + else + response + end + end + + # Helper to return class or object description and extra map + @spec constraint_info(map(), String.t(), atom(), list[String.t()]) :: {String.t(), map()} + defp constraint_info(schema_item, attribute_path, constraint_key, constraint_details) do + if attribute_path do + # attribute_path exists (is not nil) for objects + { + "\"#{constraint_key}\" from object \"#{schema_item[:name]}\" at \"#{attribute_path}\"", + %{ + attribute_path: attribute_path, + constraint: %{constraint_key => constraint_details}, + object_name: schema_item[:name] + } + } + else + { + "\"#{constraint_key}\" from class \"#{schema_item[:name]}\" uid #{schema_item[:uid]}", + %{ + constraint: %{constraint_key => constraint_details}, + class_uid: schema_item[:uid], + class_name: schema_item[:name] + } + } + end + end + + # Validates attributes of event or object (event_item parameter) + # against schema's class or object (schema_item parameter). + @spec validate_attributes( + map(), + map(), + nil | String.t(), + map(), + list(String.t()), + map() + ) :: map() + defp validate_attributes( + response, + event_item, + parent_attribute_path, + schema_item, + profiles, + dictionary + ) do + schema_attributes = Schema.Utils.apply_profiles(schema_item[:attributes], profiles) + + response + |> validate_attributes_types( + event_item, + parent_attribute_path, + schema_attributes, + profiles, + dictionary + ) + |> validate_attributes_event_item_keys( + event_item, + parent_attribute_path, + schema_item, + schema_attributes + ) + |> validate_attributes_enums(event_item, parent_attribute_path, schema_attributes) + end + + # Validate unknown attributes + # Scan event_item's attributes making sure each exists in schema_item's attributes + @spec validate_attributes_types( + map(), + map(), + nil | String.t(), + list(tuple()), + list(String.t()), + map() + ) :: map() + defp validate_attributes_types( + response, + event_item, + parent_attribute_path, + schema_attributes, + profiles, + dictionary + ) do + Enum.reduce( + schema_attributes, + response, + fn {attribute_key, attribute_details}, response -> + attribute_name = Atom.to_string(attribute_key) + attribute_path = make_attribute_path(parent_attribute_path, attribute_name) + value = event_item[attribute_name] + + validate_attribute( + response, + value, + attribute_path, + attribute_name, + attribute_details, + profiles, + dictionary + ) + end + ) + end + + @spec validate_attributes_event_item_keys( + map(), + map(), + nil | String.t(), + map(), + list(tuple()) + ) :: map() + defp validate_attributes_event_item_keys( + response, + event_item, + parent_attribute_path, + schema_item, + schema_attributes + ) do + Enum.reduce( + Map.keys(event_item), + response, + fn key, response -> + if has_attribute?(schema_attributes, key) do + response + else + attribute_path = make_attribute_path(parent_attribute_path, key) + + {struct_desc, extra} = + if Map.has_key?(schema_item, :uid) do + { + "class \"#{schema_item[:name]}\", uid #{schema_item[:uid]}", + %{ + attribute_path: attribute_path, + attribute: key, + class_uid: schema_item[:uid], + class_name: schema_item[:name] + } + } + else + { + "object \"#{schema_item[:name]}\"", + %{ + attribute_path: 
attribute_path, + attribute: key, + object_name: schema_item[:name] + } + } + end + + add_error( + response, + "attribute_unknown", + "Unknown attribute at \"#{attribute_path}\";" <> + " attribute \"#{key}\" is not defined in #{struct_desc}.", + extra + ) + end + end + ) + end + + @spec has_attribute?(list(tuple()), String.t()) :: boolean() + defp has_attribute?(attributes, name) do + key = String.to_atom(name) + Enum.any?(attributes, fn {attribute_key, _} -> attribute_key == key end) + end + + @spec validate_attributes_enums(map(), map(), nil | String.t(), list(tuple())) :: map() + defp validate_attributes_enums(response, event_item, parent_attribute_path, schema_attributes) do + enum_attributes = Enum.filter(schema_attributes, fn {_ak, ad} -> Map.has_key?(ad, :enum) end) + + Enum.reduce( + enum_attributes, + response, + fn {attribute_key, attribute_details}, response -> + attribute_name = Atom.to_string(attribute_key) + + if Map.has_key?(event_item, attribute_name) do + # The enum values are always strings, so rather than use elaborate conversions, + # we just use Kernel.to_string/1. (The value is type checked elsewhere anyway.) + value = event_item[attribute_name] + value_str = to_string(value) + value_atom = String.to_atom(value_str) + + if Map.has_key?(attribute_details[:enum], value_atom) do + # The enum value is good - check sibling + validate_enum_sibling( + response, + event_item, + parent_attribute_path, + value, + value_atom, + attribute_name, + attribute_details + ) + else + attribute_path = make_attribute_path(parent_attribute_path, attribute_name) + + add_error( + response, + "attribute_enum_value_unknown", + "Unknown enum value at \"#{attribute_path}\";" <> + " value #{inspect(value)} is not defined for enum \"#{attribute_name}\".", + %{ + attribute_path: attribute_path, + attribute: attribute_name, + value: value + } + ) + end + else + response + end + end + ) + end + + @spec validate_enum_sibling( + map(), + map(), + nil | String.t(), + any(), + atom(), + String.t(), + map() + ) :: map() + defp validate_enum_sibling( + response, + event_item, + parent_attribute_path, + event_enum_value, + event_enum_value_atom, + attribute_name, + attribute_details + ) do + if event_enum_value == 99 do + # Enum value is the integer 99 (Other). The enum sibling, if present, can be anything. 
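+      # (For example, severity_id 99 can be paired with any vendor-specific "severity"
+      # string; this pair is used here only to illustrate the sibling convention.)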
+ response + else + sibling_name = attribute_details[:sibling] + + if Map.has_key?(event_item, sibling_name) do + # Sibling is present - make sure the string value matches up + enum_caption = attribute_details[:enum][event_enum_value_atom][:caption] + sibling_value = event_item[sibling_name] + + if enum_caption == sibling_value do + # Sibling has correct value + response + else + enum_attribute_path = make_attribute_path(parent_attribute_path, attribute_name) + sibling_attribute_path = make_attribute_path(parent_attribute_path, sibling_name) + + add_error( + response, + "attribute_enum_sibling_incorrect", + "Attribute \"#{sibling_attribute_path}\" enum sibling value" <> + " #{inspect(sibling_value)} is incorrect for" <> + " enum \"#{enum_attribute_path}\" value #{inspect(event_enum_value)};" <> + " expected \"#{enum_caption}\", got #{inspect(sibling_value)}.", + %{ + attribute_path: sibling_attribute_path, + attribute: sibling_name, + value: sibling_value, + expected_value: enum_caption + } + ) + end + else + # Sibling not present, which is OK + response + end + end + end + + @spec validate_attribute( + map(), + any(), + String.t(), + String.t(), + map(), + list(String.t()), + map() + ) :: map() + defp validate_attribute( + response, + value, + attribute_path, + attribute_name, + attribute_details, + profiles, + dictionary + ) do + if value == nil do + validate_requirement(response, attribute_path, attribute_name, attribute_details) + else + response = + validate_attribute_deprecated( + response, + attribute_path, + attribute_name, + attribute_details + ) + + # Check event_item attribute value type + attribute_type_key = String.to_atom(attribute_details[:type]) + + if attribute_type_key == :object_t or + Map.has_key?(dictionary[:types][:attributes], attribute_type_key) do + if attribute_details[:is_array] do + validate_array( + response, + value, + attribute_path, + attribute_name, + attribute_details, + profiles, + dictionary + ) + else + validate_value( + response, + value, + attribute_path, + attribute_name, + attribute_details, + profiles, + dictionary + ) + end + else + # This should never happen for published schemas (validator will catch this) but + # _could_ happen for a schema that's in development and presumably running on a + # local / private OCSF Server instance. 
+ Logger.warning( + "SCHEMA BUG: Type \"#{attribute_type_key}\" is not defined in dictionary" <> + " at attribute path \"#{attribute_path}\"" + ) + + add_error( + response, + "schema_bug_type_missing", + "SCHEMA BUG: Type \"#{attribute_type_key}\" is not defined in dictionary.", + %{ + attribute_path: attribute_path, + attribute: attribute_name, + type: attribute_type_key, + value: value + } + ) + end + end + end + + defp validate_requirement(response, attribute_path, attribute_name, attribute_details) do + case attribute_details[:requirement] do + "required" -> + add_error_required_attribute_missing(response, attribute_path, attribute_name) + + "recommended" -> + add_warning_recommended_attribute_missing(response, attribute_path, attribute_name) + + _ -> + response + end + end + + # validate an attribute whose value should be an array (is_array: true) + @spec validate_array(map(), any(), String.t(), String.t(), map(), list(String.t()), map()) :: + map() + defp validate_array( + response, + value, + attribute_path, + attribute_name, + attribute_details, + profiles, + dictionary + ) do + if is_list(value) do + {response, _} = + Enum.reduce( + value, + {response, 0}, + fn element_value, {response, index} -> + { + validate_value( + response, + element_value, + make_attribute_path_array_element(attribute_path, index), + attribute_name, + attribute_details, + profiles, + dictionary + ), + index + 1 + } + end + ) + + response + else + add_error_wrong_type( + response, + attribute_path, + attribute_name, + value, + "array of #{attribute_details[:type]}" + ) + end + end + + # validate a single value or element of an array (attribute with is_array: true) + @spec validate_value( + map(), + any(), + String.t(), + String.t(), + map(), + list(String.t()), + map() + ) :: map() + defp validate_value( + response, + value, + attribute_path, + attribute_name, + attribute_details, + profiles, + dictionary + ) do + attribute_type = attribute_details[:type] + + if attribute_type == "object_t" do + # object_t is a marker added by the schema compile to make it easy to check if attribute + # is an OCSF object (otherwise we would need to notice that the attribute type isn't a + # data dictionary type) + object_type = attribute_details[:object_type] + + if is_map(value) do + # Drill in to object + validate_map_against_object( + response, + value, + attribute_path, + attribute_name, + Schema.object(object_type), + profiles, + dictionary + ) + else + add_error_wrong_type( + response, + attribute_path, + attribute_name, + value, + "#{object_type} (object)" + ) + end + else + validate_value_against_dictionary_type( + response, + value, + attribute_path, + attribute_name, + attribute_details, + dictionary + ) + end + end + + @spec validate_map_against_object( + map(), + map(), + String.t(), + String.t(), + map(), + list(String.t()), + map() + ) :: map() + defp validate_map_against_object( + response, + event_object, + attribute_path, + attribute_name, + schema_object, + profiles, + dictionary + ) do + response + |> validate_object_deprecated(attribute_path, attribute_name, schema_object) + |> validate_attributes(event_object, attribute_path, schema_object, profiles, dictionary) + |> validate_constraints(event_object, schema_object, attribute_path) + end + + @spec validate_object_deprecated(map(), String.t(), String.t(), map()) :: map() + defp validate_object_deprecated(response, attribute_path, attribute_name, schema_object) do + if Map.has_key?(schema_object, :"@deprecated") do + add_warning_object_deprecated(response, 
attribute_path, attribute_name, schema_object) + else + response + end + end + + @spec validate_value_against_dictionary_type( + map(), + any(), + String.t(), + String.t(), + map(), + map() + ) :: map() + defp validate_value_against_dictionary_type( + response, + value, + attribute_path, + attribute_name, + attribute_details, + dictionary + ) do + attribute_type_key = String.to_atom(attribute_details[:type]) + dictionary_types = dictionary[:types][:attributes] + dictionary_type = dictionary_types[attribute_type_key] + + {primitive_type, expected_type, expected_type_extra} = + if Map.has_key?(dictionary_type, :type) do + # This is a subtype (e.g., username_t, a subtype of string_t) + primitive_type = String.to_atom(dictionary_type[:type]) + {primitive_type, attribute_type_key, " (#{primitive_type})"} + else + # This is a primitive type + {attribute_type_key, attribute_type_key, ""} + end + + case primitive_type do + :boolean_t -> + if is_boolean(value) do + validate_type_values( + response, + value, + attribute_path, + attribute_name, + attribute_type_key, + dictionary_types + ) + else + add_error_wrong_type( + response, + attribute_path, + attribute_name, + value, + expected_type, + expected_type_extra + ) + end + + :float_t -> + if is_float(value) do + response + |> validate_number_range( + value, + attribute_path, + attribute_name, + attribute_type_key, + dictionary_types + ) + |> validate_type_values( + value, + attribute_path, + attribute_name, + attribute_type_key, + dictionary_types + ) + else + add_error_wrong_type( + response, + attribute_path, + attribute_name, + value, + expected_type, + expected_type_extra + ) + end + + :integer_t -> + if is_integer_t(value) do + response + |> validate_number_range( + value, + attribute_path, + attribute_name, + attribute_type_key, + dictionary_types + ) + |> validate_type_values( + value, + attribute_path, + attribute_name, + attribute_type_key, + dictionary_types + ) + else + add_error_wrong_type( + response, + attribute_path, + attribute_name, + value, + expected_type, + expected_type_extra + ) + end + + :json_t -> + response + + :long_t -> + if is_long_t(value) do + response + |> validate_number_range( + value, + attribute_path, + attribute_name, + attribute_type_key, + dictionary_types + ) + |> validate_type_values( + value, + attribute_path, + attribute_name, + attribute_type_key, + dictionary_types + ) + else + add_error_wrong_type( + response, + attribute_path, + attribute_name, + value, + expected_type, + expected_type_extra + ) + end + + :string_t -> + if is_binary(value) do + response + |> validate_string_max_len( + value, + attribute_path, + attribute_name, + attribute_type_key, + dictionary_types + ) + |> validate_string_regex( + value, + attribute_path, + attribute_name, + attribute_type_key, + dictionary_types + ) + |> validate_type_values( + value, + attribute_path, + attribute_name, + attribute_type_key, + dictionary_types + ) + else + add_error_wrong_type( + response, + attribute_path, + attribute_name, + value, + expected_type, + expected_type_extra + ) + end + + _ -> + # Unhandled type (schema bug) + # This should never happen for published schemas (ocsf-validator catches this) but + # _could_ happen for a schema that's in development or with a private extension, + # and presumably running on a local / private OCSF Server instance. 
+ Logger.warning( + "SCHEMA BUG: Unknown primitive type \"#{primitive_type}\"" <> + " at attribute path \"#{attribute_path}\"" + ) + + add_error( + response, + "schema_bug_primitive_type_unknown", + "SCHEMA BUG: Unknown primitive type \"#{primitive_type}\".", + %{ + attribute_path: attribute_path, + attribute: attribute_name, + type: attribute_type_key, + value: value + } + ) + end + end + + @spec validate_type_values( + map(), + any(), + String.t(), + String.t(), + atom(), + map() + ) :: map() + defp validate_type_values( + response, + value, + attribute_path, + attribute_name, + attribute_type_key, + dictionary_types + ) do + dictionary_type = dictionary_types[attribute_type_key] + + cond do + Map.has_key?(dictionary_type, :values) -> + # This is a primitive type or subtype with :values + values = dictionary_type[:values] + + if Enum.any?(values, fn v -> value == v end) do + response + else + add_error( + response, + "attribute_value_not_in_type_values", + "Attribute \"#{attribute_path}\" value" <> + " is not in type \"#{attribute_type_key}\" list of allowed values.", + %{ + attribute_path: attribute_path, + attribute: attribute_name, + type: attribute_type_key, + value: value, + allowed_values: values + } + ) + end + + Map.has_key?(dictionary_type, :type) -> + # This is a subtype, so check super type + super_type_key = String.to_atom(dictionary_type[:type]) + super_type = dictionary_types[super_type_key] + + if Map.has_key?(super_type, :values) do + values = super_type[:values] + + if Enum.any?(values, fn v -> value == v end) do + response + else + add_error( + response, + "attribute_value_not_in_super_type_values", + "Attribute \"#{attribute_path}\", type \"#{attribute_type_key}\"," <> + " value is not in super type \"#{super_type_key}\" list of allowed values.", + %{ + attribute_path: attribute_path, + attribute: attribute_name, + super_type: super_type_key, + type: attribute_type_key, + value: value, + allowed_values: values + } + ) + end + else + response + end + + true -> + response + end + end + + # Validate a number against a possible range constraint. + # If attribute_type_key refers to a subtype, the subtype is checked first, and if the subtype + # doesn't have a range, the supertype is checked. 
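+  #
+  # For example, a subtype such as port_t (integer_t with range [0, 65535]) is checked
+  # against its own range, while a subtype that declares no range falls back to the
+  # integer_t range. (port_t is used here purely as an illustration.)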
+ @spec validate_number_range( + map(), + float() | integer(), + String.t(), + String.t(), + atom(), + map() + ) :: map() + defp validate_number_range( + response, + value, + attribute_path, + attribute_name, + attribute_type_key, + dictionary_types + ) do + dictionary_type = dictionary_types[attribute_type_key] + + cond do + Map.has_key?(dictionary_type, :range) -> + # This is a primitive type or subtype with a range + [low, high] = dictionary_type[:range] + + if value < low or value > high do + add_error( + response, + "attribute_value_exceeds_range", + "Attribute \"#{attribute_path}\" value" <> + " is outside type \"#{attribute_type_key}\" range of #{low} to #{high}.", + %{ + attribute_path: attribute_path, + attribute: attribute_name, + type: attribute_type_key, + value: value, + range: [low, high] + } + ) + else + response + end + + Map.has_key?(dictionary_type, :type) -> + # This is a subtype, so check super type + super_type_key = String.to_atom(dictionary_type[:type]) + super_type = dictionary_types[super_type_key] + + if Map.has_key?(super_type, :range) do + [low, high] = super_type[:range] + + if value < low or value > high do + add_error( + response, + "attribute_value_exceeds_super_type_range", + "Attribute \"#{attribute_path}\", type \"#{attribute_type_key}\"," <> + " value is outside super type \"#{super_type_key}\" range of #{low} to #{high}.", + %{ + attribute_path: attribute_path, + attribute: attribute_name, + super_type: super_type_key, + type: attribute_type_key, + value: value, + super_type_range: [low, high] + } + ) + else + response + end + else + response + end + + true -> + response + end + end + + # Validate a string against a possible max_len constraint. + # If attribute_type_key refers to a subtype, the subtype is checked first, and if the subtype + # doesn't have a max_len, the supertype is checked. 
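+  #
+  # For example, a hypothetical subtype defined as %{type: "string_t", max_len: 64}
+  # rejects longer values with its own limit; a subtype without :max_len defers to its
+  # supertype's :max_len, when one is defined.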
+ @spec validate_string_max_len( + map(), + String.t(), + String.t(), + String.t(), + atom(), + map() + ) :: map() + defp validate_string_max_len( + response, + value, + attribute_path, + attribute_name, + attribute_type_key, + dictionary_types + ) do + dictionary_type = dictionary_types[attribute_type_key] + + cond do + Map.has_key?(dictionary_type, :max_len) -> + # This is a primitive type or subtype with a range + max_len = dictionary_type[:max_len] + len = String.length(value) + + if len > max_len do + add_error( + response, + "attribute_value_exceeds_max_len", + "Attribute \"#{attribute_path}\" value length of #{len}" <> + " exceeds type \"#{attribute_type_key}\" max length #{max_len}.", + %{ + attribute_path: attribute_path, + attribute: attribute_name, + type: attribute_type_key, + length: len, + max_len: max_len, + value: value + } + ) + else + response + end + + Map.has_key?(dictionary_type, :type) -> + # This is a subtype, so check super type + super_type_key = String.to_atom(dictionary_type[:type]) + super_type = dictionary_types[super_type_key] + + if Map.has_key?(super_type, :max_len) do + max_len = super_type[:max_len] + len = String.length(value) + + if len > max_len do + add_error( + response, + "attribute_value_exceeds_super_type_max_len", + "Attribute \"#{attribute_path}\", type \"#{attribute_type_key}\"," <> + " value length #{len} exceeds super type \"#{super_type_key}\"" <> + " max length #{max_len}.", + %{ + attribute_path: attribute_path, + attribute: attribute_name, + super_type: super_type_key, + type: attribute_type_key, + length: len, + max_len: max_len, + value: value + } + ) + else + response + end + else + response + end + + true -> + response + end + end + + defp validate_string_regex( + response, + value, + attribute_path, + attribute_name, + attribute_type_key, + dictionary_types + ) do + dictionary_type = dictionary_types[attribute_type_key] + + cond do + Map.has_key?(dictionary_type, :regex) -> + # This is a primitive type or subtype with a range + pattern = dictionary_type[:regex] + + case Regex.compile(pattern) do + {:ok, regex} -> + if Regex.match?(regex, value) do + response + else + add_error( + response, + "attribute_value_regex_not_matched", + "Attribute \"#{attribute_path}\" value" <> + " does not match regex of type \"#{attribute_type_key}\".", + %{ + attribute_path: attribute_path, + attribute: attribute_name, + type: attribute_type_key, + regex: regex, + value: value + } + ) + end + + {:error, {message, position}} -> + Logger.warning( + "SCHEMA BUG: Type \"#{attribute_type_key}\" specifies an invalid regex:" <> + " \"#{message}\" at position #{position}, attribute path \"#{attribute_path}\"" + ) + + add_error( + response, + "schema_bug_type_regex_invalid", + "SCHEMA BUG: Type \"#{attribute_type_key}\" specifies an invalid regex:" <> + " \"#{message}\" at position #{position}.", + %{ + attribute_path: attribute_path, + attribute: attribute_name, + type: attribute_type_key, + regex: pattern, + regex_error_message: to_string(message), + regex_error_position: position + } + ) + end + + Map.has_key?(dictionary_type, :type) -> + # This is a subtype, so check super type + super_type_key = String.to_atom(dictionary_type[:type]) + super_type = dictionary_types[super_type_key] + + if Map.has_key?(super_type, :regex) do + pattern = dictionary_type[:regex] + + case Regex.compile(pattern) do + {:ok, regex} -> + if Regex.match?(regex, value) do + response + else + add_error( + response, + "attribute_value_super_type_regex_not_matched", + "Attribute 
\"#{attribute_path}\", type \"#{attribute_type_key}\"," <> + " value does not match regex of super type \"#{super_type_key}\".", + %{ + attribute_path: attribute_path, + attribute: attribute_name, + super_type: super_type_key, + type: attribute_type_key, + regex: pattern, + value: value + } + ) + end + + {:error, {message, position}} -> + Logger.warning( + "SCHEMA BUG: Type \"#{super_type_key}\"" <> + " (super type of \"#{attribute_type_key}\") specifies an invalid regex:" <> + " \"#{message}\" at position #{position}, attribute path \"#{attribute_path}\"" + ) + + add_error( + response, + "schema_bug_type_regex_invalid", + "SCHEMA BUG: Type \"#{super_type_key}\"" <> + " (super type of \"#{attribute_type_key}\") specifies an invalid regex:" <> + " \"#{message}\" at position #{position}.", + %{ + attribute_path: attribute_path, + attribute: attribute_name, + type: super_type_key, + regex: pattern, + regex_error_message: to_string(message), + regex_error_position: position + } + ) + end + else + response + end + + true -> + response + end + end + + defp validate_attribute_deprecated(response, attribute_path, attribute_name, attribute_details) do + if Map.has_key?(attribute_details, :"@deprecated") do + add_warning_attribute_deprecated( + response, + attribute_path, + attribute_name, + attribute_details[:"@deprecated"] + ) + else + response + end + end + + @spec make_attribute_path(nil | String.t(), String.t()) :: String.t() + defp make_attribute_path(parent_attribute_path, attribute_name) do + if parent_attribute_path != nil and parent_attribute_path != "" do + "#{parent_attribute_path}.#{attribute_name}" + else + attribute_name + end + end + + @spec make_attribute_path_array_element(String.t(), integer()) :: String.t() + defp make_attribute_path_array_element(attribute_path, index) do + "#{attribute_path}[#{index}]" + end + + @spec new_response(map()) :: map() + defp new_response(event) do + metadata = event["metadata"] + + if is_map(metadata) do + uid = metadata["uid"] + + if is_binary(uid) do + %{uid: uid} + else + %{} + end + else + %{} + end + end + + @spec add_error_required_attribute_missing(map(), String.t(), String.t()) :: map() + defp add_error_required_attribute_missing(response, attribute_path, attribute_name) do + add_error( + response, + "attribute_required_missing", + "Required attribute \"#{attribute_path}\" is missing.", + %{attribute_path: attribute_path, attribute: attribute_name} + ) + end + + @spec add_warning_recommended_attribute_missing(map(), String.t(), String.t()) :: map() + defp add_warning_recommended_attribute_missing(response, attribute_path, attribute_name) do + add_warning( + response, + "attribute_recommended_missing", + "Recommended attribute \"#{attribute_path}\" is missing.", + %{attribute_path: attribute_path, attribute: attribute_name} + ) + end + + @spec add_error_wrong_type( + map(), + String.t(), + String.t(), + any(), + atom() | String.t(), + String.t() + ) :: map() + defp add_error_wrong_type( + response, + attribute_path, + attribute_name, + value, + expected_type, + expected_type_extra \\ "" + ) do + {value_type, value_type_extra} = type_of(value) + + add_error( + response, + "attribute_wrong_type", + "Attribute \"#{attribute_path}\" value has wrong type;" <> + " expected #{expected_type}#{expected_type_extra}, got #{value_type}#{value_type_extra}.", + %{ + attribute_path: attribute_path, + attribute: attribute_name, + value: value, + value_type: value_type, + expected_type: expected_type + } + ) + end + + @spec add_warning_class_deprecated(map(), 
map()) :: map() + defp add_warning_class_deprecated(response, class) do + deprecated = class[:"@deprecated"] + + add_warning( + response, + "class_deprecated", + "Class #{class[:uid]} \"#{class[:name]}\" is deprecated. #{deprecated[:message]}", + %{class_uid: class[:uid], class_name: class[:name], since: deprecated[:since]} + ) + end + + @spec add_warning_attribute_deprecated(map(), String.t(), String.t(), map()) :: map() + defp add_warning_attribute_deprecated( + response, + attribute_path, + attribute_name, + attribute_details + ) do + deprecated = attribute_details[:"@deprecated"] + + add_warning( + response, + "attribute_deprecated", + "Dictionary attribute \"#{attribute_name}\" is deprecated. #{deprecated[:message]}", + %{attribute_path: attribute_path, attribute: attribute_name, since: deprecated[:since]} + ) + end + + @spec add_warning_object_deprecated(map(), String.t(), String.t(), map()) :: map() + defp add_warning_object_deprecated(response, attribute_path, attribute_name, object) do + deprecated = object[:"@deprecated"] + + add_warning( + response, + "object_deprecated", + "Object \"#{object[:name]}\" is deprecated. #{deprecated[:message]}", + %{ + attribute_path: attribute_path, + attribute: attribute_name, + object_name: object[:name], + since: deprecated[:since] + } + ) + end + + @spec add_error(map(), String.t(), String.t(), map()) :: map() + defp add_error(response, error_type, message, extra) do + _add(response, :errors, :error, error_type, message, extra) + end + + @spec add_warning(map(), String.t(), String.t(), map()) :: map() + defp add_warning(response, warning_type, message, extra) do + _add(response, :warnings, :warning, warning_type, message, extra) + end + + @spec _add(map(), atom(), atom(), String.t(), String.t(), map()) :: map() + defp _add(response, group_key, type_key, type, message, extra) do + item = Map.merge(extra, %{type_key => type, message: message}) + Map.update(response, group_key, [item], fn items -> [item | items] end) + end + + @spec finalize_response(map()) :: map() + defp finalize_response(response) do + # Reverse errors and warning so they are the order they were found, + # which is (probably) more sensible than the reverse + errors = lenient_reverse(response[:errors]) + warnings = lenient_reverse(response[:warnings]) + + Map.merge(response, %{ + error_count: length(errors), + warning_count: length(warnings), + errors: errors, + warnings: warnings + }) + end + + defp lenient_reverse(nil), do: [] + defp lenient_reverse(list) when is_list(list), do: Enum.reverse(list) + + # Returns approximate OCSF type as a string for a value parsed from JSON. This is intended for + # use when an attribute's type is incorrect. For integer values, this returns smallest type that + # can be used for value. 
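+  #
+  # For example (as implemented below):
+  #   type_of(42)     -> {"integer_t", " (integer in range of -2^63 to 2^63 - 1)"}
+  #   type_of(1.5)    -> {"float_t", ""}
+  #   type_of("abc")  -> {"string_t", ""}
+  #   type_of([1, 2]) -> {"array", ""}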
+ @spec type_of(any()) :: {String.t(), String.t()} + defp type_of(v) do + cond do + is_float(v) -> + # Elixir / Erlang floats are 64-bit IEEE floating point numbers, same as OCSF + {"float_t", ""} + + is_integer(v) -> + # Elixir / Erlang has arbitrary-precision integers, so we need to test the range + cond do + is_integer_t(v) -> + {"integer_t", " (integer in range of -2^63 to 2^63 - 1)"} + + is_long_t(v) -> + {"long_t", " (integer in range of -2^127 to 2^127 - 1)"} + + true -> + {"big integer", " (outside of long_t range of -2^127 to 2^127 - 1)"} + end + + is_boolean(v) -> + {"boolean_t", ""} + + is_binary(v) -> + {"string_t", ""} + + is_list(v) -> + {"array", ""} + + is_map(v) -> + {"object", ""} + + v == nil -> + {"null", ""} + + true -> + {"unknown type", ""} + end + end + + @min_int -Integer.pow(2, 63) + @max_int Integer.pow(2, 63) - 1 + + # Tests if value is an integer number in the OCSF integer_t range. + defp is_integer_t(v) when is_integer(v), do: v >= @min_int && v <= @max_int + defp is_integer_t(_), do: false + + @min_long -Integer.pow(2, 127) + @max_long Integer.pow(2, 127) - 1 + + # Tests if value is an integer number in the OCSF long_t range. + defp is_long_t(v) when is_integer(v), do: v >= @min_long && v <= @max_long + defp is_long_t(_), do: false +end diff --git a/lib/schema_web/controllers/schema_controller.ex b/lib/schema_web/controllers/schema_controller.ex index adc34d5..97ac2c8 100644 --- a/lib/schema_web/controllers/schema_controller.ex +++ b/lib/schema_web/controllers/schema_controller.ex @@ -1063,6 +1063,7 @@ defmodule SchemaWeb.SchemaController do produces("application/json") tag("Tools") + # TODO: This doesn't include array of events. parameters do data(:body, PhoenixSwagger.Schema.ref(:Event), "The event data to be validated", required: true @@ -1078,11 +1079,11 @@ defmodule SchemaWeb.SchemaController do case data["_json"] do # Validate a single events nil -> - Schema.Inspector.validate(data) + Schema.Validator.validate(data) # Validate a list of events list when is_list(list) -> - Enum.map(list, &Task.async(fn -> Schema.Inspector.validate(&1) end)) + Enum.map(list, &Task.async(fn -> Schema.Validator.validate(&1) end)) |> Enum.map(&Task.await/1) # some other json data @@ -1093,6 +1094,73 @@ defmodule SchemaWeb.SchemaController do send_json_resp(conn, result) end + @doc """ + Validate event data, version 2. + A single event is encoded as a JSON object and multiple events are encoded as JSON array of + object. + post /api/v2/validate + """ + swagger_path :validate2 do + post("/api/v2/validate") + summary("Validate Event (version 2)") + + # TODO: + description( + "The primary objective of this API is to validate the provided event data against the OCSF" <> + " schema. Each event is represented as a JSON object, while multiple events are encoded" <> + " as a JSON array of objects." + ) + + produces("application/json") + tag("Tools") + + # TODO: This doesn't include array of events (same as v1 API) + parameters do + data(:body, PhoenixSwagger.Schema.ref(:Event), "The event data to be validated", + required: true + ) + end + + response(200, "Success") + end + + @spec validate2(Plug.Conn.t(), map) :: Plug.Conn.t() + def validate2(conn, data) do + # Phoenix's Plug.Parsers.JSON puts JSON that isn't a map into a _json key + # (for its own technical reasons). 
See: + # https://hexdocs.pm/plug/Plug.Parsers.JSON.html + # https://stackoverflow.com/questions/74931653/phoenix-wraps-json-request-in-a-map-with-json-key + {status, result} = + case data["_json"] do + nil -> + # This means we have a map, so validate a single event + {200, Schema.Validator2.validate(data)} + + list when is_list(list) -> + # Validate a list of events. First make sure we have a list of maps. + if Enum.all?(list, &is_map/1) do + {200, Schema.Validator2.validate(list)} + else + {400, + %{ + error: + "Unexpected array element type." <> + " The request JSON must be an object or array of objects." + }} + end + + # some other json data + _ -> + {400, + %{ + error: + "Unexpected primitive type. The request JSON must be an object or array of objects." + }} + end + + send_json_resp(conn, status, result) + end + # -------------------------- # Request sample data API's # -------------------------- @@ -1151,6 +1219,8 @@ defmodule SchemaWeb.SchemaController do end defp sample_class(conn, id, options) do + # TODO: honor constraints + extension = extension(options) profiles = profiles(options) |> parse_options() @@ -1210,6 +1280,8 @@ defmodule SchemaWeb.SchemaController do @spec sample_object(Plug.Conn.t(), map()) :: Plug.Conn.t() def sample_object(conn, %{"id" => id} = options) do + # TODO: honor constraints + extension = extension(options) profiles = profiles(options) |> parse_options() @@ -1228,13 +1300,13 @@ defmodule SchemaWeb.SchemaController do end end - defp send_json_resp(conn, error, data) do + defp send_json_resp(conn, status, data) do conn |> put_resp_content_type("application/json") |> put_resp_header("access-control-allow-origin", "*") |> put_resp_header("access-control-allow-headers", "content-type") |> put_resp_header("access-control-allow-methods", "POST, GET, OPTIONS") - |> send_resp(error, Jason.encode!(data)) + |> send_resp(status, Jason.encode!(data)) end defp send_json_resp(conn, data) do diff --git a/lib/schema_web/router.ex b/lib/schema_web/router.ex index 1f81f54..72d0675 100644 --- a/lib/schema_web/router.ex +++ b/lib/schema_web/router.ex @@ -88,6 +88,7 @@ defmodule SchemaWeb.Router do post "/enrich", SchemaController, :enrich post "/translate", SchemaController, :translate post "/validate", SchemaController, :validate + post "/v2/validate", SchemaController, :validate2 end scope "/schema", SchemaWeb do diff --git a/mix.exs b/mix.exs index f0f9da3..d24656b 100644 --- a/mix.exs +++ b/mix.exs @@ -10,7 +10,7 @@ defmodule Schema.MixProject do use Mix.Project - @version "2.70.4" + @version "2.71.0" def project do build = System.get_env("GITHUB_RUN_NUMBER") || "SNAPSHOT" From 58b44fd8c29736c1d8ffba8b346ad9002aa1aa7c Mon Sep 17 00:00:00 2001 From: Rick Mouritzen Date: Tue, 4 Jun 2024 20:09:24 -0700 Subject: [PATCH 2/7] Change observable type_id caption so they match up better during validation of observables. Add (limited) observable validation. 
--- lib/schema.ex | 36 +++++- lib/schema/cache.ex | 43 ++++--- lib/schema/validator2.ex | 183 +++++++++++++++++++++++------- lib/schema_web/views/page_view.ex | 25 +++- 4 files changed, 220 insertions(+), 67 deletions(-) diff --git a/lib/schema.ex b/lib/schema.ex index ba3b5e3..1659975 100644 --- a/lib/schema.ex +++ b/lib/schema.ex @@ -556,12 +556,44 @@ defmodule Schema do defp reduce_attributes(data) do reduce_data(data) |> Map.update(:attributes, [], fn attributes -> - Enum.into(attributes, %{}, fn {name, attribute} -> - {name, reduce_data(attribute)} + Enum.into(attributes, %{}, fn {attribute_name, attribute_details} -> + {attribute_name, reduce_attribute(attribute_details)} end) end) end + defp reduce_attribute(attribute_details) do + attribute_details + |> filter_internal() + |> reduce_enum() + end + + defp filter_internal(m) do + Map.filter(m, fn {key, _} -> + s = Atom.to_string(key) + not String.starts_with?(s, "_") + end) + end + + defp reduce_enum(attribute_details) do + if Map.has_key?(attribute_details, :enum) do + Map.update!(attribute_details, :enum, fn enum -> + Enum.map( + enum, + fn {enum_value_key, enum_value_details} -> + { + enum_value_key, + filter_internal(enum_value_details) + } + end + ) + |> Enum.into(%{}) + end) + else + attribute_details + end + end + @spec reduce_class(map) :: map def reduce_class(data) do delete_attributes(data) |> delete_associations() diff --git a/lib/schema/cache.ex b/lib/schema/cache.ex index e17fd84..fe036eb 100644 --- a/lib/schema/cache.ex +++ b/lib/schema/cache.ex @@ -492,14 +492,16 @@ defmodule Schema.Cache do observable_type_id_map else + observable_kind = "#{kind}-Specific Attribute" + Map.put( observable_type_id_map, observable_type_id, - %{ - caption: "#{caption} #{kind}: #{attribute_key} (#{kind}-Specific Attribute)", - description: - "#{kind}-specific attribute \"#{attribute_key}\" for the #{caption} #{kind}." - } + make_observable_enum_entry( + "#{caption} #{kind}: #{attribute_key}", + "#{kind}-specific attribute \"#{attribute_key}\" for the #{caption} #{kind}.", + observable_kind + ) ) end else @@ -534,16 +536,16 @@ defmodule Schema.Cache do observable_type_id_map else + observable_kind = "#{kind}-Specific Attribute" + Map.put( observable_type_id_map, observable_type_id, - %{ - caption: - "#{caption} #{kind}: #{attribute_path} (#{kind}-Specific Attribute Path)", - description: - "#{kind}-specific attribute on path \"#{attribute_path}\"" <> - " for the #{caption} #{kind}." - } + make_observable_enum_entry( + "#{caption} #{kind}: #{attribute_path}", + "#{kind}-specific attribute \"#{attribute_path}\" for the #{caption} #{kind}.", + observable_kind + ) ) end end @@ -636,7 +638,7 @@ defmodule Schema.Cache do Map.put( observable_type_id_map, observable_type_id, - %{caption: "#{caption} (Object)", description: description} + make_observable_enum_entry(caption, description, "Object") ) end else @@ -673,10 +675,7 @@ defmodule Schema.Cache do Map.put( observable_type_id_map, observable_type_id, - %{ - caption: "#{item[:caption]} (#{kind})", - description: item[:description] - } + make_observable_enum_entry(item[:caption], item[:description], kind) ) end else @@ -689,6 +688,16 @@ defmodule Schema.Cache do end end + # make an observable type_id enum entry + @spec make_observable_enum_entry(String.t(), String.t(), String.t()) :: map() + defp make_observable_enum_entry(caption, description, observable_kind) do + %{ + caption: caption, + description: "Observable by #{observable_kind}.
#{description}", + _observable_kind: observable_kind + } + end + @spec find_item_caption_and_description(map(), atom(), map() | nil) :: {String.t(), String.t()} defp find_item_caption_and_description(items, item_key, item) when is_map(items) and is_atom(item_key) do diff --git a/lib/schema/validator2.ex b/lib/schema/validator2.ex index acbe120..de92def 100644 --- a/lib/schema/validator2.ex +++ b/lib/schema/validator2.ex @@ -149,6 +149,10 @@ defmodule Schema.Validator2 do |> validate_version(event) |> validate_type_uid(event) |> validate_constraints(event, class) + |> validate_observables(event, class, profiles) + + # TODO: Move observable type description out of observable enum caption. This allows existing + # observable "type" fields to validate properly. end @spec validate_class_deprecated(map(), map()) :: map() @@ -176,7 +180,7 @@ defmodule Schema.Validator2 do "version_incorrect", "Incorrect version at \"metadata.version\"; value of \"#{version}\"" <> " does not match schema version \"#{schema_version}\"." <> - " This can also result in incorrect and/or missing validation messages.", + " This can result in incorrect validation messages.", %{ attribute_path: "metadata.version", attribute: "version", @@ -304,7 +308,7 @@ defmodule Schema.Validator2 do end # Helper to return class or object description and extra map - @spec constraint_info(map(), String.t(), atom(), list[String.t()]) :: {String.t(), map()} + @spec constraint_info(map(), String.t(), atom(), list(String.t())) :: {String.t(), map()} defp constraint_info(schema_item, attribute_path, constraint_key, constraint_details) do if attribute_path do # attribute_path exists (is not nil) for objects @@ -328,6 +332,94 @@ defmodule Schema.Validator2 do end end + @spec validate_observables(map(), map(), map(), list(String.t())) :: map() + defp validate_observables(response, event, class, profiles) do + # TODO: There is no check of the "type_id" values. This gets slightly tricky (but possible). + + # TODO: There is no check to make sure the values of "name" refers to something actually in the + # event and has same (stringified) value. This would be a tricky check due to navigation + # through arrays (though possible with some effort). 
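+    #
+    # For reference, an observable entry typically looks like (values are hypothetical):
+    #   %{"name" => "actor.user.name", "type_id" => 4, "value" => "alice"}
+    # where "name" is a dotted attribute path into the event, resolved below.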
+ + observables = event["observables"] + + if is_list(observables) do + {response, _} = + Enum.reduce( + observables, + {response, 0}, + fn observable, {response, index} -> + if is_map(observable) do + name = observable["name"] + + if is_binary(name) do + referenced_definition = + get_referenced_definition(String.split(name, "."), class, profiles) + + if referenced_definition do + # At this point we could check the definition or dictionary to make sure + # this observable is correctly defined, though that is tricky + {response, index + 1} + else + attribute_path = + make_attribute_path_array_element("observables", index) <> ".name" + + { + add_error( + response, + "observable_name_invalid_reference", + "Observable index #{index} \"name\" value \"#{name}\" does not refer to" <> + " an attribute defined in class \"#{class[:name]}\" uid #{class[:uid]}.", + %{ + attribute_path: attribute_path, + attribute: "name", + name: name, + class_uid: class[:uid], + class_name: class[:name] + } + ), + index + 1 + } + end + else + {response, index + 1} + end + else + {response, index + 1} + end + end + ) + + response + else + response + end + end + + @spec get_referenced_definition(list(String.t()), map(), list(String.t())) :: any() + defp get_referenced_definition([key | remaining_keys], schema_item, profiles) do + schema_attributes = Schema.Utils.apply_profiles(schema_item[:attributes], profiles) + key_atom = String.to_atom(key) + + attribute = Enum.find(schema_attributes, fn {a_name, _} -> key_atom == a_name end) + + if attribute do + {_, attribute_details} = attribute + + if Enum.empty?(remaining_keys) do + schema_item + else + if attribute_details[:type] == "object_t" do + object_type = String.to_atom(attribute_details[:object_type]) + get_referenced_definition(remaining_keys, Schema.object(object_type), profiles) + else + nil + end + end + else + nil + end + end + # Validates attributes of event or object (event_item parameter) # against schema's class or object (schema_item parameter). @spec validate_attributes( @@ -356,7 +448,7 @@ defmodule Schema.Validator2 do profiles, dictionary ) - |> validate_attributes_event_item_keys( + |> validate_attributes_unknown_keys( event_item, parent_attribute_path, schema_item, @@ -404,61 +496,68 @@ defmodule Schema.Validator2 do ) end - @spec validate_attributes_event_item_keys( + @spec validate_attributes_unknown_keys( map(), map(), nil | String.t(), map(), list(tuple()) ) :: map() - defp validate_attributes_event_item_keys( + defp validate_attributes_unknown_keys( response, event_item, parent_attribute_path, schema_item, schema_attributes ) do - Enum.reduce( - Map.keys(event_item), - response, - fn key, response -> - if has_attribute?(schema_attributes, key) do - response - else - attribute_path = make_attribute_path(parent_attribute_path, key) + if Enum.empty?(schema_attributes) do + # This is class or object with no attributes defined. This is a special-case that means any + # attributes are allowed. The object type "object" is the current example of this, and is + # directly used by the "unmapped" and "xattributes" attributes as open-ended objects. 
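+      # (For example, an event carrying %{"unmapped" => %{"vendor_code" => 42}} raises no
+      # "attribute_unknown" error for "vendor_code"; that key is hypothetical.)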
+ response + else + Enum.reduce( + Map.keys(event_item), + response, + fn key, response -> + if has_attribute?(schema_attributes, key) do + response + else + attribute_path = make_attribute_path(parent_attribute_path, key) - {struct_desc, extra} = - if Map.has_key?(schema_item, :uid) do - { - "class \"#{schema_item[:name]}\", uid #{schema_item[:uid]}", - %{ - attribute_path: attribute_path, - attribute: key, - class_uid: schema_item[:uid], - class_name: schema_item[:name] + {struct_desc, extra} = + if Map.has_key?(schema_item, :uid) do + { + "class \"#{schema_item[:name]}\" uid #{schema_item[:uid]}", + %{ + attribute_path: attribute_path, + attribute: key, + class_uid: schema_item[:uid], + class_name: schema_item[:name] + } } - } - else - { - "object \"#{schema_item[:name]}\"", - %{ - attribute_path: attribute_path, - attribute: key, - object_name: schema_item[:name] + else + { + "object \"#{schema_item[:name]}\"", + %{ + attribute_path: attribute_path, + attribute: key, + object_name: schema_item[:name] + } } - } - end + end - add_error( - response, - "attribute_unknown", - "Unknown attribute at \"#{attribute_path}\";" <> - " attribute \"#{key}\" is not defined in #{struct_desc}.", - extra - ) + add_error( + response, + "attribute_unknown", + "Unknown attribute at \"#{attribute_path}\";" <> + " attribute \"#{key}\" is not defined in #{struct_desc}.", + extra + ) + end end - end - ) + ) + end end @spec has_attribute?(list(tuple()), String.t()) :: boolean() @@ -1463,7 +1562,7 @@ defmodule Schema.Validator2 do add_warning( response, "class_deprecated", - "Class #{class[:uid]} \"#{class[:name]}\" is deprecated. #{deprecated[:message]}", + "Class \"#{class[:name]}\" uid #{class[:uid]}, is deprecated. #{deprecated[:message]}", %{class_uid: class[:uid], class_name: class[:name], since: deprecated[:since]} ) end diff --git a/lib/schema_web/views/page_view.ex b/lib/schema_web/views/page_view.ex index 476a9f4..796dc7e 100644 --- a/lib/schema_web/views/page_view.ex +++ b/lib/schema_web/views/page_view.ex @@ -2,8 +2,6 @@ defmodule SchemaWeb.PageView do alias SchemaWeb.SchemaController use SchemaWeb, :view - require Logger - def class_graph_path(conn, data) do class_name = data[:name] @@ -144,7 +142,7 @@ defmodule SchemaWeb.PageView do if observable_object do observable_object[:attributes][:type_id][:enum] else - nil + {nil, nil} end cond do @@ -153,7 +151,12 @@ defmodule SchemaWeb.PageView do Map.has_key?(entity, :observable) -> observable_type_id = Schema.Utils.observable_type_id_to_atom(entity[:observable]) - {observable_type_id, observable_type_id_map[observable_type_id][:caption]} + enum_details = observable_type_id_map[observable_type_id] + + { + observable_type_id, + "#{enum_details[:caption]} (#{enum_details[:_observable_kind]})" + } Map.has_key?(entity, :type) -> # Check if this is a dictionary type @@ -163,7 +166,12 @@ defmodule SchemaWeb.PageView do cond do type_observable -> observable_type_id = Schema.Utils.observable_type_id_to_atom(type_observable) - {observable_type_id, observable_type_id_map[observable_type_id][:caption]} + enum_details = observable_type_id_map[observable_type_id] + + { + observable_type_id, + "#{enum_details[:caption]} (#{enum_details[:_observable_kind]})" + } Map.has_key?(entity, :object_type) -> # Check if this object is an observable @@ -172,7 +180,12 @@ defmodule SchemaWeb.PageView do if object_observable do observable_type_id = Schema.Utils.observable_type_id_to_atom(object_observable) - {observable_type_id, observable_type_id_map[observable_type_id][:caption]} 
+ enum_details = observable_type_id_map[observable_type_id] + + { + observable_type_id, + "#{enum_details[:caption]} (#{enum_details[:_observable_kind]})" + } else {nil, nil} end From f00ddfad8d109359749d6289c5fe79b48abb1d5a Mon Sep 17 00:00:00 2001 From: Rick Mouritzen Date: Wed, 5 Jun 2024 17:27:20 -0700 Subject: [PATCH 3/7] Change validating array of elements from /api/v2/validate to use of event bundle with /api/v2/validate_bundle. Remove try/rescue pattern from schema_controller.ex as it returns exception message, leaking internal implementation details (a security problem). Improve Swagger docs for new APIs. --- lib/schema/validator2.ex | 121 +++++- .../controllers/schema_controller.ex | 350 +++++++++++------- lib/schema_web/router.ex | 1 + 3 files changed, 325 insertions(+), 147 deletions(-) diff --git a/lib/schema/validator2.ex b/lib/schema/validator2.ex index de92def..463bb45 100644 --- a/lib/schema/validator2.ex +++ b/lib/schema/validator2.ex @@ -13,14 +13,121 @@ defmodule Schema.Validator2 do """ # Implementation note: - # All of the validate_* and add_* functions take a response and return one, possibly updated. + # The validate_* and add_* functions (other than the top level validate/1 and validate_bundle/1 + # functions) take a response and return one, possibly updated. # The overall flow is to examine the event or list of events, and return a validation response. require Logger - @spec validate(map() | list()) :: map() | list(map()) + @spec validate(map()) :: map() def validate(data) when is_map(data), do: validate_event(data, Schema.dictionary()) - def validate(data) when is_list(data), do: validate_events(data, Schema.dictionary()) + + @spec validate_bundle(map()) :: map() + def validate_bundle(bundle) when is_map(bundle) do + bundle_structure = get_bundle_structure() + + # First validate the bundle itself + response = + Enum.reduce( + bundle_structure, + %{}, + fn attribute_tuple, response -> + validate_bundle_attribute(response, bundle, attribute_tuple) + end + ) + + # Check that there are no extra keys in the bundle + response = + Enum.reduce( + bundle, + response, + fn {key, _}, response -> + if Map.has_key?(bundle_structure, key) do + response + else + add_error( + response, + "attribute_unknown", + "Unknown attribute \"#{key}\" in event bundle.", + %{attribute_path: key, attribute: key} + ) + end + end + ) + + # TODO: validate the bundle times and count against events + + # Next validate the events in the bundle + response = validate_bundle_events(response, bundle, Schema.dictionary()) + finalize_response(response) + end + + # Returns structure of an event bundle. 
+ # See "Bundling" here: https://github.com/ocsf/examples/blob/main/encodings/json/README.md + @spec get_bundle_structure() :: map() + defp get_bundle_structure() do + %{ + "events" => {:required, "array", &is_list/1}, + "start_time" => {:optional, "timestamp_t (long_t)", &is_long_t/1}, + "end_time" => {:optional, "timestamp_t (long_t)", &is_long_t/1}, + "start_time_dt" => {:optional, "datetime_t (string_t)", &is_binary/1}, + "end_time_dt" => {:optional, "datetime_t (string_t)", &is_binary/1}, + "count" => {:optional, "integer_t", &is_integer_t/1} + } + end + + @spec validate_bundle_attribute(map(), map(), tuple()) :: map() + defp validate_bundle_attribute( + response, + bundle, + {attribute_name, {requirement, type_name, is_type_fn}} + ) do + if Map.has_key?(bundle, attribute_name) do + value = bundle[attribute_name] + + if is_type_fn.(value) do + response + else + add_error_wrong_type(response, attribute_name, attribute_name, value, type_name) + end + else + if requirement == :required do + add_error_required_attribute_missing(response, attribute_name, attribute_name) + else + response + end + end + end + + @spec validate_bundle_events(map(), map(), map()) :: map() + defp validate_bundle_events(response, bundle, dictionary) do + events = bundle["events"] + + if is_list(events) do + Map.put( + response, + :event_validations, + Enum.map( + events, + fn event -> + if is_map(event) do + validate_event(event, dictionary) + else + {type, type_extra} = type_of(event) + + %{ + error: "Event has wrong type; expected object, got #{type}#{type_extra}.", + type: type, + expected_type: "object" + } + end + end + ) + ) + else + response + end + end @spec validate_event(map(), map()) :: map() defp validate_event(event, dictionary) do @@ -40,11 +147,6 @@ defmodule Schema.Validator2 do finalize_response(response) end - @spec validate_events(list(map()), map()) :: list(map()) - defp validate_events(events, dictionary) do - Enum.map(events, fn event -> validate_event(event, dictionary) end) - end - @spec validate_class_uid_and_return_class(map(), map()) :: {map(), nil | map()} defp validate_class_uid_and_return_class(response, event) do if Map.has_key?(event, "class_uid") do @@ -150,9 +252,6 @@ defmodule Schema.Validator2 do |> validate_type_uid(event) |> validate_constraints(event, class) |> validate_observables(event, class, profiles) - - # TODO: Move observable type description out of observable enum caption. This allows existing - # observable "type" fields to validate properly. end @spec validate_class_deprecated(map(), map()) :: map() diff --git a/lib/schema_web/controllers/schema_controller.ex b/lib/schema_web/controllers/schema_controller.ex index 97ac2c8..c73a3ae 100644 --- a/lib/schema_web/controllers/schema_controller.ex +++ b/lib/schema_web/controllers/schema_controller.ex @@ -143,6 +143,112 @@ defmodule SchemaWeb.SchemaController do title("Event") description("An OCSF formatted event object.") type(:object) + end, + ValidationError: + swagger_schema do + title("Validation Error") + description("A validation error. Additional error-specific properties will exist.") + + properties do + error(:string, "Error code") + message(:string, "Human readable error message") + end + + additional_properties(true) + end, + ValidationWarning: + swagger_schema do + title("Validation Warning") + description("A validation warning. 
Additional warning-specific properties will exist.") + + properties do + error(:string, "Warning code") + message(:string, "Human readable warning message") + end + + additional_properties(true) + end, + EventValidation: + swagger_schema do + title("Event Validation") + description("The errors and and warnings found when validating an event.") + + properties do + uid(:string, "The event's metadata.uid, if available") + error(:string, "Overall error message") + + errors( + :array, + "Validation errors", + items: %PhoenixSwagger.Schema{"$ref": "#/definitions/ValidationError"} + ) + + warnings( + :array, + "Validation warnings", + items: %PhoenixSwagger.Schema{"$ref": "#/definitions/ValidationWarning"} + ) + + error_count(:integer, "Count of errors") + warning_count(:integer, "Count of warnings") + end + + additional_properties(false) + end, + EventBundle: + swagger_schema do + title("Event Bundle") + description("A bundle of events.") + + properties do + events( + :array, + "Array of events.", + items: %PhoenixSwagger.Schema{"$ref": "#/definitions/Event"}, + required: true + ) + + start_time(:integer, "Earliest event time in Epoch milliseconds (OCSF timestamp_t)") + end_time(:integer, "Latest event time in Epoch milliseconds (OCSF timestamp_t)") + start_time_dt(:string, "Earliest event time in RFC 3339 format (OCSF datetime_t)") + end_time_dt(:string, "Latest event time in RFC 3339 format (OCSF datetime_t)") + count(:integer, "Count of events") + end + + additional_properties(false) + end, + EventBundleValidation: + swagger_schema do + title("Event Bundle Validation") + description("The errors and and warnings found when validating an event bundle.") + + properties do + error(:string, "Overall error message") + + errors( + :array, + "Validation errors of the bundle itself", + items: %PhoenixSwagger.Schema{type: :object} + ) + + warnings( + :array, + "Validation warnings of the bundle itself", + items: %PhoenixSwagger.Schema{type: :object} + ) + + error_count(:integer, "Count of errors of the bundle itself") + warning_count(:integer, "Count of warnings of the bundle itself") + + event_validations( + :array, + "Array of event validations", + items: %PhoenixSwagger.Schema{"$ref": "#/definitions/EventValidation"}, + required: true + ) + end + + additional_properties(false) end } end @@ -321,20 +427,14 @@ defmodule SchemaWeb.SchemaController do extension -> "#{extension}/#{id}" end - try do - data = Schema.profiles() + data = Schema.profiles() - case Map.get(data, name) do - nil -> - send_json_resp(conn, 404, %{error: "Profile #{name} not found"}) + case Map.get(data, name) do + nil -> + send_json_resp(conn, 404, %{error: "Profile #{name} not found"}) - profile -> - send_json_resp(conn, Schema.delete_links(profile)) - end - rescue - e -> - Logger.error("Unable to get profile: #{id}. Error: #{inspect(e)}") - send_json_resp(conn, 500, %{error: "Error: #{e[:message]}"}) + profile -> + send_json_resp(conn, Schema.delete_links(profile)) end end @@ -399,18 +499,12 @@ defmodule SchemaWeb.SchemaController do @spec category(Plug.Conn.t(), map) :: Plug.Conn.t() def category(conn, %{"id" => id} = params) do - try do - case category_classes(params) do - nil -> - send_json_resp(conn, 404, %{error: "Category #{id} not found"}) + case category_classes(params) do + nil -> + send_json_resp(conn, 404, %{error: "Category #{id} not found"}) - data -> - send_json_resp(conn, data) - end - rescue - e -> - Logger.error("Unable to load the classes for category: #{id}. 
Error: #{inspect(e)}") - send_json_resp(conn, 500, %{error: "Error: #{e[:message]}"}) + data -> + send_json_resp(conn, data) end end @@ -512,19 +606,13 @@ defmodule SchemaWeb.SchemaController do defp class(conn, id, params) do extension = extension(params) - try do - case Schema.class(extension, id, parse_options(profiles(params))) do - nil -> - send_json_resp(conn, 404, %{error: "Event class #{id} not found"}) + case Schema.class(extension, id, parse_options(profiles(params))) do + nil -> + send_json_resp(conn, 404, %{error: "Event class #{id} not found"}) - data -> - class = add_objects(data, params) - send_json_resp(conn, class) - end - rescue - e -> - Logger.error("Unable to get class: #{id}. Error: #{inspect(e)}") - send_json_resp(conn, 500, %{error: "Error: #{e[:message]}"}) + data -> + class = add_objects(data, params) + send_json_resp(conn, class) end end @@ -608,18 +696,12 @@ defmodule SchemaWeb.SchemaController do @spec object(Plug.Conn.t(), map) :: Plug.Conn.t() def object(conn, %{"id" => id} = params) do - try do - case object(params) do - nil -> - send_json_resp(conn, 404, %{error: "Object #{id} not found"}) + case object(params) do + nil -> + send_json_resp(conn, 404, %{error: "Object #{id} not found"}) - data -> - send_json_resp(conn, add_objects(data, params)) - end - rescue - e -> - Logger.error("Unable to get object: #{id}. Error: #{inspect(e)}") - send_json_resp(conn, 500, %{error: "Error: #{e[:message]}"}) + data -> + send_json_resp(conn, add_objects(data, params)) end end @@ -810,19 +892,13 @@ defmodule SchemaWeb.SchemaController do def json_class(conn, %{"id" => id} = params) do options = Map.get(params, "package_name") |> parse_java_package() - try do - case class_ex(id, params) do - nil -> - send_json_resp(conn, 404, %{error: "Event class #{id} not found"}) + case class_ex(id, params) do + nil -> + send_json_resp(conn, 404, %{error: "Event class #{id} not found"}) - data -> - class = Schema.JsonSchema.encode(data, options) - send_json_resp(conn, class) - end - rescue - e -> - Logger.error("Unable to get class: #{id}. Error: #{inspect(e)}") - send_json_resp(conn, 500, %{error: "Error: #{e[:message]}"}) + data -> + class = Schema.JsonSchema.encode(data, options) + send_json_resp(conn, class) end end @@ -861,19 +937,13 @@ defmodule SchemaWeb.SchemaController do def json_object(conn, %{"id" => id} = params) do options = Map.get(params, "package_name") |> parse_java_package() - try do - case object_ex(id, params) do - nil -> - send_json_resp(conn, 404, %{error: "Object #{id} not found"}) + case object_ex(id, params) do + nil -> + send_json_resp(conn, 404, %{error: "Object #{id} not found"}) - data -> - object = Schema.JsonSchema.encode(data, options) - send_json_resp(conn, object) - end - rescue - e -> - Logger.error("Unable to get object: #{id}. Error: #{inspect(e)}") - send_json_resp(conn, 500, %{error: "Error: #{e[:message]}"}) + data -> + object = Schema.JsonSchema.encode(data, options) + send_json_resp(conn, object) end end @@ -1063,7 +1133,6 @@ defmodule SchemaWeb.SchemaController do produces("application/json") tag("Tools") - # TODO: This doesn't include array of events. parameters do data(:body, PhoenixSwagger.Schema.ref(:Event), "The event data to be validated", required: true @@ -1095,33 +1164,26 @@ defmodule SchemaWeb.SchemaController do end @doc """ - Validate event data, version 2. - A single event is encoded as a JSON object and multiple events are encoded as JSON array of - object. + Validate event data, version 2. Validates a single event. 
post /api/v2/validate
   """
   swagger_path :validate2 do
     post("/api/v2/validate")
     summary("Validate Event (version 2)")
 
-    # TODO:
     description(
-      "The primary objective of this API is to validate the provided event data against the OCSF" <>
-        " schema. Each event is represented as a JSON object, while multiple events are encoded" <>
-        " as a JSON array of objects."
+      "This API validates the provided event data against the OCSF schema, returning a response" <>
+        " containing validation errors and warnings."
     )
 
     produces("application/json")
     tag("Tools")
 
-    # TODO: This doesn't include array of events (same as v1 API)
     parameters do
-      data(:body, PhoenixSwagger.Schema.ref(:Event), "The event data to be validated",
-        required: true
-      )
+      data(:body, PhoenixSwagger.Schema.ref(:Event), "The event to be validated", required: true)
     end
 
-    response(200, "Success")
+    response(200, "Success", PhoenixSwagger.Schema.ref(:EventValidation))
   end
 
   @spec validate2(Plug.Conn.t(), map) :: Plug.Conn.t()
@@ -1136,26 +1198,54 @@ defmodule SchemaWeb.SchemaController do
         # This means we have a map, so validate a single event
         {200, Schema.Validator2.validate(data)}
 
-      list when is_list(list) ->
-        # Validate a list of events. First make sure we have a list of maps.
-        if Enum.all?(list, &is_map/1) do
-          {200, Schema.Validator2.validate(list)}
-        else
-          {400,
-           %{
-             error:
-               "Unexpected array element type." <>
-                 " The request JSON must be an object or array of objects."
-           }}
-        end
+      # some other json data
+      _ ->
+        {400, %{error: "Unexpected JSON. Expected a JSON object."}}
+    end
+
+    send_json_resp(conn, status, result)
+  end
+
+  @doc """
+  Validate event bundle data, version 2. Validates an event bundle and each of its events.
+  post /api/v2/validate_bundle
+  """
+  swagger_path :validate2_bundle do
+    post("/api/v2/validate_bundle")
+    summary("Validate Event Bundle (version 2)")
+
+    description(
+      "This API validates the provided event bundle. The event bundle itself is validated, and" <>
+        " each event in the bundle's events attribute is validated."
+    )
+
+    produces("application/json")
+    tag("Tools")
+
+    parameters do
+      data(:body, PhoenixSwagger.Schema.ref(:EventBundle), "The event bundle to be validated",
+        required: true
+      )
+    end
+
+    response(200, "Success", PhoenixSwagger.Schema.ref(:EventBundleValidation))
+  end
+
+  @spec validate2_bundle(Plug.Conn.t(), map) :: Plug.Conn.t()
+  def validate2_bundle(conn, data) do
+    # Phoenix's Plug.Parsers.JSON puts JSON that isn't a map into a _json key
+    # (for its own technical reasons). See:
+    # https://hexdocs.pm/plug/Plug.Parsers.JSON.html
+    # https://stackoverflow.com/questions/74931653/phoenix-wraps-json-request-in-a-map-with-json-key
+    {status, result} =
+      case data["_json"] do
+        nil ->
+          # This means we have a map, so validate the event bundle
+          {200, Schema.Validator2.validate_bundle(data)}
 
       # some other json data
       _ ->
-        {400,
-         %{
-           error:
-             "Unexpected primitive type. The request JSON must be an object or array of objects."
-         }}
+        {400, %{error: "Unexpected JSON. 
Expected a JSON object."}} end send_json_resp(conn, status, result) @@ -1224,31 +1314,25 @@ defmodule SchemaWeb.SchemaController do extension = extension(options) profiles = profiles(options) |> parse_options() - try do - case Schema.class(extension, id) do - nil -> - send_json_resp(conn, 404, %{error: "Event class #{id} not found"}) - - class -> - event = - case Map.get(options, @verbose) do - nil -> - Schema.generate_event(class, profiles) - - verbose -> - Schema.generate_event(class, profiles) - |> Schema.Translator.translate( - spaces: options[@spaces], - verbose: verbose(verbose) - ) - end - - send_json_resp(conn, event) - end - rescue - e -> - Logger.error("Unable to generate sample for class: #{id}. Error: #{inspect(e)}") - send_json_resp(conn, 500, %{error: "Error: #{e[:message]}"}) + case Schema.class(extension, id) do + nil -> + send_json_resp(conn, 404, %{error: "Event class #{id} not found"}) + + class -> + event = + case Map.get(options, @verbose) do + nil -> + Schema.generate_event(class, profiles) + + verbose -> + Schema.generate_event(class, profiles) + |> Schema.Translator.translate( + spaces: options[@spaces], + verbose: verbose(verbose) + ) + end + + send_json_resp(conn, event) end end @@ -1285,18 +1369,12 @@ defmodule SchemaWeb.SchemaController do extension = extension(options) profiles = profiles(options) |> parse_options() - try do - case Schema.object(extension, id) do - nil -> - send_json_resp(conn, 404, %{error: "Object #{id} not found"}) + case Schema.object(extension, id) do + nil -> + send_json_resp(conn, 404, %{error: "Object #{id} not found"}) - data -> - send_json_resp(conn, Schema.generate_object(data, profiles)) - end - rescue - e -> - Logger.error("Unable to generate sample for object: #{id}. Error: #{inspect(e)}") - send_json_resp(conn, 500, %{error: "Error: #{e[:message]}"}) + data -> + send_json_resp(conn, Schema.generate_object(data, profiles)) end end diff --git a/lib/schema_web/router.ex b/lib/schema_web/router.ex index 72d0675..7126c00 100644 --- a/lib/schema_web/router.ex +++ b/lib/schema_web/router.ex @@ -89,6 +89,7 @@ defmodule SchemaWeb.Router do post "/translate", SchemaController, :translate post "/validate", SchemaController, :validate post "/v2/validate", SchemaController, :validate2 + post "/v2/validate_bundle", SchemaController, :validate2_bundle end scope "/schema", SchemaWeb do From caf98fb031efa2704b04ecde388d0088778e4eb2 Mon Sep 17 00:00:00 2001 From: Rick Mouritzen Date: Thu, 6 Jun 2024 12:20:48 -0700 Subject: [PATCH 4/7] Bump Docker elixir image version from 1.16.2 to 1.16.3 --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index b0de1e0..6ab0ed2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM elixir:1.16.2-alpine as builder +FROM elixir:1.16.3-alpine as builder # prepare build dir WORKDIR /app From 6d8de1fb08e6eff84453b2ef4c2c7882b0207acc Mon Sep 17 00:00:00 2001 From: Rick Mouritzen Date: Thu, 6 Jun 2024 12:50:32 -0700 Subject: [PATCH 5/7] Update Elixir dependencies --- mix.lock | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/mix.lock b/mix.lock index 085e5f6..769d32c 100644 --- a/mix.lock +++ b/mix.lock @@ -1,33 +1,33 @@ %{ "bunt": {:hex, :bunt, "1.0.0", "081c2c665f086849e6d57900292b3a161727ab40431219529f13c4ddcf3e7a44", [:mix], [], "hexpm", "dc5f86aa08a5f6fa6b8096f0735c4e76d54ae5c9fa2c143e5a1fc7c1cd9bb6b5"}, - "castore": {:hex, :castore, "1.0.5", "9eeebb394cc9a0f3ae56b813459f990abb0a3dedee1be6b27fdb50301930502f", 
[:mix], [], "hexpm", "8d7c597c3e4a64c395980882d4bca3cebb8d74197c590dc272cfd3b6a6310578"}, - "cowboy": {:hex, :cowboy, "2.10.0", "ff9ffeff91dae4ae270dd975642997afe2a1179d94b1887863e43f681a203e26", [:make, :rebar3], [{:cowlib, "2.12.1", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "3afdccb7183cc6f143cb14d3cf51fa00e53db9ec80cdcd525482f5e99bc41d6b"}, + "castore": {:hex, :castore, "1.0.7", "b651241514e5f6956028147fe6637f7ac13802537e895a724f90bf3e36ddd1dd", [:mix], [], "hexpm", "da7785a4b0d2a021cd1292a60875a784b6caef71e76bf4917bdee1f390455cf5"}, + "cowboy": {:hex, :cowboy, "2.12.0", "f276d521a1ff88b2b9b4c54d0e753da6c66dd7be6c9fca3d9418b561828a3731", [:make, :rebar3], [{:cowlib, "2.13.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "8a7abe6d183372ceb21caa2709bec928ab2b72e18a3911aa1771639bef82651e"}, "cowboy_telemetry": {:hex, :cowboy_telemetry, "0.4.0", "f239f68b588efa7707abce16a84d0d2acf3a0f50571f8bb7f56a15865aae820c", [:rebar3], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7d98bac1ee4565d31b62d59f8823dfd8356a169e7fcbb83831b8a5397404c9de"}, - "cowlib": {:hex, :cowlib, "2.12.1", "a9fa9a625f1d2025fe6b462cb865881329b5caff8f1854d1cbc9f9533f00e1e1", [:make, :rebar3], [], "hexpm", "163b73f6367a7341b33c794c4e88e7dbfe6498ac42dcd69ef44c5bc5507c8db0"}, - "credo": {:hex, :credo, "1.7.5", "643213503b1c766ec0496d828c90c424471ea54da77c8a168c725686377b9545", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "f799e9b5cd1891577d8c773d245668aa74a2fcd15eb277f51a0131690ebfb3fd"}, + "cowlib": {:hex, :cowlib, "2.13.0", "db8f7505d8332d98ef50a3ef34b34c1afddec7506e4ee4dd4a3a266285d282ca", [:make, :rebar3], [], "hexpm", "e1e1284dc3fc030a64b1ad0d8382ae7e99da46c3246b815318a4b848873800a4"}, + "credo": {:hex, :credo, "1.7.6", "b8f14011a5443f2839b04def0b252300842ce7388f3af177157c86da18dfbeea", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "146f347fb9f8cbc5f7e39e3f22f70acbef51d441baa6d10169dd604bfbc55296"}, "decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"}, "earmark": {:hex, :earmark, "1.4.46", "8c7287bd3137e99d26ae4643e5b7ef2129a260e3dcf41f251750cb4563c8fb81", [:mix], [], "hexpm", "798d86db3d79964e759ddc0c077d5eb254968ed426399fbf5a62de2b5ff8910a"}, "elixir_uuid": {:hex, :uuid_utils, "1.6.5", "bafd6ffcbec895513a7c10855df3954f29909fb5d05ee52681e30e84297b1a80", [:mix], [{:ecto, "~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm", "36aaeee10740eae4d357231f48571a2687cb541730f94f47cbd3f186dc07899c"}, "ex_json_schema": {:hex, :ex_json_schema, "0.7.4", "09eb5b0c8184e5702bc89625a9d0c05c7a0a845d382e9f6f406a0fc1c9a8cc3f", [:mix], [], "hexpm", "45c67fa840f0d719a2b5578126dc29bcdc1f92499c0f61bcb8a3bcb5935f9684"}, - "file_system": {:hex, :file_system, "0.2.10", 
"fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"}, + "file_system": {:hex, :file_system, "1.0.0", "b689cc7dcee665f774de94b5a832e578bd7963c8e637ef940cd44327db7de2cd", [:mix], [], "hexpm", "6752092d66aec5a10e662aefeed8ddb9531d79db0bc145bb8c40325ca1d8536d"}, "html_entities": {:hex, :html_entities, "0.5.2", "9e47e70598da7de2a9ff6af8758399251db6dbb7eebe2b013f2bbd2515895c3c", [:mix], [], "hexpm", "c53ba390403485615623b9531e97696f076ed415e8d8058b1dbaa28181f4fdcc"}, "jason": {:hex, :jason, "1.4.1", "af1504e35f629ddcdd6addb3513c3853991f694921b1b9368b0bd32beb9f1b63", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "fbb01ecdfd565b56261302f7e1fcc27c4fb8f32d56eab74db621fc154604a7a1"}, "mime": {:hex, :mime, "2.0.5", "dc34c8efd439abe6ae0343edbb8556f4d63f178594894720607772a041b04b02", [:mix], [], "hexpm", "da0d64a365c45bc9935cc5c8a7fc5e49a0e0f9932a761c55d6c52b142780a05c"}, - "number": {:hex, :number, "1.0.4", "3e6e6032a3c1d4c3760e77a42c580a57a15545dd993af380809da30fe51a032c", [:mix], [{:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}], "hexpm", "16f7516584ef2be812af4f33f2eaf3f9b9f6ed8892f45853eb93113f83721e42"}, - "phoenix": {:hex, :phoenix, "1.7.11", "1d88fc6b05ab0c735b250932c4e6e33bfa1c186f76dcf623d8dd52f07d6379c7", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.7", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:websock_adapter, "~> 0.5.3", [hex: :websock_adapter, repo: "hexpm", optional: false]}], "hexpm", "b1ec57f2e40316b306708fe59b92a16b9f6f4bf50ccfa41aa8c7feb79e0ec02a"}, - "phoenix_html": {:hex, :phoenix_html, "4.0.0", "4857ec2edaccd0934a923c2b0ba526c44a173c86b847e8db725172e9e51d11d6", [:mix], [], "hexpm", "cee794a052f243291d92fa3ccabcb4c29bb8d236f655fb03bcbdc3a8214b8d13"}, - "phoenix_live_reload": {:hex, :phoenix_live_reload, "1.4.1", "2aff698f5e47369decde4357ba91fc9c37c6487a512b41732818f2204a8ef1d3", [:mix], [{:file_system, "~> 0.2.1 or ~> 0.3", [hex: :file_system, repo: "hexpm", optional: false]}, {:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}], "hexpm", "9bffb834e7ddf08467fe54ae58b5785507aaba6255568ae22b4d46e2bb3615ab"}, + "number": {:hex, :number, "1.0.5", "d92136f9b9382aeb50145782f116112078b3465b7be58df1f85952b8bb399b0f", [:mix], [{:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}], "hexpm", "c0733a0a90773a66582b9e92a3f01290987f395c972cb7d685f51dd927cd5169"}, + "phoenix": {:hex, :phoenix, "1.7.12", "1cc589e0eab99f593a8aa38ec45f15d25297dd6187ee801c8de8947090b5a9d3", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: 
:phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.7", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:websock_adapter, "~> 0.5.3", [hex: :websock_adapter, repo: "hexpm", optional: false]}], "hexpm", "d646192fbade9f485b01bc9920c139bfdd19d0f8df3d73fd8eaf2dfbe0d2837c"}, + "phoenix_html": {:hex, :phoenix_html, "4.1.1", "4c064fd3873d12ebb1388425a8f2a19348cef56e7289e1998e2d2fa758aa982e", [:mix], [], "hexpm", "f2f2df5a72bc9a2f510b21497fd7d2b86d932ec0598f0210fed4114adc546c6f"}, + "phoenix_live_reload": {:hex, :phoenix_live_reload, "1.5.3", "f2161c207fda0e4fb55165f650f7f8db23f02b29e3bff00ff7ef161d6ac1f09d", [:mix], [{:file_system, "~> 0.3 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}], "hexpm", "b4ec9cd73cb01ff1bd1cac92e045d13e7030330b74164297d1aee3907b54803c"}, "phoenix_markdown": {:hex, :phoenix_markdown, "1.0.3", "8095c40dd5037f4b56079ad66de3fe9136406c7c44e1222ce3c74d22e4c7870a", [:mix], [{:earmark, "~> 1.2", [hex: :earmark, repo: "hexpm", optional: false]}, {:html_entities, "~> 0.4", [hex: :html_entities, repo: "hexpm", optional: false]}, {:phoenix, ">= 1.1.0", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, ">= 2.3.0", [hex: :phoenix_html, repo: "hexpm", optional: false]}], "hexpm", "d3591c4cb3357b068cc8284952dbacedb874b287add27787eea2d1d314b18c16"}, "phoenix_pubsub": {:hex, :phoenix_pubsub, "2.1.3", "3168d78ba41835aecad272d5e8cd51aa87a7ac9eb836eabc42f6e57538e3731d", [:mix], [], "hexpm", "bba06bc1dcfd8cb086759f0edc94a8ba2bc8896d5331a1e2c2902bf8e36ee502"}, "phoenix_swagger": {:hex, :phoenix_swagger, "0.8.3", "298d6204802409d3b0b4fc1013873839478707cf3a62532a9e10fec0e26d0e37", [:mix], [{:ex_json_schema, "~> 0.7.1", [hex: :ex_json_schema, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:plug, "~> 1.11", [hex: :plug, repo: "hexpm", optional: false]}, {:poison, "~> 2.2 or ~> 3.0", [hex: :poison, repo: "hexpm", optional: true]}], "hexpm", "3bc0fa9f5b679b8a61b90a52b2c67dd932320e9a84a6f91a4af872a0ab367337"}, "phoenix_template": {:hex, :phoenix_template, "1.0.4", "e2092c132f3b5e5b2d49c96695342eb36d0ed514c5b252a77048d5969330d639", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", "2c0c81f0e5c6753faf5cca2f229c9709919aba34fab866d3bc05060c9c444206"}, "phoenix_view": {:hex, :phoenix_view, "2.0.3", "4d32c4817fce933693741deeb99ef1392619f942633dde834a5163124813aad3", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}], "hexpm", "cd34049af41be2c627df99cd4eaa71fc52a328c0c3d8e7d4aa28f880c30e7f64"}, - "plug": {:hex, :plug, "1.15.3", "712976f504418f6dff0a3e554c40d705a9bcf89a7ccef92fc6a5ef8f16a30a97", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", 
"cc4365a3c010a56af402e0809208873d113e9c38c401cabd88027ef4f5c01fd2"}, - "plug_cowboy": {:hex, :plug_cowboy, "2.7.0", "3ae9369c60641084363b08fe90267cbdd316df57e3557ea522114b30b63256ea", [:mix], [{:cowboy, "~> 2.7.0 or ~> 2.8.0 or ~> 2.9.0 or ~> 2.10.0", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "d85444fb8aa1f2fc62eabe83bbe387d81510d773886774ebdcb429b3da3c1a4a"}, - "plug_crypto": {:hex, :plug_crypto, "2.0.0", "77515cc10af06645abbfb5e6ad7a3e9714f805ae118fa1a70205f80d2d70fe73", [:mix], [], "hexpm", "53695bae57cc4e54566d993eb01074e4d894b65a3766f1c43e2c61a1b0f45ea9"}, + "plug": {:hex, :plug, "1.16.0", "1d07d50cb9bb05097fdf187b31cf087c7297aafc3fed8299aac79c128a707e47", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "cbf53aa1f5c4d758a7559c0bd6d59e286c2be0c6a1fac8cc3eee2f638243b93e"}, + "plug_cowboy": {:hex, :plug_cowboy, "2.7.1", "87677ffe3b765bc96a89be7960f81703223fe2e21efa42c125fcd0127dd9d6b2", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "02dbd5f9ab571b864ae39418db7811618506256f6d13b4a45037e5fe78dc5de3"}, + "plug_crypto": {:hex, :plug_crypto, "2.1.0", "f44309c2b06d249c27c8d3f65cfe08158ade08418cf540fd4f72d4d6863abb7b", [:mix], [], "hexpm", "131216a4b030b8f8ce0f26038bc4421ae60e4bb95c5cf5395e1421437824c4fa"}, "ranch": {:hex, :ranch, "1.8.0", "8c7a100a139fd57f17327b6413e4167ac559fbc04ca7448e9be9057311597a1d", [:make, :rebar3], [], "hexpm", "49fbcfd3682fab1f5d109351b61257676da1a2fdbe295904176d5e521a2ddfe5"}, "sobelow": {:hex, :sobelow, "0.13.0", "218afe9075904793f5c64b8837cc356e493d88fddde126a463839351870b8d1e", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "cd6e9026b85fc35d7529da14f95e85a078d9dd1907a9097b3ba6ac7ebbe34a0d"}, "telemetry": {:hex, :telemetry, "1.2.1", "68fdfe8d8f05a8428483a97d7aab2f268aaff24b49e0f599faa091f1d4e7f61c", [:rebar3], [], "hexpm", "dad9ce9d8effc621708f99eac538ef1cbe05d6a874dd741de2e689c47feafed5"}, "websock": {:hex, :websock, "0.5.3", "2f69a6ebe810328555b6fe5c831a851f485e303a7c8ce6c5f675abeb20ebdadc", [:mix], [], "hexpm", "6105453d7fac22c712ad66fab1d45abdf049868f253cf719b625151460b8b453"}, - "websock_adapter": {:hex, :websock_adapter, "0.5.5", "9dfeee8269b27e958a65b3e235b7e447769f66b5b5925385f5a569269164a210", [:mix], [{:bandit, ">= 0.6.0", [hex: :bandit, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.6", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:websock, "~> 0.5", [hex: :websock, repo: "hexpm", optional: false]}], "hexpm", "4b977ba4a01918acbf77045ff88de7f6972c2a009213c515a445c48f224ffce9"}, + "websock_adapter": {:hex, :websock_adapter, "0.5.6", "0437fe56e093fd4ac422de33bf8fc89f7bc1416a3f2d732d8b2c8fd54792fe60", [:mix], [{:bandit, ">= 0.6.0", [hex: :bandit, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.6", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:websock, "~> 0.5", [hex: 
:websock, repo: "hexpm", optional: false]}], "hexpm", "e04378d26b0af627817ae84c92083b7e97aca3121196679b73c73b99d0d133ea"}, } From 12857b8105f89927b927e172aefea7f0e39079d3 Mon Sep 17 00:00:00 2001 From: Rick Mouritzen Date: Thu, 6 Jun 2024 12:56:59 -0700 Subject: [PATCH 6/7] Update Docker image for Elixir for final image (missed that last time) --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 6ab0ed2..cd6350f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -38,7 +38,7 @@ RUN mix release # start a new build stage so that the final image will only contain # the compiled release and other runtime necessities -FROM elixir:1.16.2-alpine +FROM elixir:1.16.3-alpine # Set the locale # RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && locale-gen From 0a565da2085cfc5528e7cbd8f2381230a3f48ff5 Mon Sep 17 00:00:00 2001 From: Rick Mouritzen Date: Fri, 14 Jun 2024 12:12:50 -0700 Subject: [PATCH 7/7] Dependency updates --- Dockerfile | 6 ++++-- mix.lock | 4 ++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/Dockerfile b/Dockerfile index cd6350f..d52b16f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,6 @@ -FROM elixir:1.16.3-alpine as builder +ARG elixir_image=elixir:1.17.0-alpine + +FROM ${elixir_image} as builder # prepare build dir WORKDIR /app @@ -38,7 +40,7 @@ RUN mix release # start a new build stage so that the final image will only contain # the compiled release and other runtime necessities -FROM elixir:1.16.3-alpine +FROM ${elixir_image} # Set the locale # RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && locale-gen diff --git a/mix.lock b/mix.lock index 769d32c..5a15c84 100644 --- a/mix.lock +++ b/mix.lock @@ -4,7 +4,7 @@ "cowboy": {:hex, :cowboy, "2.12.0", "f276d521a1ff88b2b9b4c54d0e753da6c66dd7be6c9fca3d9418b561828a3731", [:make, :rebar3], [{:cowlib, "2.13.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "8a7abe6d183372ceb21caa2709bec928ab2b72e18a3911aa1771639bef82651e"}, "cowboy_telemetry": {:hex, :cowboy_telemetry, "0.4.0", "f239f68b588efa7707abce16a84d0d2acf3a0f50571f8bb7f56a15865aae820c", [:rebar3], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7d98bac1ee4565d31b62d59f8823dfd8356a169e7fcbb83831b8a5397404c9de"}, "cowlib": {:hex, :cowlib, "2.13.0", "db8f7505d8332d98ef50a3ef34b34c1afddec7506e4ee4dd4a3a266285d282ca", [:make, :rebar3], [], "hexpm", "e1e1284dc3fc030a64b1ad0d8382ae7e99da46c3246b815318a4b848873800a4"}, - "credo": {:hex, :credo, "1.7.6", "b8f14011a5443f2839b04def0b252300842ce7388f3af177157c86da18dfbeea", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "146f347fb9f8cbc5f7e39e3f22f70acbef51d441baa6d10169dd604bfbc55296"}, + "credo": {:hex, :credo, "1.7.7", "771445037228f763f9b2afd612b6aa2fd8e28432a95dbbc60d8e03ce71ba4446", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "8bc87496c9aaacdc3f90f01b7b0582467b69b4bd2441fe8aae3109d843cc2f2e"}, "decimal": {:hex, :decimal, "2.1.1", 
"5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"}, "earmark": {:hex, :earmark, "1.4.46", "8c7287bd3137e99d26ae4643e5b7ef2129a260e3dcf41f251750cb4563c8fb81", [:mix], [], "hexpm", "798d86db3d79964e759ddc0c077d5eb254968ed426399fbf5a62de2b5ff8910a"}, "elixir_uuid": {:hex, :uuid_utils, "1.6.5", "bafd6ffcbec895513a7c10855df3954f29909fb5d05ee52681e30e84297b1a80", [:mix], [{:ecto, "~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm", "36aaeee10740eae4d357231f48571a2687cb541730f94f47cbd3f186dc07899c"}, @@ -21,7 +21,7 @@ "phoenix_pubsub": {:hex, :phoenix_pubsub, "2.1.3", "3168d78ba41835aecad272d5e8cd51aa87a7ac9eb836eabc42f6e57538e3731d", [:mix], [], "hexpm", "bba06bc1dcfd8cb086759f0edc94a8ba2bc8896d5331a1e2c2902bf8e36ee502"}, "phoenix_swagger": {:hex, :phoenix_swagger, "0.8.3", "298d6204802409d3b0b4fc1013873839478707cf3a62532a9e10fec0e26d0e37", [:mix], [{:ex_json_schema, "~> 0.7.1", [hex: :ex_json_schema, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:plug, "~> 1.11", [hex: :plug, repo: "hexpm", optional: false]}, {:poison, "~> 2.2 or ~> 3.0", [hex: :poison, repo: "hexpm", optional: true]}], "hexpm", "3bc0fa9f5b679b8a61b90a52b2c67dd932320e9a84a6f91a4af872a0ab367337"}, "phoenix_template": {:hex, :phoenix_template, "1.0.4", "e2092c132f3b5e5b2d49c96695342eb36d0ed514c5b252a77048d5969330d639", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", "2c0c81f0e5c6753faf5cca2f229c9709919aba34fab866d3bc05060c9c444206"}, - "phoenix_view": {:hex, :phoenix_view, "2.0.3", "4d32c4817fce933693741deeb99ef1392619f942633dde834a5163124813aad3", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}], "hexpm", "cd34049af41be2c627df99cd4eaa71fc52a328c0c3d8e7d4aa28f880c30e7f64"}, + "phoenix_view": {:hex, :phoenix_view, "2.0.4", "b45c9d9cf15b3a1af5fb555c674b525391b6a1fe975f040fb4d913397b31abf4", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}], "hexpm", "4e992022ce14f31fe57335db27a28154afcc94e9983266835bb3040243eb620b"}, "plug": {:hex, :plug, "1.16.0", "1d07d50cb9bb05097fdf187b31cf087c7297aafc3fed8299aac79c128a707e47", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "cbf53aa1f5c4d758a7559c0bd6d59e286c2be0c6a1fac8cc3eee2f638243b93e"}, "plug_cowboy": {:hex, :plug_cowboy, "2.7.1", "87677ffe3b765bc96a89be7960f81703223fe2e21efa42c125fcd0127dd9d6b2", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "02dbd5f9ab571b864ae39418db7811618506256f6d13b4a45037e5fe78dc5de3"}, "plug_crypto": {:hex, :plug_crypto, "2.1.0", "f44309c2b06d249c27c8d3f65cfe08158ade08418cf540fd4f72d4d6863abb7b", [:mix], [], "hexpm", "131216a4b030b8f8ce0f26038bc4421ae60e4bb95c5cf5395e1421437824c4fa"},